Merge branch 'master' of https://github.com/ageron/handson-ml into upstream

This commit is contained in:
rickiepark
2018-03-12 15:29:16 +09:00
5 changed files with 206 additions and 656 deletions

View File

@@ -3898,7 +3898,7 @@
"forest_reg = RandomForestRegressor(random_state=42)\n",
"# train across 5 folds, that's a total of (12+6)*5=90 rounds of training \n",
"grid_search = GridSearchCV(forest_reg, param_grid, cv=5,\n",
" scoring='neg_mean_squared_error')\n",
" scoring='neg_mean_squared_error', return_train_score=True)\n",
"grid_search.fit(housing_prepared, housing_labels)"
]
},

View File

@@ -364,7 +364,7 @@
"source": [
"from sklearn.linear_model import SGDClassifier\n",
"\n",
"sgd_clf = SGDClassifier(random_state=42)\n",
"sgd_clf = SGDClassifier(max_iter=5, random_state=42)\n",
"sgd_clf.fit(X_train, y_train_5)"
]
},
@@ -1292,7 +1292,7 @@
],
"source": [
"from sklearn.multiclass import OneVsOneClassifier\n",
"ovo_clf = OneVsOneClassifier(SGDClassifier(random_state=42))\n",
"ovo_clf = OneVsOneClassifier(SGDClassifier(max_iter=5, random_state=42))\n",
"ovo_clf.fit(X_train, y_train)\n",
"ovo_clf.predict([some_digit])"
]
@@ -1638,7 +1638,7 @@
}
],
"source": [
"y_train_knn_pred = cross_val_predict(knn_clf, X_train, y_multilabel, cv=3)\n",
"y_train_knn_pred = cross_val_predict(knn_clf, X_train, y_multilabel, cv=3, n_jobs=-1)\n",
"f1_score(y_multilabel, y_train_knn_pred, average=\"macro\")"
]
},
@@ -2133,7 +2133,7 @@
"param_grid = [{'weights': [\"uniform\", \"distance\"], 'n_neighbors': [3, 4, 5]}]\n",
"\n",
"knn_clf = KNeighborsClassifier()\n",
"grid_search = GridSearchCV(knn_clf, param_grid, cv=5, verbose=3)\n",
"grid_search = GridSearchCV(knn_clf, param_grid, cv=5, verbose=3, n_jobs=-1)\n",
"grid_search.fit(X_train, y_train)"
]
},

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -607,7 +607,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Okay, let's start by creating a trainable variable of shape (1, 1152, 10, 16, 8) that will hold all the transformation matrices. The first dimension of size 1 will make this array easy to tile. We initialize this variable randomly using a normal distribution with a standard deviation of 0.01."
"Okay, let's start by creating a trainable variable of shape (1, 1152, 10, 16, 8) that will hold all the transformation matrices. The first dimension of size 1 will make this array easy to tile. We initialize this variable randomly using a normal distribution with a standard deviation of 0.1."
]
},
{
@@ -616,7 +616,7 @@
"metadata": {},
"outputs": [],
"source": [
"init_sigma = 0.01\n",
"init_sigma = 0.1\n",
"\n",
"W_init = tf.random_normal(\n",
" shape=(1, caps1_n_caps, caps2_n_caps, caps2_n_dims, caps1_n_dims),\n",