diff --git a/ch07/ch07.ipynb b/ch07/ch07.ipynb
index a6ba1670..181df4da 100644
--- a/ch07/ch07.ipynb
+++ b/ch07/ch07.ipynb
@@ -93,7 +93,7 @@
     " - [Evaluating and tuning the ensemble classifier](#Evaluating-and-tuning-the-ensemble-classifier)\n",
     "- [Bagging – building an ensemble of classifiers from bootstrap samples](#Bagging----Building-an-ensemble-of-classifiers-from-bootstrap-samples)\n",
     " - [Bagging in a nutshell](#Bagging-in-a-nutshell)\n",
-    " - [Applying bagging to classify samples in the Wine dataset](#Applying-bagging-to-classify-samples-in-the-Wine-dataset)\n",
+    " - [Applying bagging to classify examples in the Wine dataset](#Applying-bagging-to-classify-examples-in-the-Wine-dataset)\n",
     "- [Leveraging weak learners via adaptive boosting](#Leveraging-weak-learners-via-adaptive-boosting)\n",
     " - [How boosting works](#How-boosting-works)\n",
     " - [Applying AdaBoost using scikit-learn](#Applying-AdaBoost-using-scikit-learn)\n",
@@ -404,10 +404,10 @@
     "\n",
     "        Parameters\n",
     "        ----------\n",
-    "        X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n",
-    "            Matrix of training samples.\n",
+    "        X : {array-like, sparse matrix}, shape = [n_examples, n_features]\n",
+    "            Matrix of training examples.\n",
     "\n",
-    "        y : array-like, shape = [n_samples]\n",
+    "        y : array-like, shape = [n_examples]\n",
     "            Vector of target class labels.\n",
     "\n",
     "        Returns\n",
@@ -441,12 +441,12 @@
     "\n",
     "        Parameters\n",
     "        ----------\n",
-    "        X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n",
-    "            Matrix of training samples.\n",
+    "        X : {array-like, sparse matrix}, shape = [n_examples, n_features]\n",
+    "            Matrix of training examples.\n",
     "\n",
     "        Returns\n",
     "        ----------\n",
-    "        maj_vote : array-like, shape = [n_samples]\n",
+    "        maj_vote : array-like, shape = [n_examples]\n",
     "            Predicted class labels.\n",
     "            \n",
     "        \"\"\"\n",
@@ -472,14 +472,14 @@
     "\n",
     "        Parameters\n",
     "        ----------\n",
-    "        X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n",
-    "            Training vectors, where n_samples is the number of samples and\n",
+    "        X : {array-like, sparse matrix}, shape = [n_examples, n_features]\n",
+    "            Training vectors, where n_examples is the number of examples and\n",
     "            n_features is the number of features.\n",
     "\n",
     "        Returns\n",
     "        ----------\n",
-    "        avg_proba : array-like, shape = [n_samples, n_classes]\n",
-    "            Weighted average probability for each class per sample.\n",
+    "        avg_proba : array-like, shape = [n_examples, n_classes]\n",
+    "            Weighted average probability for each class per example.\n",
     "\n",
     "        \"\"\"\n",
     "        probas = np.asarray([clf.predict_proba(X)\n",
@@ -1256,7 +1256,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Applying bagging to classify samples in the Wine dataset"
+    "## Applying bagging to classify examples in the Wine dataset"
   ]
  },
  {
diff --git a/ch07/ch07.py b/ch07/ch07.py
index 869134b8..433dbf98 100644
--- a/ch07/ch07.py
+++ b/ch07/ch07.py
@@ -56,7 +56,7 @@
 # - [Evaluating and tuning the ensemble classifier](#Evaluating-and-tuning-the-ensemble-classifier)
 # - [Bagging – building an ensemble of classifiers from bootstrap samples](#Bagging----Building-an-ensemble-of-classifiers-from-bootstrap-samples)
 # - [Bagging in a nutshell](#Bagging-in-a-nutshell)
-# - [Applying bagging to classify samples in the Wine dataset](#Applying-bagging-to-classify-samples-in-the-Wine-dataset)
+# - [Applying bagging to classify examples in the Wine dataset](#Applying-bagging-to-classify-examples-in-the-Wine-dataset)
 # - [Leveraging weak learners via adaptive boosting](#Leveraging-weak-learners-via-adaptive-boosting)
 # - [How boosting works](#How-boosting-works)
 # - [Applying AdaBoost using scikit-learn](#Applying-AdaBoost-using-scikit-learn)
@@ -188,10 +188,10 @@ def fit(self, X, y):
 
         Parameters
         ----------
-        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
-            Matrix of training samples.
+        X : {array-like, sparse matrix}, shape = [n_examples, n_features]
+            Matrix of training examples.
 
-        y : array-like, shape = [n_samples]
+        y : array-like, shape = [n_examples]
             Vector of target class labels.
 
         Returns
@@ -225,12 +225,12 @@ def predict(self, X):
 
         Parameters
         ----------
-        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
-            Matrix of training samples.
+        X : {array-like, sparse matrix}, shape = [n_examples, n_features]
+            Matrix of training examples.
 
         Returns
         ----------
-        maj_vote : array-like, shape = [n_samples]
+        maj_vote : array-like, shape = [n_examples]
             Predicted class labels.
 
         """
@@ -256,14 +256,14 @@ def predict_proba(self, X):
 
         Parameters
         ----------
-        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
-            Training vectors, where n_samples is the number of samples and
+        X : {array-like, sparse matrix}, shape = [n_examples, n_features]
+            Training vectors, where n_examples is the number of examples and
             n_features is the number of features.
 
         Returns
         ----------
-        avg_proba : array-like, shape = [n_samples, n_classes]
-            Weighted average probability for each class per sample.
+        avg_proba : array-like, shape = [n_examples, n_classes]
+            Weighted average probability for each class per example.
 
         """
         probas = np.asarray([clf.predict_proba(X)
@@ -547,7 +547,7 @@ def get_params(self, deep=True):
 
 
 
-# ## Applying bagging to classify samples in the Wine dataset
+# ## Applying bagging to classify examples in the Wine dataset
 
 
 
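Note (not part of the patch): a minimal, self-contained sketch of the behavior the renamed predict_proba docstring describes, i.e. averaging per-classifier class probabilities into an [n_examples, n_classes] array and taking the argmax over classes as the majority-vote label. The base estimators, the equal weights, and the use of scikit-learn's Wine loader below are illustrative assumptions, not code taken from ch07.

import numpy as np
from sklearn.datasets import load_wine
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier

# Illustrative stand-ins: two fitted base estimators and equal vote weights.
X, y = load_wine(return_X_y=True)          # X has shape [n_examples, n_features]
classifiers = [GaussianNB().fit(X, y),
               DecisionTreeClassifier(max_depth=1).fit(X, y)]
weights = [0.5, 0.5]

# Stack per-classifier probabilities: shape [n_classifiers, n_examples, n_classes].
probas = np.asarray([clf.predict_proba(X) for clf in classifiers])

# Weighted average over classifiers gives avg_proba with the documented
# [n_examples, n_classes] shape; argmax over the class axis gives the labels.
avg_proba = np.average(probas, axis=0, weights=weights)
maj_vote = np.argmax(avg_proba, axis=1)
print(avg_proba.shape, maj_vote.shape)     # (178, 3) (178,)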