
Commit 4a79ddf

Pushing the docs to dev/ for branch: main, commit 15504329657fc6efc11a68cf1b672fc7e2289420
1 parent 183e45a · commit 4a79ddf

File tree

760 files changed · +3203 -3088 lines changed


dev/.buildinfo

+1-1
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 98f80686770ccc4ae1b4560577c24427
+config: 261086da5a179db4ef72d848b547be45
 tags: 645f666f9bcd5a90fca523b33c5a78b7

dev/_downloads/179a84f8da8ce09af733c9a82135ca4d/plot_oneclass.ipynb

+12-1
@@ -15,7 +15,18 @@
 },
 "outputs": [],
 "source": [
-"import matplotlib.font_manager\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nfrom sklearn import svm\n\nxx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))\n# Generate train data\nX = 0.3 * np.random.randn(100, 2)\nX_train = np.r_[X + 2, X - 2]\n# Generate some regular novel observations\nX = 0.3 * np.random.randn(20, 2)\nX_test = np.r_[X + 2, X - 2]\n# Generate some abnormal novel observations\nX_outliers = np.random.uniform(low=-4, high=4, size=(20, 2))\n\n# fit the model\nclf = svm.OneClassSVM(nu=0.1, kernel=\"rbf\", gamma=0.1)\nclf.fit(X_train)\ny_pred_train = clf.predict(X_train)\ny_pred_test = clf.predict(X_test)\ny_pred_outliers = clf.predict(X_outliers)\nn_error_train = y_pred_train[y_pred_train == -1].size\nn_error_test = y_pred_test[y_pred_test == -1].size\nn_error_outliers = y_pred_outliers[y_pred_outliers == 1].size\n\n# plot the line, the points, and the nearest vectors to the plane\nZ = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\nZ = Z.reshape(xx.shape)\n\nplt.title(\"Novelty Detection\")\nplt.contourf(xx, yy, Z, levels=np.linspace(Z.min(), 0, 7), cmap=plt.cm.PuBu)\na = plt.contour(xx, yy, Z, levels=[0], linewidths=2, colors=\"darkred\")\nplt.contourf(xx, yy, Z, levels=[0, Z.max()], colors=\"palevioletred\")\n\ns = 40\nb1 = plt.scatter(X_train[:, 0], X_train[:, 1], c=\"white\", s=s, edgecolors=\"k\")\nb2 = plt.scatter(X_test[:, 0], X_test[:, 1], c=\"blueviolet\", s=s, edgecolors=\"k\")\nc = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c=\"gold\", s=s, edgecolors=\"k\")\nplt.axis(\"tight\")\nplt.xlim((-5, 5))\nplt.ylim((-5, 5))\nplt.legend(\n [a.collections[0], b1, b2, c],\n [\n \"learned frontier\",\n \"training observations\",\n \"new regular observations\",\n \"new abnormal observations\",\n ],\n loc=\"upper left\",\n prop=matplotlib.font_manager.FontProperties(size=11),\n)\nplt.xlabel(\n \"error train: %d/200 ; errors novel regular: %d/40 ; errors novel abnormal: %d/40\"\n % (n_error_train, n_error_test, n_error_outliers)\n)\nplt.show()"
+"import numpy as np\n\nfrom sklearn import svm\n\n# Generate train data\nX = 0.3 * np.random.randn(100, 2)\nX_train = np.r_[X + 2, X - 2]\n# Generate some regular novel observations\nX = 0.3 * np.random.randn(20, 2)\nX_test = np.r_[X + 2, X - 2]\n# Generate some abnormal novel observations\nX_outliers = np.random.uniform(low=-4, high=4, size=(20, 2))\n\n# fit the model\nclf = svm.OneClassSVM(nu=0.1, kernel=\"rbf\", gamma=0.1)\nclf.fit(X_train)\ny_pred_train = clf.predict(X_train)\ny_pred_test = clf.predict(X_test)\ny_pred_outliers = clf.predict(X_outliers)\nn_error_train = y_pred_train[y_pred_train == -1].size\nn_error_test = y_pred_test[y_pred_test == -1].size\nn_error_outliers = y_pred_outliers[y_pred_outliers == 1].size"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"collapsed": false
+},
+"outputs": [],
+"source": [
+"import matplotlib.font_manager\nimport matplotlib.lines as mlines\nimport matplotlib.pyplot as plt\n\nfrom sklearn.inspection import DecisionBoundaryDisplay\n\n_, ax = plt.subplots()\n\n# generate grid for the boundary display\nxx, yy = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))\nX = np.concatenate([xx.reshape(-1, 1), yy.reshape(-1, 1)], axis=1)\nDecisionBoundaryDisplay.from_estimator(\n clf,\n X,\n response_method=\"decision_function\",\n plot_method=\"contourf\",\n ax=ax,\n cmap=\"PuBu\",\n)\nDecisionBoundaryDisplay.from_estimator(\n clf,\n X,\n response_method=\"decision_function\",\n plot_method=\"contourf\",\n ax=ax,\n levels=[0, 10000],\n colors=\"palevioletred\",\n)\nDecisionBoundaryDisplay.from_estimator(\n clf,\n X,\n response_method=\"decision_function\",\n plot_method=\"contour\",\n ax=ax,\n levels=[0],\n colors=\"darkred\",\n linewidths=2,\n)\n\ns = 40\nb1 = ax.scatter(X_train[:, 0], X_train[:, 1], c=\"white\", s=s, edgecolors=\"k\")\nb2 = ax.scatter(X_test[:, 0], X_test[:, 1], c=\"blueviolet\", s=s, edgecolors=\"k\")\nc = ax.scatter(X_outliers[:, 0], X_outliers[:, 1], c=\"gold\", s=s, edgecolors=\"k\")\nplt.legend(\n [mlines.Line2D([], [], color=\"darkred\"), b1, b2, c],\n [\n \"learned frontier\",\n \"training observations\",\n \"new regular observations\",\n \"new abnormal observations\",\n ],\n loc=\"upper left\",\n prop=matplotlib.font_manager.FontProperties(size=11),\n)\nax.set(\n xlabel=(\n f\"error train: {n_error_train}/200 ; errors novel regular: {n_error_test}/40 ;\"\n f\" errors novel abnormal: {n_error_outliers}/40\"\n ),\n title=\"Novelty Detection\",\n xlim=(-5, 5),\n ylim=(-5, 5),\n)\nplt.show()"
 ]
 }
 ],

dev/_downloads/616e8a231ab03301473c9183f6cf03e8/plot_oneclass.py

+51-20
@@ -11,13 +11,11 @@
 
 """
 
-import matplotlib.font_manager
-import matplotlib.pyplot as plt
+# %%
 import numpy as np
 
 from sklearn import svm
 
-xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))
 # Generate train data
 X = 0.3 * np.random.randn(100, 2)
 X_train = np.r_[X + 2, X - 2]
@@ -37,24 +35,52 @@
 n_error_test = y_pred_test[y_pred_test == -1].size
 n_error_outliers = y_pred_outliers[y_pred_outliers == 1].size
 
-# plot the line, the points, and the nearest vectors to the plane
-Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
-Z = Z.reshape(xx.shape)
+# %%
+import matplotlib.font_manager
+import matplotlib.lines as mlines
+import matplotlib.pyplot as plt
+
+from sklearn.inspection import DecisionBoundaryDisplay
 
-plt.title("Novelty Detection")
-plt.contourf(xx, yy, Z, levels=np.linspace(Z.min(), 0, 7), cmap=plt.cm.PuBu)
-a = plt.contour(xx, yy, Z, levels=[0], linewidths=2, colors="darkred")
-plt.contourf(xx, yy, Z, levels=[0, Z.max()], colors="palevioletred")
+_, ax = plt.subplots()
+
+# generate grid for the boundary display
+xx, yy = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))
+X = np.concatenate([xx.reshape(-1, 1), yy.reshape(-1, 1)], axis=1)
+DecisionBoundaryDisplay.from_estimator(
+    clf,
+    X,
+    response_method="decision_function",
+    plot_method="contourf",
+    ax=ax,
+    cmap="PuBu",
+)
+DecisionBoundaryDisplay.from_estimator(
+    clf,
+    X,
+    response_method="decision_function",
+    plot_method="contourf",
+    ax=ax,
+    levels=[0, 10000],
+    colors="palevioletred",
+)
+DecisionBoundaryDisplay.from_estimator(
+    clf,
+    X,
+    response_method="decision_function",
+    plot_method="contour",
+    ax=ax,
+    levels=[0],
+    colors="darkred",
+    linewidths=2,
+)
 
 s = 40
-b1 = plt.scatter(X_train[:, 0], X_train[:, 1], c="white", s=s, edgecolors="k")
-b2 = plt.scatter(X_test[:, 0], X_test[:, 1], c="blueviolet", s=s, edgecolors="k")
-c = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c="gold", s=s, edgecolors="k")
-plt.axis("tight")
-plt.xlim((-5, 5))
-plt.ylim((-5, 5))
+b1 = ax.scatter(X_train[:, 0], X_train[:, 1], c="white", s=s, edgecolors="k")
+b2 = ax.scatter(X_test[:, 0], X_test[:, 1], c="blueviolet", s=s, edgecolors="k")
+c = ax.scatter(X_outliers[:, 0], X_outliers[:, 1], c="gold", s=s, edgecolors="k")
 plt.legend(
-    [a.collections[0], b1, b2, c],
+    [mlines.Line2D([], [], color="darkred"), b1, b2, c],
     [
         "learned frontier",
         "training observations",
@@ -64,8 +90,13 @@
     loc="upper left",
     prop=matplotlib.font_manager.FontProperties(size=11),
 )
-plt.xlabel(
-    "error train: %d/200 ; errors novel regular: %d/40 ; errors novel abnormal: %d/40"
-    % (n_error_train, n_error_test, n_error_outliers)
+ax.set(
+    xlabel=(
+        f"error train: {n_error_train}/200 ; errors novel regular: {n_error_test}/40 ;"
+        f" errors novel abnormal: {n_error_outliers}/40"
+    ),
+    title="Novelty Detection",
+    xlim=(-5, 5),
+    ylim=(-5, 5),
 )
 plt.show()
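
For reference, the plotting pattern adopted above in plot_oneclass.py can be exercised on its own. The following is a minimal sketch, not taken verbatim from the commit: the toy training data, the rng and grid names, and the reduced styling are illustrative assumptions; only the OneClassSVM setup and the DecisionBoundaryDisplay.from_estimator calls mirror the updated example.

import matplotlib.pyplot as plt
import numpy as np

from sklearn import svm
from sklearn.inspection import DecisionBoundaryDisplay

# Toy training data (illustrative assumption, not the example's exact setup)
rng = np.random.RandomState(0)
X_train = np.r_[0.3 * rng.randn(100, 2) + 2, 0.3 * rng.randn(100, 2) - 2]

# Fit the one-class model with the same parameters as the example
clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.1).fit(X_train)

# Points spanning the plotting region; from_estimator builds its own mesh
# from the range of these points
xx, yy = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))
grid = np.c_[xx.ravel(), yy.ravel()]

_, ax = plt.subplots()
# Filled contours of the decision function ...
DecisionBoundaryDisplay.from_estimator(
    clf,
    grid,
    response_method="decision_function",
    plot_method="contourf",
    ax=ax,
    cmap="PuBu",
)
# ... and its zero-level set, i.e. the learned frontier
DecisionBoundaryDisplay.from_estimator(
    clf,
    grid,
    response_method="decision_function",
    plot_method="contour",
    ax=ax,
    levels=[0],
    colors="darkred",
    linewidths=2,
)
ax.scatter(X_train[:, 0], X_train[:, 1], c="white", s=40, edgecolors="k")
ax.set(title="Novelty Detection", xlim=(-5, 5), ylim=(-5, 5))
plt.show()

The committed example additionally shades the inlier region with a second contourf call (levels=[0, 10000], colors="palevioletred") and plots the regular and abnormal test points alongside the training data.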

dev/_downloads/scikit-learn-docs.zip

14.2 KB
Binary file not shown.

dev/_sources/auto_examples/applications/plot_cyclical_feature_engineering.rst.txt

+1-1

dev/_sources/auto_examples/applications/plot_digits_denoising.rst.txt

+1-1

dev/_sources/auto_examples/applications/plot_face_recognition.rst.txt

+4-4

dev/_sources/auto_examples/applications/plot_model_complexity_influence.rst.txt

+15-15
