
Commit 683e929

Pushing the docs to dev/ for branch: main, commit d4d5f8c7e02cfaced76757fbf38e21c5b28b67b0
1 parent bb02afc commit 683e929


1,984 files changed: +6217 −7501 lines changed


dev/_downloads/006fc185672e58b056a5c134db26935c/plot_coin_segmentation.ipynb

+1 −1

@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-
"print(__doc__)\n\n# Author: Gael Varoquaux <[email protected]>, Brian Cheung\n# License: BSD 3 clause\n\nimport time\n\nimport numpy as np\nfrom scipy.ndimage.filters import gaussian_filter\nimport matplotlib.pyplot as plt\nimport skimage\nfrom skimage.data import coins\nfrom skimage.transform import rescale\n\nfrom sklearn.feature_extraction import image\nfrom sklearn.cluster import spectral_clustering\nfrom sklearn.utils.fixes import parse_version\n\n# these were introduced in skimage-0.14\nif parse_version(skimage.__version__) >= parse_version(\"0.14\"):\n rescale_params = {\"anti_aliasing\": False, \"multichannel\": False}\nelse:\n rescale_params = {}\n\n# load the coins as a numpy array\norig_coins = coins()\n\n# Resize it to 20% of the original size to speed up the processing\n# Applying a Gaussian filter for smoothing prior to down-scaling\n# reduces aliasing artifacts.\nsmoothened_coins = gaussian_filter(orig_coins, sigma=2)\nrescaled_coins = rescale(smoothened_coins, 0.2, mode=\"reflect\", **rescale_params)\n\n# Convert the image into a graph with the value of the gradient on the\n# edges.\ngraph = image.img_to_graph(rescaled_coins)\n\n# Take a decreasing function of the gradient: an exponential\n# The smaller beta is, the more independent the segmentation is of the\n# actual image. For beta=1, the segmentation is close to a voronoi\nbeta = 10\neps = 1e-6\ngraph.data = np.exp(-beta * graph.data / graph.data.std()) + eps\n\n# Apply spectral clustering (this step goes much faster if you have pyamg\n# installed)\nN_REGIONS = 25"
+
"# Author: Gael Varoquaux <[email protected]>, Brian Cheung\n# License: BSD 3 clause\n\nimport time\n\nimport numpy as np\nfrom scipy.ndimage.filters import gaussian_filter\nimport matplotlib.pyplot as plt\nimport skimage\nfrom skimage.data import coins\nfrom skimage.transform import rescale\n\nfrom sklearn.feature_extraction import image\nfrom sklearn.cluster import spectral_clustering\nfrom sklearn.utils.fixes import parse_version\n\n# these were introduced in skimage-0.14\nif parse_version(skimage.__version__) >= parse_version(\"0.14\"):\n rescale_params = {\"anti_aliasing\": False, \"multichannel\": False}\nelse:\n rescale_params = {}\n\n# load the coins as a numpy array\norig_coins = coins()\n\n# Resize it to 20% of the original size to speed up the processing\n# Applying a Gaussian filter for smoothing prior to down-scaling\n# reduces aliasing artifacts.\nsmoothened_coins = gaussian_filter(orig_coins, sigma=2)\nrescaled_coins = rescale(smoothened_coins, 0.2, mode=\"reflect\", **rescale_params)\n\n# Convert the image into a graph with the value of the gradient on the\n# edges.\ngraph = image.img_to_graph(rescaled_coins)\n\n# Take a decreasing function of the gradient: an exponential\n# The smaller beta is, the more independent the segmentation is of the\n# actual image. For beta=1, the segmentation is close to a voronoi\nbeta = 10\neps = 1e-6\ngraph.data = np.exp(-beta * graph.data / graph.data.std()) + eps\n\n# Apply spectral clustering (this step goes much faster if you have pyamg\n# installed)\nN_REGIONS = 25"
 ]
 },
 {
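
The diffed cell above converts the coins image into a pixel graph and weights each edge with a decreasing exponential of the image gradient before running spectral clustering. A minimal sketch of that same affinity construction on a small synthetic image, assuming scikit-learn is installed (the 40x40 blob image and the choice of 4 regions are illustrative; beta and eps follow the cell above):

import numpy as np
from sklearn.feature_extraction import image
from sklearn.cluster import spectral_clustering

# Tiny synthetic image: two bright blobs on a dark background (illustrative data).
x, y = np.indices((40, 40))
img = np.exp(-((x - 12) ** 2 + (y - 12) ** 2) / 50.0) + np.exp(-((x - 28) ** 2 + (y - 28) ** 2) / 50.0)

# Graph whose edges carry the gradient between neighbouring pixels.
graph = image.img_to_graph(img)

# Decreasing exponential of the gradient, as in the cell above; a smaller beta
# makes the segmentation depend less on the actual image content.
beta, eps = 10, 1e-6
graph.data = np.exp(-beta * graph.data / graph.data.std()) + eps

# Spectral clustering on the affinity graph (4 regions is an arbitrary choice here).
labels = spectral_clustering(graph, n_clusters=4, eigen_solver="arpack", random_state=0)
print(labels.reshape(img.shape).shape)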

dev/_downloads/00ae629d652473137a3905a5e08ea815/plot_iris_dtc.py

−1

@@ -14,7 +14,6 @@
 
 We also show the tree structure of a model built on all of the features.
 """
-print(__doc__)
 
 import numpy as np
 import matplotlib.pyplot as plt
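
The docstring kept as context mentions showing the tree structure of a model built on all of the features. A minimal sketch of fitting a decision tree on the full iris feature set and drawing its structure, assuming scikit-learn and matplotlib are available (the settings below are illustrative, not the example's exact parameters):

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier, plot_tree

# Fit a tree on all four iris features.
X, y = load_iris(return_X_y=True)
clf = DecisionTreeClassifier(random_state=0).fit(X, y)

# Draw the learned tree structure.
plot_tree(clf, filled=True)
plt.show()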

dev/_downloads/010337852815f8103ac6cca38a812b3c/plot_roc_crossval.py

−1

@@ -29,7 +29,6 @@
 :ref:`sphx_glr_auto_examples_model_selection_plot_roc.py`,
 
 """
-print(__doc__)
 
 import numpy as np
 import matplotlib.pyplot as plt

dev/_downloads/01fdc7c95204e4a420de7cd297711693/plot_feature_union.py

+1

@@ -13,6 +13,7 @@
 
 The combination used in this example is not particularly helpful on this
 dataset and is only used to illustrate the usage of FeatureUnion.
+
 """
 
 # Author: Andreas Mueller <[email protected]>
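
The docstring kept as context describes combining features with FeatureUnion purely to illustrate its usage. A minimal sketch of such a combination, assuming scikit-learn is installed (the PCA/SelectKBest/SVC combination below is an assumed illustration, not read from this diff):

from sklearn.datasets import load_iris
from sklearn.decomposition import PCA
from sklearn.feature_selection import SelectKBest
from sklearn.pipeline import FeatureUnion, Pipeline
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)

# Concatenate PCA components with univariately selected features.
combined = FeatureUnion([("pca", PCA(n_components=2)), ("univ_select", SelectKBest(k=1))])

pipe = Pipeline([("features", combined), ("svm", SVC(kernel="linear"))])
pipe.fit(X, y)
print("combined feature shape:", combined.transform(X).shape)  # 2 + 1 = 3 columns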

dev/_downloads/023324c27491610e7c0ccff87c59abf9/plot_kernel_pca.py

+1 −1

@@ -5,8 +5,8 @@
 
 This example shows that Kernel PCA is able to find a projection of the data
 that makes data linearly separable.
+
 """
-print(__doc__)
 
 # Authors: Mathieu Blondel
 #          Andreas Mueller
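
The docstring kept as context states that Kernel PCA can find a projection that makes the data linearly separable. A minimal sketch of that idea on two concentric circles, assuming scikit-learn is installed (the RBF kernel with gamma=10 and the circles dataset are illustrative choices):

import numpy as np
from sklearn.datasets import make_circles
from sklearn.decomposition import KernelPCA

# Two concentric circles are not linearly separable in the original space.
X, y = make_circles(n_samples=400, factor=0.3, noise=0.05, random_state=0)

# Project with an RBF Kernel PCA.
kpca = KernelPCA(n_components=2, kernel="rbf", gamma=10)
X_kpca = kpca.fit_transform(X)

# The first component now roughly separates the inner and outer circles.
print("inner circle, 1st component mean:", X_kpca[y == 1, 0].mean())
print("outer circle, 1st component mean:", X_kpca[y == 0, 0].mean())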

dev/_downloads/02a1306a494b46cc56c930ceec6e8c4a/plot_species_kde.py

+1

@@ -35,6 +35,7 @@
 S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
 190:231-259, 2006.
 """ # noqa: E501
+
 # Author: Jake Vanderplas <[email protected]>
 #
 # License: BSD 3 clause

dev/_downloads/02a7bbce3c39c70d62d80e875968e5c6/plot_digits_kde_sampling.py

+1

@@ -8,6 +8,7 @@
 a generative model for a dataset. With this generative model in place,
 new samples can be drawn. These new samples reflect the underlying model
 of the data.
+
 """
 
 import numpy as np
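
The docstring kept as context explains using a kernel density estimate as a generative model from which new samples can be drawn. A minimal sketch of that fit-then-sample workflow, assuming scikit-learn is installed (the two-blob data and bandwidth=0.5 are illustrative):

import numpy as np
from sklearn.neighbors import KernelDensity

# Illustrative 2-D data drawn from two Gaussian blobs.
rng = np.random.RandomState(0)
X = np.vstack([rng.normal(0, 1, size=(100, 2)), rng.normal(5, 1, size=(100, 2))])

# Fit a kernel density estimate and use it as a generative model.
kde = KernelDensity(kernel="gaussian", bandwidth=0.5).fit(X)
new_samples = kde.sample(5, random_state=0)
print(new_samples)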

dev/_downloads/02d88d76c60b7397c8c6e221b31568dd/plot_grid_search_refit_callable.py

+2 −2

@@ -15,10 +15,10 @@
 [1] Hastie, T., Tibshirani, R.,, Friedman, J. (2001). Model Assessment and
 Selection. The Elements of Statistical Learning (pp. 219-260). New York,
 NY, USA: Springer New York Inc..
+
 """
-# Author: Wenhao Zhang <[email protected]>
 
-print(__doc__)
+# Author: Wenhao Zhang <[email protected]>
 
 import numpy as np
 import matplotlib.pyplot as plt

dev/_downloads/0486bf9e537e44cedd2a236d034bcd90/plot_pcr_vs_pls.ipynb

−11

@@ -18,17 +18,6 @@
"\n# Principal Component Regression vs Partial Least Squares Regression\n\nThis example compares `Principal Component Regression\n<https://en.wikipedia.org/wiki/Principal_component_regression>`_ (PCR) and\n`Partial Least Squares Regression\n<https://en.wikipedia.org/wiki/Partial_least_squares_regression>`_ (PLS) on a\ntoy dataset. Our goal is to illustrate how PLS can outperform PCR when the\ntarget is strongly correlated with some directions in the data that have a\nlow variance.\n\nPCR is a regressor composed of two steps: first,\n:class:`~sklearn.decomposition.PCA` is applied to the training data, possibly\nperforming dimensionality reduction; then, a regressor (e.g. a linear\nregressor) is trained on the transformed samples. In\n:class:`~sklearn.decomposition.PCA`, the transformation is purely\nunsupervised, meaning that no information about the targets is used. As a\nresult, PCR may perform poorly in some datasets where the target is strongly\ncorrelated with *directions* that have low variance. Indeed, the\ndimensionality reduction of PCA projects the data into a lower dimensional\nspace where the variance of the projected data is greedily maximized along\neach axis. Despite them having the most predictive power on the target, the\ndirections with a lower variance will be dropped, and the final regressor\nwill not be able to leverage them.\n\nPLS is both a transformer and a regressor, and it is quite similar to PCR: it\nalso applies a dimensionality reduction to the samples before applying a\nlinear regressor to the transformed data. The main difference with PCR is\nthat the PLS transformation is supervised. Therefore, as we will see in this\nexample, it does not suffer from the issue we just mentioned.\n"
 ]
 },
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {
-"collapsed": false
-},
-"outputs": [],
-"source": [
-"print(__doc__)"
-]
-},
 {
 "cell_type": "markdown",
 "metadata": {},

dev/_downloads/055e50ba9fa8c7dcf45dc8f1f32a0243/plot_nnls.py

+2 −1

@@ -6,8 +6,9 @@
 In this example, we fit a linear model with positive constraints on the
 regression coefficients and compare the estimated coefficients to a classic
 linear regression.
+
 """
-print(__doc__)
+
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.metrics import r2_score
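
The docstring kept as context describes fitting a linear model with positivity constraints on the regression coefficients. A minimal sketch of that comparison, assuming a scikit-learn version where LinearRegression accepts positive=True (the synthetic data is illustrative):

import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score

rng = np.random.RandomState(42)
X = rng.normal(size=(100, 3))
true_coef = np.array([2.0, 0.0, 1.5])            # non-negative ground truth
y = X @ true_coef + 0.1 * rng.normal(size=100)

# Ordinary least squares vs. non-negative least squares.
ols = LinearRegression().fit(X, y)
nnls = LinearRegression(positive=True).fit(X, y)

print("OLS coefficients: ", ols.coef_)
print("NNLS coefficients:", nnls.coef_)
print("NNLS R^2:", r2_score(y, nnls.predict(X)))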

dev/_downloads/055e8313e28f2f3b5fd508054dfe5fe0/plot_roc_crossval.ipynb

+1 −1

@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import auc\nfrom sklearn.metrics import RocCurveDisplay\nfrom sklearn.model_selection import StratifiedKFold\n\n# #############################################################################\n# Data IO and generation\n\n# Import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\nX, y = X[y != 2], y[y != 2]\nn_samples, n_features = X.shape\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# #############################################################################\n# Classification and ROC analysis\n\n# Run classifier with cross-validation and plot ROC curves\ncv = StratifiedKFold(n_splits=6)\nclassifier = svm.SVC(kernel=\"linear\", probability=True, random_state=random_state)\n\ntprs = []\naucs = []\nmean_fpr = np.linspace(0, 1, 100)\n\nfig, ax = plt.subplots()\nfor i, (train, test) in enumerate(cv.split(X, y)):\n classifier.fit(X[train], y[train])\n viz = RocCurveDisplay.from_estimator(\n classifier,\n X[test],\n y[test],\n name=\"ROC fold {}\".format(i),\n alpha=0.3,\n lw=1,\n ax=ax,\n )\n interp_tpr = np.interp(mean_fpr, viz.fpr, viz.tpr)\n interp_tpr[0] = 0.0\n tprs.append(interp_tpr)\n aucs.append(viz.roc_auc)\n\nax.plot([0, 1], [0, 1], linestyle=\"--\", lw=2, color=\"r\", label=\"Chance\", alpha=0.8)\n\nmean_tpr = np.mean(tprs, axis=0)\nmean_tpr[-1] = 1.0\nmean_auc = auc(mean_fpr, mean_tpr)\nstd_auc = np.std(aucs)\nax.plot(\n mean_fpr,\n mean_tpr,\n color=\"b\",\n label=r\"Mean ROC (AUC = %0.2f $\\pm$ %0.2f)\" % (mean_auc, std_auc),\n lw=2,\n alpha=0.8,\n)\n\nstd_tpr = np.std(tprs, axis=0)\ntprs_upper = np.minimum(mean_tpr + std_tpr, 1)\ntprs_lower = np.maximum(mean_tpr - std_tpr, 0)\nax.fill_between(\n mean_fpr,\n tprs_lower,\n tprs_upper,\n color=\"grey\",\n alpha=0.2,\n label=r\"$\\pm$ 1 std. dev.\",\n)\n\nax.set(\n xlim=[-0.05, 1.05],\n ylim=[-0.05, 1.05],\n title=\"Receiver operating characteristic example\",\n)\nax.legend(loc=\"lower right\")\nplt.show()"
+
"import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import auc\nfrom sklearn.metrics import RocCurveDisplay\nfrom sklearn.model_selection import StratifiedKFold\n\n# #############################################################################\n# Data IO and generation\n\n# Import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\nX, y = X[y != 2], y[y != 2]\nn_samples, n_features = X.shape\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# #############################################################################\n# Classification and ROC analysis\n\n# Run classifier with cross-validation and plot ROC curves\ncv = StratifiedKFold(n_splits=6)\nclassifier = svm.SVC(kernel=\"linear\", probability=True, random_state=random_state)\n\ntprs = []\naucs = []\nmean_fpr = np.linspace(0, 1, 100)\n\nfig, ax = plt.subplots()\nfor i, (train, test) in enumerate(cv.split(X, y)):\n classifier.fit(X[train], y[train])\n viz = RocCurveDisplay.from_estimator(\n classifier,\n X[test],\n y[test],\n name=\"ROC fold {}\".format(i),\n alpha=0.3,\n lw=1,\n ax=ax,\n )\n interp_tpr = np.interp(mean_fpr, viz.fpr, viz.tpr)\n interp_tpr[0] = 0.0\n tprs.append(interp_tpr)\n aucs.append(viz.roc_auc)\n\nax.plot([0, 1], [0, 1], linestyle=\"--\", lw=2, color=\"r\", label=\"Chance\", alpha=0.8)\n\nmean_tpr = np.mean(tprs, axis=0)\nmean_tpr[-1] = 1.0\nmean_auc = auc(mean_fpr, mean_tpr)\nstd_auc = np.std(aucs)\nax.plot(\n mean_fpr,\n mean_tpr,\n color=\"b\",\n label=r\"Mean ROC (AUC = %0.2f $\\pm$ %0.2f)\" % (mean_auc, std_auc),\n lw=2,\n alpha=0.8,\n)\n\nstd_tpr = np.std(tprs, axis=0)\ntprs_upper = np.minimum(mean_tpr + std_tpr, 1)\ntprs_lower = np.maximum(mean_tpr - std_tpr, 0)\nax.fill_between(\n mean_fpr,\n tprs_lower,\n tprs_upper,\n color=\"grey\",\n alpha=0.2,\n label=r\"$\\pm$ 1 std. dev.\",\n)\n\nax.set(\n xlim=[-0.05, 1.05],\n ylim=[-0.05, 1.05],\n title=\"Receiver operating characteristic example\",\n)\nax.legend(loc=\"lower right\")\nplt.show()"
 ]
 }
 ],
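
The diffed cell above interpolates each fold's ROC curve onto a common FPR grid with np.interp before averaging across folds. A minimal sketch of that interpolation step with two hand-made folds (the fpr/tpr arrays are illustrative, not computed from the example's data):

import numpy as np

mean_fpr = np.linspace(0, 1, 100)

# Two illustrative per-fold ROC curves (in the example these come from RocCurveDisplay).
folds = [
    (np.array([0.0, 0.2, 1.0]), np.array([0.0, 0.7, 1.0])),
    (np.array([0.0, 0.4, 1.0]), np.array([0.0, 0.9, 1.0])),
]

tprs = []
for fpr, tpr in folds:
    interp_tpr = np.interp(mean_fpr, fpr, tpr)  # resample onto the common grid
    interp_tpr[0] = 0.0                         # force the curve through the origin
    tprs.append(interp_tpr)

mean_tpr = np.mean(tprs, axis=0)
mean_tpr[-1] = 1.0                              # force the curve through (1, 1)
print(mean_tpr[:5], mean_tpr[-1])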

dev/_downloads/05ca8a4e90b4cc2acd69f9e24b4a1f3a/plot_classifier_chain_yeast.ipynb

+1 −1

@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-
"# Author: Adam Kleczewski\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.multioutput import ClassifierChain\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.multiclass import OneVsRestClassifier\nfrom sklearn.metrics import jaccard_score\nfrom sklearn.linear_model import LogisticRegression\n\nprint(__doc__)\n\n# Load a multi-label dataset from https://www.openml.org/d/40597\nX, Y = fetch_openml(\"yeast\", version=4, return_X_y=True)\nY = Y == \"TRUE\"\nX_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=0)\n\n# Fit an independent logistic regression model for each class using the\n# OneVsRestClassifier wrapper.\nbase_lr = LogisticRegression()\novr = OneVsRestClassifier(base_lr)\novr.fit(X_train, Y_train)\nY_pred_ovr = ovr.predict(X_test)\novr_jaccard_score = jaccard_score(Y_test, Y_pred_ovr, average=\"samples\")\n\n# Fit an ensemble of logistic regression classifier chains and take the\n# take the average prediction of all the chains.\nchains = [ClassifierChain(base_lr, order=\"random\", random_state=i) for i in range(10)]\nfor chain in chains:\n chain.fit(X_train, Y_train)\n\nY_pred_chains = np.array([chain.predict(X_test) for chain in chains])\nchain_jaccard_scores = [\n jaccard_score(Y_test, Y_pred_chain >= 0.5, average=\"samples\")\n for Y_pred_chain in Y_pred_chains\n]\n\nY_pred_ensemble = Y_pred_chains.mean(axis=0)\nensemble_jaccard_score = jaccard_score(\n Y_test, Y_pred_ensemble >= 0.5, average=\"samples\"\n)\n\nmodel_scores = [ovr_jaccard_score] + chain_jaccard_scores\nmodel_scores.append(ensemble_jaccard_score)\n\nmodel_names = (\n \"Independent\",\n \"Chain 1\",\n \"Chain 2\",\n \"Chain 3\",\n \"Chain 4\",\n \"Chain 5\",\n \"Chain 6\",\n \"Chain 7\",\n \"Chain 8\",\n \"Chain 9\",\n \"Chain 10\",\n \"Ensemble\",\n)\n\nx_pos = np.arange(len(model_names))\n\n# Plot the Jaccard similarity scores for the independent model, each of the\n# chains, and the ensemble (note that the vertical axis on this plot does\n# not begin at 0).\n\nfig, ax = plt.subplots(figsize=(7, 4))\nax.grid(True)\nax.set_title(\"Classifier Chain Ensemble Performance Comparison\")\nax.set_xticks(x_pos)\nax.set_xticklabels(model_names, rotation=\"vertical\")\nax.set_ylabel(\"Jaccard Similarity Score\")\nax.set_ylim([min(model_scores) * 0.9, max(model_scores) * 1.1])\ncolors = [\"r\"] + [\"b\"] * len(chain_jaccard_scores) + [\"g\"]\nax.bar(x_pos, model_scores, alpha=0.5, color=colors)\nplt.tight_layout()\nplt.show()"
+
"# Author: Adam Kleczewski\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import fetch_openml\nfrom sklearn.multioutput import ClassifierChain\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.multiclass import OneVsRestClassifier\nfrom sklearn.metrics import jaccard_score\nfrom sklearn.linear_model import LogisticRegression\n\n# Load a multi-label dataset from https://www.openml.org/d/40597\nX, Y = fetch_openml(\"yeast\", version=4, return_X_y=True)\nY = Y == \"TRUE\"\nX_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=0)\n\n# Fit an independent logistic regression model for each class using the\n# OneVsRestClassifier wrapper.\nbase_lr = LogisticRegression()\novr = OneVsRestClassifier(base_lr)\novr.fit(X_train, Y_train)\nY_pred_ovr = ovr.predict(X_test)\novr_jaccard_score = jaccard_score(Y_test, Y_pred_ovr, average=\"samples\")\n\n# Fit an ensemble of logistic regression classifier chains and take the\n# take the average prediction of all the chains.\nchains = [ClassifierChain(base_lr, order=\"random\", random_state=i) for i in range(10)]\nfor chain in chains:\n chain.fit(X_train, Y_train)\n\nY_pred_chains = np.array([chain.predict(X_test) for chain in chains])\nchain_jaccard_scores = [\n jaccard_score(Y_test, Y_pred_chain >= 0.5, average=\"samples\")\n for Y_pred_chain in Y_pred_chains\n]\n\nY_pred_ensemble = Y_pred_chains.mean(axis=0)\nensemble_jaccard_score = jaccard_score(\n Y_test, Y_pred_ensemble >= 0.5, average=\"samples\"\n)\n\nmodel_scores = [ovr_jaccard_score] + chain_jaccard_scores\nmodel_scores.append(ensemble_jaccard_score)\n\nmodel_names = (\n \"Independent\",\n \"Chain 1\",\n \"Chain 2\",\n \"Chain 3\",\n \"Chain 4\",\n \"Chain 5\",\n \"Chain 6\",\n \"Chain 7\",\n \"Chain 8\",\n \"Chain 9\",\n \"Chain 10\",\n \"Ensemble\",\n)\n\nx_pos = np.arange(len(model_names))\n\n# Plot the Jaccard similarity scores for the independent model, each of the\n# chains, and the ensemble (note that the vertical axis on this plot does\n# not begin at 0).\n\nfig, ax = plt.subplots(figsize=(7, 4))\nax.grid(True)\nax.set_title(\"Classifier Chain Ensemble Performance Comparison\")\nax.set_xticks(x_pos)\nax.set_xticklabels(model_names, rotation=\"vertical\")\nax.set_ylabel(\"Jaccard Similarity Score\")\nax.set_ylim([min(model_scores) * 0.9, max(model_scores) * 1.1])\ncolors = [\"r\"] + [\"b\"] * len(chain_jaccard_scores) + [\"g\"]\nax.bar(x_pos, model_scores, alpha=0.5, color=colors)\nplt.tight_layout()\nplt.show()"
 ]
 }
 ],
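
The diffed cell above averages the probability estimates of several classifier chains and thresholds the mean at 0.5 before computing a Jaccard score. A minimal sketch of that ensemble step on synthetic multilabel data, assuming scikit-learn is installed (the generated dataset and the use of only 3 chains are illustrative):

import numpy as np
from sklearn.datasets import make_multilabel_classification
from sklearn.linear_model import LogisticRegression
from sklearn.multioutput import ClassifierChain
from sklearn.metrics import jaccard_score

X, Y = make_multilabel_classification(n_samples=300, allow_unlabeled=False, random_state=0)

# A small ensemble of chains, each with a random label order.
chains = [ClassifierChain(LogisticRegression(max_iter=1000), order="random", random_state=i) for i in range(3)]
for chain in chains:
    chain.fit(X, Y)

# Average the chain predictions and threshold at 0.5, as in the cell above.
Y_pred = np.array([chain.predict(X) for chain in chains]).mean(axis=0)
print("ensemble Jaccard:", jaccard_score(Y, Y_pred >= 0.5, average="samples"))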

dev/_downloads/061854726c268bcdae5cd1c330cf8c75/plot_sgd_penalties.ipynb

+1 −1

@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nl1_color = \"navy\"\nl2_color = \"c\"\nelastic_net_color = \"darkorange\"\n\nline = np.linspace(-1.5, 1.5, 1001)\nxx, yy = np.meshgrid(line, line)\n\nl2 = xx ** 2 + yy ** 2\nl1 = np.abs(xx) + np.abs(yy)\nrho = 0.5\nelastic_net = rho * l1 + (1 - rho) * l2\n\nplt.figure(figsize=(10, 10), dpi=100)\nax = plt.gca()\n\nelastic_net_contour = plt.contour(\n xx, yy, elastic_net, levels=[1], colors=elastic_net_color\n)\nl2_contour = plt.contour(xx, yy, l2, levels=[1], colors=l2_color)\nl1_contour = plt.contour(xx, yy, l1, levels=[1], colors=l1_color)\nax.set_aspect(\"equal\")\nax.spines[\"left\"].set_position(\"center\")\nax.spines[\"right\"].set_color(\"none\")\nax.spines[\"bottom\"].set_position(\"center\")\nax.spines[\"top\"].set_color(\"none\")\n\nplt.clabel(\n elastic_net_contour,\n inline=1,\n fontsize=18,\n fmt={1.0: \"elastic-net\"},\n manual=[(-1, -1)],\n)\nplt.clabel(l2_contour, inline=1, fontsize=18, fmt={1.0: \"L2\"}, manual=[(-1, -1)])\nplt.clabel(l1_contour, inline=1, fontsize=18, fmt={1.0: \"L1\"}, manual=[(-1, -1)])\n\nplt.tight_layout()\nplt.show()"
+
"import numpy as np\nimport matplotlib.pyplot as plt\n\nl1_color = \"navy\"\nl2_color = \"c\"\nelastic_net_color = \"darkorange\"\n\nline = np.linspace(-1.5, 1.5, 1001)\nxx, yy = np.meshgrid(line, line)\n\nl2 = xx ** 2 + yy ** 2\nl1 = np.abs(xx) + np.abs(yy)\nrho = 0.5\nelastic_net = rho * l1 + (1 - rho) * l2\n\nplt.figure(figsize=(10, 10), dpi=100)\nax = plt.gca()\n\nelastic_net_contour = plt.contour(\n xx, yy, elastic_net, levels=[1], colors=elastic_net_color\n)\nl2_contour = plt.contour(xx, yy, l2, levels=[1], colors=l2_color)\nl1_contour = plt.contour(xx, yy, l1, levels=[1], colors=l1_color)\nax.set_aspect(\"equal\")\nax.spines[\"left\"].set_position(\"center\")\nax.spines[\"right\"].set_color(\"none\")\nax.spines[\"bottom\"].set_position(\"center\")\nax.spines[\"top\"].set_color(\"none\")\n\nplt.clabel(\n elastic_net_contour,\n inline=1,\n fontsize=18,\n fmt={1.0: \"elastic-net\"},\n manual=[(-1, -1)],\n)\nplt.clabel(l2_contour, inline=1, fontsize=18, fmt={1.0: \"L2\"}, manual=[(-1, -1)])\nplt.clabel(l1_contour, inline=1, fontsize=18, fmt={1.0: \"L1\"}, manual=[(-1, -1)])\n\nplt.tight_layout()\nplt.show()"
 ]
 }
 ],
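
The diffed cell above draws the unit contours of the L1, L2 and elastic-net penalties, with the elastic-net defined as rho * l1 + (1 - rho) * l2. A minimal sketch of those three penalty values for a single coefficient vector (the vector and rho = 0.5 are illustrative):

import numpy as np

w = np.array([0.5, -0.25, 0.0, 1.0])     # illustrative coefficient vector

l1 = np.abs(w).sum()                     # L1 penalty
l2 = (w ** 2).sum()                      # squared L2 penalty, matching the contour plot
rho = 0.5
elastic_net = rho * l1 + (1 - rho) * l2  # convex combination used for the elastic-net contour

print(f"L1={l1:.2f}  L2={l2:.2f}  elastic-net={elastic_net:.2f}")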
