{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "\n# One-class SVM with non-linear kernel (RBF)\n\nAn example using a one-class SVM for novelty detection.\n\n`One-class SVM <svm_outlier_detection>` is an unsupervised\nalgorithm that learns a decision function for novelty detection:\nclassifying new data as similar or different to the training set.\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "# Authors: The scikit-learn developers\n# SPDX-License-Identifier: BSD-3-Clause"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "import numpy as np\n\nfrom sklearn import svm\n\n# Generate train data\nX = 0.3 * np.random.randn(100, 2)\nX_train = np.r_[X + 2, X - 2]\n# Generate some regular novel observations\nX = 0.3 * np.random.randn(20, 2)\nX_test = np.r_[X + 2, X - 2]\n# Generate some abnormal novel observations\nX_outliers = np.random.uniform(low=-4, high=4, size=(20, 2))\n\n# fit the model\nclf = svm.OneClassSVM(nu=0.1, kernel=\"rbf\", gamma=0.1)\nclf.fit(X_train)\ny_pred_train = clf.predict(X_train)\ny_pred_test = clf.predict(X_test)\ny_pred_outliers = clf.predict(X_outliers)\nn_error_train = y_pred_train[y_pred_train == -1].size\nn_error_test = y_pred_test[y_pred_test == -1].size\nn_error_outliers = y_pred_outliers[y_pred_outliers == 1].size"
      ]
    },
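    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "As a supplementary sanity check (added for illustration; not part of the\noriginal example), recall that `nu` is an upper bound on the fraction of\ntraining errors and a lower bound on the fraction of support vectors. The\ncell below compares the observed training error rate and the support-vector\nfraction against `nu=0.1`; because no random seed is fixed, the exact numbers\nvary from run to run.\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "# Supplementary check, added for illustration (not part of the original example).\n# nu upper-bounds the fraction of training errors and lower-bounds the fraction\n# of support vectors, so both ratios should land close to nu = 0.1.\nn_train = X_train.shape[0]\ntrain_error_fraction = n_error_train / n_train\nsupport_fraction = clf.support_vectors_.shape[0] / n_train\nprint(f\"nu = {clf.nu}\")\nprint(f\"training error fraction = {train_error_fraction:.3f} (roughly <= nu)\")\nprint(f\"support vector fraction = {support_fraction:.3f} (roughly >= nu)\")"
      ]
    },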
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "import matplotlib.font_manager\nimport matplotlib.lines as mlines\nimport matplotlib.pyplot as plt\n\nfrom sklearn.inspection import DecisionBoundaryDisplay\n\n_, ax = plt.subplots()\n\n# generate grid for the boundary display\nxx, yy = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))\nX = np.concatenate([xx.reshape(-1, 1), yy.reshape(-1, 1)], axis=1)\nDecisionBoundaryDisplay.from_estimator(\n    clf,\n    X,\n    response_method=\"decision_function\",\n    plot_method=\"contourf\",\n    ax=ax,\n    cmap=\"PuBu\",\n)\nDecisionBoundaryDisplay.from_estimator(\n    clf,\n    X,\n    response_method=\"decision_function\",\n    plot_method=\"contourf\",\n    ax=ax,\n    levels=[0, 10000],\n    colors=\"palevioletred\",\n)\nDecisionBoundaryDisplay.from_estimator(\n    clf,\n    X,\n    response_method=\"decision_function\",\n    plot_method=\"contour\",\n    ax=ax,\n    levels=[0],\n    colors=\"darkred\",\n    linewidths=2,\n)\n\ns = 40\nb1 = ax.scatter(X_train[:, 0], X_train[:, 1], c=\"white\", s=s, edgecolors=\"k\")\nb2 = ax.scatter(X_test[:, 0], X_test[:, 1], c=\"blueviolet\", s=s, edgecolors=\"k\")\nc = ax.scatter(X_outliers[:, 0], X_outliers[:, 1], c=\"gold\", s=s, edgecolors=\"k\")\nplt.legend(\n    [mlines.Line2D([], [], color=\"darkred\"), b1, b2, c],\n    [\n        \"learned frontier\",\n        \"training observations\",\n        \"new regular observations\",\n        \"new abnormal observations\",\n    ],\n    loc=\"upper left\",\n    prop=matplotlib.font_manager.FontProperties(size=11),\n)\nax.set(\n    xlabel=(\n        f\"error train: {n_error_train}/200 ; errors novel regular: {n_error_test}/40 ;\"\n        f\" errors novel abnormal: {n_error_outliers}/40\"\n    ),\n    title=\"Novelty Detection\",\n    xlim=(-5, 5),\n    ylim=(-5, 5),\n)\nplt.show()"
      ]
    }
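,
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "The learned frontier drawn above is the zero level of the decision function:\n`decision_function` returns a signed score that is positive for points inside\nthe frontier and negative for points outside, and `predict` returns +1 or -1\naccordingly. The cell below (added for illustration; not part of the original\nexample) summarizes these scores for each group of observations.\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "# Illustration only; not part of the original example.\n# decision_function is positive inside the learned frontier, negative outside.\nfor name, data in [\n    (\"training\", X_train),\n    (\"new regular\", X_test),\n    (\"new abnormal\", X_outliers),\n]:\n    scores = clf.decision_function(data)\n    print(\n        f\"{name:>12}: mean score = {scores.mean():+.3f}, \"\n        f\"fraction inside frontier = {(scores > 0).mean():.2f}\"\n    )"
      ]
    }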
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.9.20"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}