#Perceptron
import numpy as np

class Perceptron(object):
    """Perceptron classifier.

    Parameters
    ----------
    eta : float
        Learning rate (between 0.0 and 1.0)
    n_iter : int
        Passes over the training dataset.
    random_state : int
        Random number generator seed for random weight initialization.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting.
    errors_ : list
        Number of misclassifications (updates) in each epoch.
    """
    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : array-like, shape = [n_examples, n_features]
            Training vectors.
        y : array-like, shape = [n_examples]
            Target values.

        Returns
        -------
        self : object
        """
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.errors_ = []
        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                # Update weights only when a sample is misclassified
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    # net_input and predict are referenced by fit but were cut from the export;
    # reconstructed here as the standard unit-step perceptron decision.
    def net_input(self, X):
        """Calculate net input (weighted sum plus bias)."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        """Return class label (+1 / -1) after the unit step."""
        return np.where(self.net_input(X) >= 0.0, 1, -1)
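A quick usage sketch (not in the original notebook): only the Perceptron class itself comes from the code above, and the toy arrays below are made up purely to show the call pattern.

# Hypothetical, linearly separable toy data in 2-D
X_toy = np.array([[2.0, 1.0], [3.0, 4.0], [4.0, 2.0],
                  [-2.0, -1.0], [-3.0, -3.0], [-1.0, -2.0]])
y_toy = np.array([1, 1, 1, -1, -1, -1])

ppn = Perceptron(eta=0.1, n_iter=10)
ppn.fit(X_toy, y_toy)
print(ppn.errors_)               # updates per epoch; expected to reach 0 for separable data
print(ppn.predict([2.5, 2.0]))   # class label for a new point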
# v2 is a scaled copy of v1, so the two vectors point in the same direction
# and the angle between them is 0 radians
v1 = np.array([1, 2, 3])
v2 = 0.5 * v1
np.arccos(v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2)))
0.0
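As a contrast (an added check, not part of the original cell), the same formula gives an angle of pi/2 for orthogonal vectors:

v3 = np.array([1.0, 0.0, 0.0])
v4 = np.array([0.0, 1.0, 0.0])
# cosine of orthogonal vectors is 0, so arccos returns pi/2 (about 1.5708)
np.arccos(v3.dot(v4) / (np.linalg.norm(v3) * np.linalg.norm(v4)))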
import os
import pandas as pd
s = os.path.join('https://archive.ics.uci.edu', 'ml', 'machine-learning-databases', 'iris', 'iris.data')
print('URL:', s)
URL:"https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data"
df = pd.read_csv(s, header=None, encoding='utf-8')
df.tail()
       0    1    2    3               4
149  5.9  3.0  5.1  1.8  Iris-virginica
df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', header=None, encoding='utf-8')
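The export stops after loading the DataFrame. Below is a hedged sketch of the usual next step, feeding the Iris data to the Perceptron above; the choice of the first 100 rows (setosa and versicolor), the -1/+1 encoding, and the two feature columns are assumptions, not something shown in this notebook.

# Assumed follow-up: rows 0-99 are Iris-setosa followed by Iris-versicolor
y = df.iloc[0:100, 4].values
y = np.where(y == 'Iris-setosa', -1, 1)   # encode the two classes as -1 / +1
X = df.iloc[0:100, [0, 2]].values         # sepal length and petal length

ppn = Perceptron(eta=0.1, n_iter=10)
ppn.fit(X, y)
print(ppn.errors_)   # misclassifications per epoch; drops to 0 once a separating line is found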
#Adaline
class AdalineGD(object):
    """ADAptive LInear NEuron classifier.

    Parameters
    ----------
    eta : float
        Learning rate (between 0.0 and 1.0)
    n_iter : int
        Passes over the training dataset.
    random_state : int
        Random number generator seed for random weight initialization.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting.
    cost_ : list
        Sum-of-squares cost function value in each epoch.
    """
    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : array-like, shape = [n_examples, n_features]
            Training vectors.
        y : array-like, shape = [n_examples]
            Target values.

        Returns
        -------
        self : object
        """
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []
        for i in range(self.n_iter):
            # Batch gradient descent: one weight update per pass over the full dataset
            net_input = self.net_input(X)
            output = self.activation(net_input)
            errors = (y - output)
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            cost = (errors**2).sum() / 2.0
            self.cost_.append(cost)
        return self

    # The page break in the export cut off the method containing this return;
    # net_input is reconstructed around it, and activation (called by fit) is
    # assumed to be the identity, as is usual for Adaline.
    def net_input(self, X):
        """Calculate net input (weighted sum plus bias)."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute linear (identity) activation."""
        return X
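A hedged usage sketch for AdalineGD (not part of the export), reusing the X and y arrays from the Iris sketch above; standardizing the features first is an assumption, made because batch gradient descent with this learning rate converges far more reliably on zero-mean, unit-variance inputs.

# Standardize each feature column (assumed preprocessing step)
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

ada = AdalineGD(eta=0.01, n_iter=15)
ada.fit(X_std, y)
print(ada.cost_)   # sum-of-squares cost per epoch; should decrease toward a minimum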