
Lab5

The document provides a Python implementation of the perceptron learning algorithm, demonstrating its functionality with a linearly separable dataset. It includes code for training the perceptron and visualizing decision regions using matplotlib. The output is a graphical representation of the decision boundaries between two classes in the dataset.


With a suitable example, demonstrate the perceptron learning law with its decision regions using Python. Give the output in graphical form.
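
For reference, the perceptron learning law implemented in the code below adjusts the weights only when a sample is misclassified. With learning rate η, target t ∈ {+1, −1}, and prediction ŷ = sign(w·x + b), each training sample applies the update

    w ← w + η (t − ŷ) x
    b ← b + η (t − ŷ)

so samples that are already classified correctly (t = ŷ) leave the weights and bias unchanged.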

In [ ]: import numpy as np
import matplotlib.pyplot as plt

class Perceptron:
    def __init__(self, learning_rate=0.01, epochs=100):
        self.learning_rate = learning_rate
        self.epochs = epochs

    def train(self, X, y):
        # weights[0] is the bias; weights[1:] are the feature weights
        self.weights = np.zeros(X.shape[1] + 1)
        self.errors = []

        for _ in range(self.epochs):
            error = 0
            for xi, target in zip(X, y):
                # Perceptron learning law: the update is non-zero only when
                # the prediction disagrees with the target
                update = self.learning_rate * (target - self.predict(xi))
                self.weights[1:] += update * xi
                self.weights[0] += update
                error += int(update != 0.0)
            self.errors.append(error)  # misclassifications per epoch

    def predict(self, X):
        # Signed linear activation: +1 if w.x + b >= 0, otherwise -1
        return np.where(np.dot(X, self.weights[1:]) + self.weights[0] >= 0, 1, -1)

# Generate a linearly separable dataset
np.random.seed(0)
X = np.random.randn(100, 2)
y = np.where(X[:, 0] + X[:, 1] > 0, 1, -1)

# Train the perceptron
perceptron = Perceptron(learning_rate=0.1, epochs=10)
perceptron.train(X, y)

# Plot the two classes
plt.figure(figsize=(8, 6))
plt.scatter(X[y == 1, 0], X[y == 1, 1], color='blue', marker='o', label='Class 1')
plt.scatter(X[y == -1, 0], X[y == -1, 1], color='red', marker='x', label='Class -1')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')

# Evaluate the trained perceptron on a grid to shade the decision regions
x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, 0.1),
                       np.arange(x2_min, x2_max, 0.1))
Z = perceptron.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
Z = Z.reshape(xx1.shape)
plt.contourf(xx1, xx2, Z, alpha=0.3, cmap='coolwarm')
plt.xlim(xx1.min(), xx1.max())
plt.ylim(xx2.min(), xx2.max())
plt.title('Perceptron Decision Regions')
plt.legend()
plt.show()
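
As an optional check (not part of the original lab sheet), the per-epoch misclassification counts stored in perceptron.errors can be plotted to confirm that training converges on this linearly separable dataset; a minimal sketch:

In [ ]: # Optional: plot the number of weight updates per epoch to verify convergence
plt.figure(figsize=(6, 4))
plt.plot(range(1, len(perceptron.errors) + 1), perceptron.errors, marker='o')
plt.xlabel('Epoch')
plt.ylabel('Number of updates')
plt.title('Perceptron Convergence')
plt.show()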
