Lab Report 04
Name of the Experiment: (a) Design and implementation of Kohonen Self-organizing Neural
Networks algorithm.
Dataset:
Implementation:
Code:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

class Kohonen:
    def __init__(self, inputs, iterations, X):
        # Step 1: initialize the weight vectors and store the input pattern
        self.iterations = iterations
        self.learning_rate = 0.5
        self.inputs = inputs  # weight vectors, one per output node
        self.X = X            # input pattern

    def euclidean(self, a, b):
        # Euclidean distance between a weight vector and the input pattern
        return np.linalg.norm(np.subtract(a, b))

    def find_winner_next(self, Y):
        # Step 2: present the input pattern
        self.X = Y
        for j in range(self.iterations):
            # Step 3: find the winning (nearest) node
            shortest_distance = 10000  # a large number
            delta = [0, 0, 0]          # stores the update factors for the winner's weights
            for i in range(len(self.inputs)):
                distance = self.euclidean(self.inputs[i], self.X)
                print("Distance: " + str(distance))
                if distance < shortest_distance:
                    shortest_distance = distance
                    winner = i  # index of the nearest node
            # Step 4: update the weights of the winner'th node, i.e. inputs[winner]
            print("Shortest distance node: " + str(winner) + " --- distance: " + str(shortest_distance))
            winning_node = self.inputs[winner]
            delta[0] = self.learning_rate * (self.X[0] - winning_node[0])  # delta_w_1_i
            delta[1] = self.learning_rate * (self.X[1] - winning_node[1])  # delta_w_2_i
            delta[2] = self.learning_rate * (self.X[2] - winning_node[2])  # delta_w_3_i
            print("delta: " + str(delta))
            print("--------------------")
            self.inputs[winner] = np.add(self.inputs[winner], delta)
        return self.inputs

    def train(self, Y, epochs=5, lr=0.5):
        self.learning_rate = lr
        for epoch in range(epochs):
            for inp_itr in range(len(Y)):
                data = Y[inp_itr]
                print("pattern: ", data)
                self.X = data
                for j in range(self.iterations):
                    shortest_distance = 10000  # a large number
                    delta = [0, 0, 0]          # stores the update factors for the winner's weights
                    for i in range(len(self.inputs)):
                        distance = self.euclidean(self.inputs[i], self.X)
                        print("Distance: " + str(distance))
                        if distance < shortest_distance:
                            shortest_distance = distance
                            winner = i  # index of the nearest node
                    # the winner'th node is nearest, so update its weights, i.e. inputs[winner]
                    print("Shortest distance node: " + str(winner) + " --- distance: " + str(shortest_distance))
                    winning_node = self.inputs[winner]
                    delta[0] = self.learning_rate * (self.X[0] - winning_node[0])  # delta_w_1_i
                    delta[1] = self.learning_rate * (self.X[1] - winning_node[1])  # delta_w_2_i
                    delta[2] = self.learning_rate * (self.X[2] - winning_node[2])  # delta_w_3_i
                    print("delta: " + str(delta))
                    print("--------------------")
                    self.inputs[winner] = np.add(self.inputs[winner], delta)
            print("epoch:", epoch + 1, "--:", self.inputs)
        return self.inputs

# Step 2: present an input pattern to the network
inputs = [[1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1]]  # initial weights
iterations = 1
X = [1, 0, 1]  # input pattern
obj = Kohonen(inputs, iterations, X)

# train the network on the four input patterns
model = Kohonen(inputs, iterations, X)
epochs = 10
lr = 0.5
X = [[1, 0, 1], [1, 1, 0], [0, 1, 1], [1, 1, 1]]  # training patterns
len(X)
model.train(X, epochs=5, lr=0.5)
Table 4.1: Evaluation of the correctness and the accuracy
Input pattern                          Epoch (learning rate = 0.5)   Updated weights
[[1,0,1], [1,1,0], [0,1,1], [1,1,1]]   1                             [[1., 1., 0.25],   [1., 0., 1.], [0., 1., 1.], [1., 1., 1.]]
[[1,0,1], [1,1,0], [0,1,1], [1,1,1]]   2                             [[1., 1., 0.125],  [1., 0., 1.], [0., 1., 1.], [1., 1., 1.]]
[[1,0,1], [1,1,0], [0,1,1], [1,1,1]]   3                             [[1., 1., 0.0625], [1., 0., 1.], [0., 1., 1.], [1., 1., 1.]]
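The implementation above spells the weight update out component by component for three-element patterns. As a rough illustrative sketch (not part of the submitted code), the same winner-take-all step can be written with NumPy array operations so that it works for patterns of any length; the function name kohonen_step and the variables weights, pattern and lr are assumptions introduced here.

import numpy as np

def kohonen_step(weights, pattern, lr=0.5):
    # distances from every weight vector to the presented pattern
    distances = np.linalg.norm(weights - pattern, axis=1)
    winner = int(np.argmin(distances))  # index of the nearest node
    # move only the winning node's weights towards the pattern
    weights[winner] += lr * (pattern - weights[winner])
    return winner, weights

# example with the same data used above
weights = np.array([[1, 1, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1]], dtype=float)
winner, weights = kohonen_step(weights, np.array([1, 0, 1], dtype=float))
print(winner, weights)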
Name of the Experiment: (b) Design and implementation of Hopfield Neural Networks
algorithm.
Dataset:
Characteristics of Dataset:
● Every pattern has dimension 5×5
● A total of 3 patterns are given
● The values of each pattern are either +1 or -1 (an illustrative example of such a pattern is sketched below)
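For illustration only (this is not one of the three given patterns), a 5×5 bipolar pattern of the kind described above can be written as a small NumPy array:

import numpy as np

# an illustrative 5x5 bipolar pattern (values are +1 or -1)
example_pattern = np.array([
    [-1,  1,  1,  1, -1],
    [ 1, -1, -1, -1,  1],
    [ 1, -1, -1, -1,  1],
    [ 1, -1, -1, -1,  1],
    [-1,  1,  1,  1, -1],
])
print(example_pattern.flatten().shape)  # (25,) -- the vector length used by the network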
Implementation:
Code:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import statistics
from statistics import mode
%matplotlib inline
import cv2
def ld_data(path):
with np.load(path) as f:
x_train, y_train = f['x_train'], f['y_train']
x_test, y_test = f['x_test'], f['y_test']
return (x_train, y_train), (x_test, y_test)
(x_train, y_train), (x_test, y_test) = ld_data('./mnist.npz')
x_train = x_train.reshape(x_train.shape[0],784)
y_train = y_train.reshape(y_train.shape[0],1)
x_test = x_test.reshape(x_test.shape[0],784)
y_test = y_test.reshape(y_test.shape[0],1)
MNIST_image = np.vstack( (x_train,x_test) )
MNIST_label = np.vstack( (y_train,y_test) )
def preprocess(img):
    single_image = img.reshape(28, 28)
    # downsample to 5x5 with cv2 and binarise to the bipolar values +1/-1 described above
    small = cv2.resize(single_image.astype('float32'), (5, 5))
    binary = np.where(small > small.mean(), 1, -1)
    return binary.flatten()
After that, three patterns are chosen and the connection weights are assigned by summing the outer products of the stored patterns, as sketched below.
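As a small illustrative sketch (not part of the submitted code), the storage rule implemented by the class below is W = Σ yᵢᵀyᵢ − m·I, where m is the number of stored patterns and the identity term removes the self-connections; the helper name store_patterns and the toy 4-unit patterns are assumptions introduced here.

import numpy as np

def store_patterns(patterns):
    # patterns: list of bipolar (+1/-1) vectors of equal length n
    n = len(patterns[0])
    W = np.zeros((n, n))
    for p in patterns:
        y = np.array(p, dtype=float).reshape(1, n)
        W += y.T @ y                          # outer product y_i^T y_i
    W -= len(patterns) * np.identity(n)       # remove self-connections
    return W

# tiny 4-unit example
W = store_patterns([[1, -1, 1, -1], [1, 1, -1, -1]])
print(np.diag(W))  # diagonal is all zeros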
class Hopfield:
    def __init__(self, y):
        self.y = y
        self.memory = 3  # number of states to be memorised by the network
        self.I = np.identity(25)
        self.W = np.zeros((25, 25))

    def train(self):
        total = np.zeros((25, 25))
        for i in range(self.memory):
            y = np.array(self.y[i]).reshape(1, 25)  # flatten to a 1x25 row vector
            transpose = y.T.reshape(25, 1)          # transpose: 1x25 reshaped into 25x1
            total = np.add(total, np.multiply(transpose, y))  # accumulate the outer products
        result = np.subtract(total, self.memory * self.I)     # remove self-connections
        self.W = result
        print(result)

    def evaluate(self, val, epochs):
        out1 = val
        out1 = np.array(out1).reshape(1, 25)
        for epoch in range(epochs):
            out2 = np.sign(np.matmul(out1, self.W))  # recall step: weighted sum followed by sign activation
            out1[0, :] = out2[0, :]
            if epoch == epochs - 1:
                img = out2.reshape(5, 5)
                plt.imshow(img, cmap='gray')
        return out2
y = [preprocess(x_train[1, :]), preprocess(x_train[2, :]), preprocess(x_train[3, :])]  # input patterns for the network [y1, y2, y3]
model = Hopfield(y)
model.train()
l = preprocess(x_train[89, :])  # unseen pattern to recall
img = model.evaluate(l, 3)
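To illustrate why the recall loop settles on a stored pattern, a rough sketch (not part of the submitted code) can track the Hopfield energy E = −(1/2)·y·W·yᵀ, which typically decreases as the state is driven toward a stored pattern (and is guaranteed non-increasing for one-unit-at-a-time updates); the helper name energy and the reuse of model.W and l are assumptions for illustration.

def energy(W, state):
    # Hopfield energy of a bipolar state vector under weight matrix W
    s = np.array(state, dtype=float).reshape(1, -1)
    return (-0.5 * s @ W @ s.T).item()

state = np.array(l, dtype=float).reshape(1, 25)
for step in range(3):
    print("energy:", energy(model.W, state))
    state = np.sign(state @ model.W)  # same update as in evaluate()
print("final energy:", energy(model.W, state))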
Table 4.2: Evaluation of the correctness and the accuracy
Unseen pattern Matched pattern
Conclusion:
In conclusion, the Kohonen self-organizing map creates a topological network in which output nodes belonging to the same group are neighbours. The orientation of the classes may change with the order in which the input patterns are presented to the network.
We observed that the Hopfield network successfully transformed the unseen pattern into a stored pattern. This indicates that the network is able to store the given patterns.