Deep Learning
#2. Design and implement a Madaline network to solve the XOR problem.
Simulate the network for non-linearly separable data and demonstrate
the results.
import numpy as np
# Binary step activation, applied element-wise.
def activation(x):
    """Return 1 where x is strictly positive, 0 elsewhere."""
    return np.asarray(x > 0, dtype=int)
# Train a 2-layer MLP by plain batch gradient descent (learning rate 0.1).
# NOTE(review): sigmoid, sigmoid_derivative, X, y, w1, b1, w2, b2 are
# defined elsewhere in this file — confirm their shapes agree before running.
for _ in range(10000):
    # Forward pass: hidden then output activations.
    h = sigmoid(np.dot(X, w1) + b1)
    o = sigmoid(np.dot(h, w2) + b2)
    # Backward pass: deltas at the output and hidden layers.
    d2 = (y - o) * sigmoid_derivative(o)
    d1 = np.dot(d2, w2.T) * sigmoid_derivative(h)
    # Parameter updates; the (y - o) error sign means we ascend toward y.
    w2 += np.dot(h.T, d2) * 0.1
    b2 += np.sum(d2, axis=0, keepdims=True) * 0.1
    w1 += np.dot(X.T, d1) * 0.1
    b1 += np.sum(d1, axis=0, keepdims=True) * 0.1
# Evaluate the trained MLP on every training example, printing the
# rounded (0/1) prediction for each input row.
for x in X:
    h = sigmoid(np.dot(x, w1) + b1)
    o = sigmoid(np.dot(h, w2) + b2)
    print(f"Input: {x}, Output: {o.round()}")
# Gaussian radial basis function.
def rbf(x, center, sigma):
    """Return exp(-||x - center||^2 / (2 * sigma^2)), peaking at 1 when x == center."""
    dist_sq = np.linalg.norm(x - center) ** 2
    return np.exp(-dist_sq / (2 * sigma ** 2))
# XOR problem: the four binary input pairs and their targets.
# XOR is not linearly separable, which is why a multi-layer network is used.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 1, 1, 0])
# Output results
# NOTE(review): `predictions` is not defined anywhere in this file as shown —
# presumably produced by an RBF/MLP forward pass defined elsewhere; confirm.
print("Input:\n", X)
print("Predicted Output:\n", predictions)
#5. Implement an autoassociative memory network to store and recall
patterns. Write a program that uses Hebbian learning to train the
network and test its performance with noisy inputs.
import numpy as np
# Hebbian learning rule for an autoassociative memory.
def train_hebbian(patterns):
    """Return the weight matrix W = P^T · P accumulated over all stored patterns."""
    weights = patterns.T @ patterns
    return weights
# Recall a stored pattern from the autoassociative memory.
def recall(pattern, weights):
    """Return the sign-thresholded (bipolar) network response to *pattern*."""
    response = np.dot(pattern, weights)
    return np.sign(response)
# Bipolar (+1/-1) patterns to store in the autoassociative memory;
# one pattern per row, each of length 4.
patterns = np.array([
[1, -1, 1, -1],
[-1, 1, -1, 1]
])
# Recall the associated output pattern (BAM forward pass).
# Fixed: apply np.sign so the response is thresholded back to a bipolar
# (+1/-1) pattern, consistent with the Hebbian `recall` defined earlier —
# the raw dot product is only a graded activation, not a stored pattern.
# NOTE(review): this redefinition shadows the earlier `recall`; consider
# renaming one of them (e.g. recall_bam) at the call sites.
def recall(input_pattern, weights):
    """Return the bipolar pattern associated with *input_pattern* under *weights*."""
    return np.sign(np.dot(input_pattern, weights))
# Bipolar output patterns paired with the stored input patterns
# (one row per association) for the BAM network.
output_patterns = np.array([
[1, 1],
[-1, -1]
])
# Train the BAM network, obtaining forward (input→output) and
# backward (output→input) weight matrices.
# NOTE(review): `train_bam` and `input_patterns` are not defined in this
# file as shown — presumably train_bam sums outer products of the pattern
# pairs; confirm before running.
w_in_out, w_out_in = train_bam(input_patterns, output_patterns)
#8. Implement a Hopfield network for pattern storage and recall. Write
a program that demonstrates the energy minimization process and the
ability to recall patterns even in the presence of noise.
import numpy as np
# Competitive (winner-take-all style) network layer.
class CompetitiveNetwork:
    """Competitive layer holding one weight vector per neuron."""

    def __init__(self, num_neurons, input_dim):
        """Initialise *num_neurons* weight vectors of length *input_dim*,
        drawn uniformly from [0, 1)."""
        self.num_neurons = num_neurons
        self.weights = np.random.rand(num_neurons, input_dim)
# Prepare next-character training data from the string "HELLO":
# dataX holds the integer index of each character, dataY the index of
# the character that immediately follows it.
data = "HELLO"
chars = sorted(set(data))
char_to_int = {c: i for i, c in enumerate(chars)}
dataX = [char_to_int[data[i]] for i in range(len(data)-1)]
dataY = [char_to_int[data[i+1]] for i in range(len(data)-1)]
# NOTE(review): `model` is not defined in this file as shown — presumably a
# Keras Sequential RNN built elsewhere; confirm before calling compile.
model.compile(loss='categorical_crossentropy', optimizer='adam',
metrics=['accuracy'])