Back Propagation Neural Network
import numpy as np
# Activation function and its derivative
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
def sigmoid_derivative(x):
    # x is assumed to already be a sigmoid output, so the derivative is x * (1 - x)
    return x * (1 - x)
# Training data (XOR problem)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
# Initialize weights and biases
input_layer_neurons = 2
hidden_layer_neurons = 4
output_layer_neurons = 1
np.random.seed(1)
weights_input_hidden = np.random.uniform(-1, 1, (input_layer_neurons, hidden_layer_neurons))
weights_hidden_output = np.random.uniform(-1, 1, (hidden_layer_neurons, output_layer_neurons))
bias_hidden = np.random.uniform(-1, 1, (1, hidden_layer_neurons))
bias_output = np.random.uniform(-1, 1, (1, output_layer_neurons))
# Learning rate
theta = 0.5
# Training the neural network
epochs = 10000
for epoch in range(epochs):
    # Forward propagation
    hidden_layer_input = np.dot(X, weights_input_hidden) + bias_hidden
    hidden_layer_output = sigmoid(hidden_layer_input)
    output_layer_input = np.dot(hidden_layer_output, weights_hidden_output) + bias_output
    predicted_output = sigmoid(output_layer_input)
    # Backpropagation
    error = y - predicted_output
    d_predicted_output = error * sigmoid_derivative(predicted_output)
    error_hidden_layer = d_predicted_output.dot(weights_hidden_output.T)
    d_hidden_layer = error_hidden_layer * sigmoid_derivative(hidden_layer_output)
    # Updating weights and biases
    weights_hidden_output += hidden_layer_output.T.dot(d_predicted_output) * theta
    weights_input_hidden += X.T.dot(d_hidden_layer) * theta
    bias_output += np.sum(d_predicted_output, axis=0, keepdims=True) * theta
    bias_hidden += np.sum(d_hidden_layer, axis=0, keepdims=True) * theta
    # Report the mean squared error every 1000 epochs
    if epoch % 1000 == 0:
        loss = np.mean(np.square(error))
        print(f'Epoch {epoch}, Loss: {loss}')
# Final predictions
print('\nFinal Predictions:')
print(predicted_output)
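Since the network emits continuous sigmoid values, an optional final step (not part of the listing above) is to threshold those outputs at 0.5 to read off binary XOR labels; a minimal sketch, assuming the script above has just run:
# Threshold the sigmoid outputs at 0.5 to obtain 0/1 class labels
binary_predictions = (predicted_output > 0.5).astype(int)
print('Thresholded Predictions:')
print(binary_predictions)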