
Neural Network Binary Classification Code

import numpy as np               # For matrix math
import matplotlib.pyplot as plt  # For plotting

# Training data and labels
X = np.array([
    [0, 1],
    [1, 0],
    [1, 1],
    [0, 0]
])
y = np.array([
    [1],
    [1],
    [0],
    [0]
])
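Together, X and y encode the XOR truth table: the label is 1 exactly when the two inputs differ, which is the classic example of a problem a single-layer network cannot solve and a hidden layer can.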

# Parameters
num_i_units = 2       # Number of input units
num_h_units = 2       # Number of hidden units
num_o_units = 1       # Number of output units

learning_rate = 0.01  # Learning rate
reg_param = 0         # Regularization parameter



max_iter = 5000       # Maximum iterations
m = len(X)            # Number of training examples

# Initialize weights and biases
np.random.seed(1)
W1 = np.random.normal(0, 1, (num_h_units, num_i_units))  # 2x2
W2 = np.random.normal(0, 1, (num_o_units, num_h_units))  # 1x2
B1 = np.random.random((num_h_units, 1))                  # 2x1
B2 = np.random.random((num_o_units, 1))                  # 1x1
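Drawing W1 and W2 from a standard normal breaks the symmetry between the hidden units (identical initial weights would receive identical gradients), and the fixed seed makes runs reproducible.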

# Activation function
def sigmoid(z, derv=False):
    if derv:
        return sigmoid(z) * (1 - sigmoid(z))
    return 1 / (1 + np.exp(-z))
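The derv branch uses the identity sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)), which is exactly the factor backpropagation needs when differentiating through the hidden-layer activation.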

# Forward propagation
def forward(x, predict=False):
    a1 = x.reshape(x.shape[0], 1)  # Convert input to column vector
    z2 = W1.dot(a1) + B1           # 2x1
    a2 = sigmoid(z2)               # Hidden layer activation
    z3 = W2.dot(a2) + B2           # 1x1
    a3 = sigmoid(z3)               # Output layer activation
    if predict:
        return a3
    return a1, a2, a3



# Training function
def train(W1, W2, B1, B2):
    cost_history = np.zeros((max_iter, 1))  # To store cost at each iteration
    for i in range(max_iter):
        c = 0
        dW1 = np.zeros_like(W1)
        dW2 = np.zeros_like(W2)
        dB1 = np.zeros_like(B1)
        dB2 = np.zeros_like(B2)
        for j in range(m):
            # Forward propagation
            a0 = X[j].reshape(X[j].shape[0], 1)  # 2x1
            z1 = W1.dot(a0) + B1  # 2x1
            a1 = sigmoid(z1)      # 2x1
            z2 = W2.dot(a1) + B2  # 1x1
            a2 = sigmoid(z2)      # 1x1

            # Backpropagation
            dz2 = a2 - y[j]       # 1x1
            dW2 += dz2 * a1.T     # 1x2
            dB2 += dz2            # 1x1
            dz1 = np.multiply(W2.T.dot(dz2), sigmoid(z1, derv=True))  # 2x1
            dW1 += dz1.dot(a0.T)  # 2x2


            dB1 += dz1            # 2x1

            # Compute cost (binary cross-entropy)
            c += -y[j] * np.log(a2) - (1 - y[j]) * np.log(1 - a2)

        # Update weights and biases; the L2 penalty is scaled by the
        # learning rate together with the gradient
        W1 -= learning_rate * ((dW1 / m) + (reg_param / m) * W1)
        W2 -= learning_rate * ((dW2 / m) + (reg_param / m) * W2)
        B1 -= learning_rate * (dB1 / m)
        B2 -= learning_rate * (dB2 / m)

        # Store the average cost
        cost_history[i] = c / m

    return W1, W2, B1, B2, cost_history
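Note how the output error term is simply dz2 = a2 - y[j]: with a sigmoid output unit and the cross-entropy cost above, the sigmoid derivative cancels out of the output gradient, which is why sigmoid(z, derv=True) only appears in the hidden-layer term dz1.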

# Train the network
W1, W2, B1, B2, cost_history = train(W1, W2, B1, B2)

# Plot cost over iterations
plt.plot(range(max_iter), cost_history)
plt.xlabel("Iterations")
plt.ylabel("Cost")
plt.title("Cost Reduction Over Iterations")
plt.show()
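As a quick sanity check, the trained network can be queried through forward with predict=True. This sketch is not part of the original listing; note that with learning_rate = 0.01 and 5000 iterations the outputs may still sit fairly close to 0.5, so a larger learning rate or more iterations may be needed before the XOR predictions become crisp.

# Print the trained network's output for each training input.
# Outputs closer to the 0/1 targets indicate a better fit.
for x, label in zip(X, y):
    pred = forward(x, predict=True)  # 1x1 array holding the class probability
    print(x, "->", round(pred.item(), 3), "target:", label[0])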
