import json

import numpy as np


class Neuron:
"""
Attributes:
"""
"""
Parameters:
"""
self.weights = np.random.randn(input_size)
self.bias = np.random.randn()
self.output = 0
self.input = None
self.dweights = np.zeros_like(self.weights)
self.dbias = 0
"""
Returns:
"""
return 1 / (1 + np.exp(-x))
"""
Parameters:
Returns:
"""
return x * (1 - x)
"""
Parameters:
Returns:
"""
self.input = inputs
weighted_sum = np.dot(inputs, self.weights) + self.bias
self.output = self.activate(weighted_sum)
return self.output
"""
Parameters:
d_output (float): The gradient of the loss with respect to the output.
Returns:
"""
self.dbias = d_activation
return d_input
# Example usage
if __name__ == "__main__":
    neuron = Neuron(3)
    # Example inputs
    inputs = np.array([1.5, 2.0, -1.0])
    output = neuron.forward(inputs)
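    print(f"Neuron output: {output}")

    # Sketch of a single training step for this one neuron (not in the original
    # listing): assume a squared-error loss against a made-up target of 1.0, so
    # the loss gradient w.r.t. the output is 2 * (output - target).
    d_output = 2 * (output - 1.0)
    d_input = neuron.backward(d_output)
    # Plain gradient-descent update with an illustrative learning rate of 0.1
    neuron.weights -= 0.1 * neuron.dweights
    neuron.bias -= 0.1 * neuron.dbias
    print(f"Gradient w.r.t. inputs: {d_input}")
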
class Layer:
    """
    A fully connected layer made of individual Neuron objects.
    Attributes:
        neurons (list): The Neuron instances that make up the layer.
    """

    def __init__(self, num_neurons, input_size):
        """
        Parameters:
            num_neurons (int): The number of neurons in the layer.
            input_size (int): The number of inputs each neuron receives.
        """
        self.neurons = [Neuron(input_size) for _ in range(num_neurons)]

    def forward(self, inputs):
        """
        Computes the forward pass for the layer.
        Parameters:
            inputs (ndarray): The input vector to the layer.
        Returns:
            ndarray: One output per neuron.
        """
        outputs = np.array([neuron.forward(inputs) for neuron in self.neurons])
        return outputs

    def backward(self, d_outputs, learning_rate):
        """
        Computes the backward pass and updates the neurons in the layer.
        Parameters:
            d_outputs (ndarray): The gradients of the loss with respect to the outputs of the layer.
            learning_rate (float): Step size for the gradient descent update.
        Returns:
            ndarray: The gradients of the loss with respect to the inputs to the layer.
        """
        d_inputs = np.zeros_like(self.neurons[0].weights)
        for neuron, d_output in zip(self.neurons, d_outputs):
            d_inputs += neuron.backward(d_output)
            # Gradient descent update of this neuron's parameters
            neuron.weights -= learning_rate * neuron.dweights
            neuron.bias -= learning_rate * neuron.dbias
        return d_inputs
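    # NeuralNetwork.save/load below call layer.to_dict() and layer.from_dict(),
    # which are not shown in the original listing. A minimal sketch, assuming
    # each neuron is serialised as its weight list and bias:
    def to_dict(self):
        """Returns a JSON-serialisable list of each neuron's weights and bias."""
        return [{"weights": n.weights.tolist(), "bias": n.bias} for n in self.neurons]

    def from_dict(self, neurons_data):
        """Restores each neuron's weights and bias from serialised data."""
        for neuron, data in zip(self.neurons, neurons_data):
            neuron.weights = np.array(data["weights"])
            neuron.bias = float(data["bias"])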
# Example usage
if __name__ == "__main__":
    # A layer of 2 neurons, each taking 3 inputs
    layer = Layer(2, 3)
    inputs = np.array([1.5, 2.0, -1.0])
    layer_output = layer.forward(inputs)
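    print(f"Layer output: {layer_output}")

    # Sketch of a backward step for the layer (not in the original listing):
    # pretend the loss gradient w.r.t. each output is 1.0 and use an
    # illustrative learning rate of 0.1 for the parameter update.
    d_inputs = layer.backward(np.ones(len(layer.neurons)), learning_rate=0.1)
    print(f"Gradient w.r.t. layer inputs: {d_inputs}")
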
class NeuralNetwork:
    """
    A simple feed-forward neural network built from Layer objects.
    Attributes:
        layers (list): The Layer objects, in forward order.
        loss_list (list): The average loss recorded after each training epoch.
    """

    def __init__(self):
        """
        Initialises an empty network with no layers and an empty loss history.
        """
        self.layers = []
        self.loss_list = []
"""
Adds a layer to the neural network.
Parameters:
input_size (int): The number of inputs to the new layer (or neurons).
"""
if not self.layers:
self.layers.append(Layer(num_neurons, input_size))
else:
previous_output_size = len(self.layers[-1].neurons)
self.layers.append(Layer(num_neurons, previous_output_size))
"""
Parameters:
Returns:
"""
inputs = layer.forward(inputs)
return inputs
"""
Parameters:
loss_gradient (ndarray): The gradient of the loss with respect to the output of the
network.
"""
"""
Parameters:
"""
loss = 0
for i in range(len(X)):
# Forward pass
output = self.forward(X[i])
self.backward(loss_gradient, learning_rate)
loss /= len(X)
self.loss_list.append(loss)
if epoch % 100 == 0:
"""
Parameters:
Returns:
"""
predictions = []
for i in range(len(X)):
predictions.append(self.forward(X[i]))
return np.array(predictions)
"""
Saves the neural network's layers, neurons, weights, and biases to a JSON file.
Parameters:
filename (str): The name of the file to save the model parameters.
"""
model_data = {
"layers": []
# Iterate through each layer and store its configuration and neurons
layer_data = {
"num_neurons": len(layer.neurons),
"neurons": layer.to_dict()
model_data["layers"].append(layer_data)
json.dump(model_data, f)
"""
Loads the neural network's layers, neurons, weights, and biases from a JSON file.
Parameters:
filename (str): The name of the file from which to load the model parameters.
"""
model_data = json.load(f)
self.layers = []
num_neurons = layer_data["num_neurons"]
input_size = layer_data["input_size"]
# Create a new layer with the specified number of neurons and input size
new_layer.from_dict(layer_data["neurons"])
self.layers.append(new_layer)
if __name__ == "__main__":
    # Example of usage:
    # Create a neural network with 3 input features, 1 hidden layer, and 1 output layer
    nn = NeuralNetwork()
    # Add layers: input size for the first hidden layer is 3 (e.g., 3 input features);
    # 4 hidden neurons is an arbitrary illustrative choice
    nn.add_layer(num_neurons=4, input_size=3)
    nn.add_layer(num_neurons=1)
    nn.save("../models/model.json")
    # Load the model (no need to train)
    nn.load("../models/model.json")
    # Some example inputs to run through the restored network
    X = np.array([[1.5, 2.0, -1.0], [0.5, -0.5, 1.0]])
    predictions = nn.predict(X)
    print(f"Predictions: {predictions}")