
This document describes the implementation of an artificial neural network using backpropagation to classify seed types. It includes functions for loading and preprocessing a seed dataset, initializing a neural network, performing forward and backward propagation, updating weights, and training the network over multiple epochs. The network is evaluated using 5-fold cross-validation on the seed dataset. The results show a mean classification accuracy of 93.8% across the folds.


NAME : DANA VAMSI KRISHNA

REG NO. 18MIS7099

MACHINE LEARNING - LAB - 10

Implement an Artificial Neural Network (use the Backpropagation algorithm) and demonstrate the results with appropriate data sets.

# Backprop on the Seeds Dataset
from random import seed
from random import randrange
from random import random
from csv import reader
from math import exp

# Load a CSV file
def load_csv(filename):
  dataset = list()
  with open(filename, 'r') as file:
    csv_reader = reader(file)
    for row in csv_reader:
      if not row:
        continue
      dataset.append(row)
  return dataset

# Convert string column to float
def str_column_to_float(dataset, column):
  for row in dataset:
    row[column] = float(row[column].strip())

# Convert string column to integer
def str_column_to_int(dataset, column):
  class_values = [row[column] for row in dataset]
  unique = set(class_values)
  lookup = dict()
  for i, value in enumerate(unique):
    lookup[value] = i
  for row in dataset:
    row[column] = lookup[row[column]]
  return lookup
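
For example (an illustrative sketch, not part of the lab code), a string class column is mapped to the integers 0 onward. Since Python's set does not guarantee an ordering, the exact class-to-integer assignment can vary between runs:

# Illustrative only: map a small, made-up class column to integers
rows = [['5.1', 'Kama'], ['4.9', 'Rosa'], ['5.0', 'Kama']]
lookup = str_column_to_int(rows, 1)
print(lookup)                     # e.g. {'Kama': 0, 'Rosa': 1} (set order may vary)
print([row[1] for row in rows])   # e.g. [0, 1, 0]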

# Find the min and max values for each column
def dataset_minmax(dataset):
  minmax = list()
  stats = [[min(column), max(column)] for column in zip(*dataset)]
  return stats

# Rescale dataset columns to the range 0-1
def normalize_dataset(dataset, minmax):
  for row in dataset:
    for i in range(len(row)-1):
      row[i] = (row[i] - minmax[i][0]) / (minmax[i][1] - minmax[i][0])
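
Each input column i is rescaled with the usual min-max formula, value' = (value - min_i) / (max_i - min_i); the class label in the last column is left untouched. A quick sketch with hypothetical values:

# Illustrative only: one feature column with min 10 and max 20
row = [15.0, 0]              # feature value 15.0, class label 0
minmax = [[10.0, 20.0]]
normalize_dataset([row], minmax)
print(row[0])                # 0.5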

# Split a dataset into k folds
def cross_validation_split(dataset, n_folds):
  dataset_split = list()
  dataset_copy = list(dataset)
  fold_size = int(len(dataset) / n_folds)
  for i in range(n_folds):
    fold = list()
    while len(fold) < fold_size:
      index = randrange(len(dataset_copy))
      fold.append(dataset_copy.pop(index))
    dataset_split.append(fold)
  return dataset_split
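
With the 210-row seeds dataset and n_folds = 5, each fold holds int(210 / 5) = 42 rows; if the row count were not divisible by n_folds, the remainder rows would simply be dropped. A quick sketch with dummy data:

# Illustrative only: 210 dummy rows split into 5 folds of 42
seed(1)
data = [[i] for i in range(210)]
folds = cross_validation_split(data, 5)
print([len(fold) for fold in folds])   # [42, 42, 42, 42, 42]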

# Calculate accuracy percentage
def accuracy_metric(actual, predicted):
  correct = 0
  for i in range(len(actual)):
    if actual[i] == predicted[i]:
      correct += 1
  return correct / float(len(actual)) * 100.0
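
A quick sanity check (illustrative values): 3 correct predictions out of 4 gives 75%.

# Illustrative only
print(accuracy_metric([0, 1, 2, 1], [0, 1, 1, 1]))   # 75.0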

# Evaluate an algorithm using a cross validation split
def evaluate_algorithm(dataset, algorithm, n_folds, *args):
  folds = cross_validation_split(dataset, n_folds)
  scores = list()
  for fold in folds:
    train_set = list(folds)
    train_set.remove(fold)
    train_set = sum(train_set, [])
    test_set = list()
    for row in fold:
      row_copy = list(row)
      test_set.append(row_copy)
      row_copy[-1] = None  # hide the class label from the algorithm
    predicted = algorithm(train_set, test_set, *args)
    actual = [row[-1] for row in fold]
    accuracy = accuracy_metric(actual, predicted)
    scores.append(accuracy)
  return scores

# Calculate neuron activation for an input
def activate(weights, inputs):
  activation = weights[-1]
  for i in range(len(weights)-1):
    activation += weights[i] * inputs[i]
  return activation
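
The last weight acts as the bias, so a neuron computes activation = bias + sum_i(weight_i * input_i). A quick sketch with hypothetical weights:

# Illustrative only: two inputs, weights [0.5, -0.2], bias 0.1
print(activate([0.5, -0.2, 0.1], [1.0, 2.0]))   # 0.1 + 0.5*1.0 - 0.2*2.0 ≈ 0.2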

# Transfer neuron activation
def transfer(activation):
  return 1.0 / (1.0 + exp(-activation))
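
This is the sigmoid function sigma(a) = 1 / (1 + e^(-a)), which squashes any activation into (0, 1) and equals 0.5 at a = 0:

# Illustrative only: sigmoid at zero and at large +/- activations
print(transfer(0.0))     # 0.5
print(transfer(5.0))     # ≈ 0.9933
print(transfer(-5.0))    # ≈ 0.0067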

# Forward propagate input to a network output
def forward_propagate(network, row):
  inputs = row
  for layer in network:
    new_inputs = []
    for neuron in layer:
      activation = activate(neuron['weights'], inputs)
      neuron['output'] = transfer(activation)
      new_inputs.append(neuron['output'])
    inputs = new_inputs
  return inputs
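
Note that row can still carry its class label in the last position: activate only reads the first len(weights)-1 values, so the label is ignored. A minimal sketch on a hand-built network (hypothetical weights; 2 inputs, 1 hidden neuron, 2 outputs):

# Illustrative only: forward pass through a tiny fixed network
tiny = [[{'weights': [0.5, 0.5, 0.0]}],
        [{'weights': [1.0, 0.0]}, {'weights': [-1.0, 0.0]}]]
print(forward_propagate(tiny, [1.0, 1.0]))   # ≈ [0.675, 0.325]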

# Calculate the derivative of a neuron output
def transfer_derivative(output):
  return output * (1.0 - output)

# Backpropagate error and store in neurons
def backward_propagate_error(network, expected):
  for i in reversed(range(len(network))):
    layer = network[i]
    errors = list()
    if i != len(network)-1:
      for j in range(len(layer)):
        error = 0.0
        for neuron in network[i + 1]:
          error += (neuron['weights'][j] * neuron['delta'])
        errors.append(error)
    else:
      for j in range(len(layer)):
        neuron = layer[j]
        errors.append(expected[j] - neuron['output'])
    for j in range(len(layer)):
      neuron = layer[j]
      neuron['delta'] = errors[j] * transfer_derivative(neuron['output'])
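
For an output neuron the stored delta is (expected - output) * output * (1 - output); for a hidden neuron it is (sum over the next layer of weight * delta) * output * (1 - output). A minimal sketch, reusing the tiny hand-built network from above:

# Illustrative only: deltas for a one-hot target of class 0
net = [[{'weights': [0.5, 0.5, 0.0]}],
       [{'weights': [1.0, 0.0]}, {'weights': [-1.0, 0.0]}]]
forward_propagate(net, [1.0, 1.0])
backward_propagate_error(net, [1, 0])
print([round(n['delta'], 4) for n in net[1]])   # ≈ [0.0713, -0.0713]
print(round(net[0][0]['delta'], 4))             # ≈ 0.028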

# Update network weights with error
def update_weights(network, row, l_rate):
  for i in range(len(network)):
    inputs = row[:-1]
    if i != 0:
      inputs = [neuron['output'] for neuron in network[i - 1]]
    for neuron in network[i]:
      for j in range(len(inputs)):
        neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
      neuron['weights'][-1] += l_rate * neuron['delta']
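
Each weight therefore takes the stochastic-gradient-descent step weight += l_rate * delta * input, with the bias using an implicit input of 1.0, and the update is applied after every training row rather than per batch. A quick sketch with hypothetical values:

# Illustrative only: one SGD step on a single one-neuron layer
net = [[{'weights': [0.5, 0.1], 'delta': 0.2}]]
update_weights(net, [1.0, None], 0.3)   # row = [input, label]
print(net[0][0]['weights'])             # ≈ [0.56, 0.16]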

# Train a network for a fixed number of epochs
def train_network(network, train, l_rate, n_epoch, n_outputs):
  for epoch in range(n_epoch):
    for row in train:
      outputs = forward_propagate(network, row)
      expected = [0 for i in range(n_outputs)]
      expected[row[-1]] = 1
      backward_propagate_error(network, expected)
      update_weights(network, row, l_rate)
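
Here expected is the one-hot encoding of the integer class label, so with n_outputs = 3 a row labelled 1 trains against [0, 1, 0]:

# Illustrative only: building the one-hot target for class 1 of 3
expected = [0 for i in range(3)]
expected[1] = 1
print(expected)   # [0, 1, 0]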

# Initialize a network
def initialize_network(n_inputs, n_hidden, n_outputs):
  network = list()
  hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(n_hidden)]
  network.append(hidden_layer)
  output_layer = [{'weights':[random() for i in range(n_hidden + 1)]} for i in range(n_outputs)]
  network.append(output_layer)
  return network
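
Each neuron gets one random weight per input plus a bias term. For example (illustrative sizes):

# Illustrative only: 2 inputs, 1 hidden neuron, 2 outputs
seed(1)
net = initialize_network(2, 1, 2)
print(len(net[0][0]['weights']))   # 3 = 2 inputs + 1 bias
print(len(net[1]))                 # 2 output neurons, each with 1 + 1 weights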

# Make a prediction with a network
def predict(network, row):
  outputs = forward_propagate(network, row)
  return outputs.index(max(outputs))
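
The predicted class is the index of the largest output activation (arg max). Reusing the tiny hand-built network from above:

# Illustrative only: outputs ≈ [0.675, 0.325], so class 0 wins
tiny = [[{'weights': [0.5, 0.5, 0.0]}],
        [{'weights': [1.0, 0.0]}, {'weights': [-1.0, 0.0]}]]
print(predict(tiny, [1.0, 1.0]))   # 0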

# Backpropagation Algorithm With Stochastic Gradient Descent
def back_propagation(train, test, l_rate, n_epoch, n_hidden):
  n_inputs = len(train[0]) - 1
  n_outputs = len(set([row[-1] for row in train]))
  network = initialize_network(n_inputs, n_hidden, n_outputs)
  train_network(network, train, l_rate, n_epoch, n_outputs)
  predictions = list()
  for row in test:
    prediction = predict(network, row)
    predictions.append(prediction)
  return predictions

# Test Backprop on Seeds dataset
seed(1)
# load and prepare data
filename = '/content/seeds.csv'
dataset = load_csv(filename)
for i in range(len(dataset[0])-1):
  str_column_to_float(dataset, i)
# convert class column to integers
str_column_to_int(dataset, len(dataset[0])-1)
# normalize input variables
minmax = dataset_minmax(dataset)
normalize_dataset(dataset, minmax)
# evaluate algorithm
n_folds = 5
l_rate = 0.3
n_epoch = 500
n_hidden = 5
scores = evaluate_algorithm(dataset, back_propagation, n_folds, l_rate, n_epoch, n_hidden)
print('Scores: %s' % scores)
print('Mean Accuracy: %.3f%%' % (sum(scores)/float(len(scores))))

Scores: [95.23809523809523, 92.85714285714286, 97.61904761904762, 92.85714285714286, 90.47619047619048]
Mean Accuracy: 93.810%


