
WRITE A PROGRAM FOR IMPLEMENTING PERCEPTRON LEARNING ALGORITHM

#include<iostream>
#include<cmath>
#define learn_rate 0.3
#define MAX 20
using namespace std;

int main()
{
    int x[MAX][MAX], y[MAX], in[MAX], i, j, m, n;
    float w[MAX], calc_out = 0.0, threshold = 0.5;

    cout<<"\nEnter no. of bits for input : ";
    cin>>m;                                     //no. of bits of input

    cout<<"\nEnter no. of training sets you want to learn : ";
    cin>>n;                                     //no. of training sets

    for(i=0;i<n;i++)
    {
        cout<<"\nEnter training set\n";
        for(j=0;j<m;j++)
        {
            cin>>x[i][j];                       //Insertion of training sets
        }
        x[i][m] = 1;                            //bias input

        cout<<"\nEnter Desired output : ";
        cin>>y[i];                              //Insertion of desired output
    }

    for(j=0;j<=m;j++)
    {
        w[j] = 0.3;                             //Initialization of weights
        cout<<"\tw"<<j+1<<"\t"<<w[j];
    }

    for(i=0;i<n;i++)
    {
        calc_out = 0.0;
        float error = 0.0;

        // Calculate output: NET = x1*w1 + x2*w2 + ... + xn*wn
        for(j=0;j<m;j++)
        {
            calc_out = calc_out + (x[i][j] * w[j]);
        }
        error = (float)(0.5 * pow(threshold - calc_out, 2));
        cout<<"\nError"<<error;

        if(error == 0.0)
            continue;

        if(calc_out >= threshold)
        {
            if(y[i] == 1)
                continue;                       //correctly classified

            //fired but desired output is 0 : decrease weights
            threshold = threshold - w[m];
            w[m] = w[m] - learn_rate;
            for(j=0;j<m;j++)
            {
                if(x[i][j] == 1)
                {
                    w[j] = w[j] - learn_rate;
                    i = -1;                     //restart learning from the first training set
                }
            }
        }
        else
        {
            if(y[i] == 0)
                continue;                       //correctly classified

            //did not fire but desired output is 1 : increase weights
            threshold = threshold - w[m];
            w[m] = w[m] + learn_rate;
            for(j=0;j<m;j++)
            {
                if(x[i][j] == 1)
                {
                    w[j] = w[j] + learn_rate;
                    i = -1;                     //restart learning from the first training set
                }
            }
        }
    }

    cout<<"\n";
    for(j=0;j<=m;j++)
    {
        cout<<"\tw"<<j+1<<"\t"<<w[j];           //final weights
    }
    cout<<"\nThreshold\t"<<threshold;

    do{
        cout<<"\nEnter Test set\n";
        for(j=0;j<m;j++)
        {
            cin>>in[j];                         //Test case insertion
        }
        calc_out = 0;
        for(j=0;j<m;j++)
        {
            calc_out = calc_out + (float)(in[j] * w[j]);    //Actual output
        }
        if(calc_out >= threshold)               //Checking against threshold value
        {
            cout<<"Output for this Test case is 1\n";
        }
        else
        {
            cout<<"Output for this Test case is 0\n";
        }
        cout<<"Press 1 for test again else 0 : ";
        cin>>i;
    }while(i==1);
    return(0);
}

OUTPUT

hemant@hemant-laptop:~/h$ g++ perceptron1.cc
hemant@hemant-laptop:~/h$ ./a.out

Enter no. of bits for input : 3

Enter no. of training sets you want to learn : 5

Enter training set
1
1
1

Enter Desired output : 1

Enter training set
0
1
1

Enter Desired output : 0

Enter training set
1
0
1

Enter Desired output : 0

Enter training set
1
1
0

Enter Desired output : 0

Enter training set
1
0
0

Enter Desired output : 0


w1 0.3 w2 0.3 w3 0.3 w4 0.3
Error0.08
Error0.005
Error0.005
Error0.02
Error0.005
Error0.125
Error0.00500001
Error0.02
Error0.02
Error0.00500001
Error0.125
Error0.00500001
Error0.02
Error0.02
Error0.02
Error0.125
w1 0.3 w2 0.3 w3 0.3 w4 1.78814e-08
Threshold 0.8
Enter Test set
1
1
1
Output for this Test case is 1
Press 1 for test again else 0 : 1

Enter Test set
0
1
0
Output for this Test case is 0
Press 1 for test again else 0 : 1

Enter Test set
0
0
1
Output for this Test case is 0
Press 1 for test again else 0 : 0
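
For comparison, the run above classifies the five 3-bit samples like a logical AND of the three input bits. The same data can also be learned with the classical perceptron update rule w_j = w_j + rate*(desired - actual)*x_j, treating the bias as an extra input fixed to 1. The following is only a minimal sketch of that rule, not the program listed above: the hard-coded training data and the learning rate 0.3 mirror the run above, while the epoch cap, the zero threshold and the initial weights are assumptions made for illustration.

#include<iostream>
using namespace std;

//Minimal sketch of the classical perceptron rule w_j = w_j + rate*(desired - actual)*x_j,
//with the bias folded in as a fourth input fixed to 1. The training data is the 3-bit
//set entered in the run above; the epoch cap and initial weights are assumptions.
int main()
{
    const int m = 3, n = 5;
    int x[n][m+1] = { {1,1,1,1}, {0,1,1,1}, {1,0,1,1}, {1,1,0,1}, {1,0,0,1} };
    int y[n]      = { 1, 0, 0, 0, 0 };
    float w[m+1]  = { 0.3f, 0.3f, 0.3f, 0.3f };
    const float rate = 0.3f;

    for(int epoch=0; epoch<100; epoch++)        //fixed cap; the data is linearly separable, so this converges early
    {
        int errors = 0;
        for(int i=0;i<n;i++)
        {
            float net = 0.0f;
            for(int j=0;j<=m;j++)
                net += x[i][j] * w[j];          //NET = x1*w1 + x2*w2 + x3*w3 + bias
            int actual = (net >= 0.0f) ? 1 : 0; //hard threshold at zero
            if(actual != y[i])
            {
                errors++;
                for(int j=0;j<=m;j++)
                    w[j] += rate * (y[i] - actual) * x[i][j];   //update only on misclassification
            }
        }
        if(errors == 0)                         //all training sets classified correctly
            break;
    }

    for(int j=0;j<=m;j++)
        cout<<"w"<<j+1<<" = "<<w[j]<<"\t";      //final weights (last one is the bias weight)
    cout<<"\n";
    return 0;
}

Because this rule only changes weights on misclassified samples, it stops adjusting them once every training set is classified correctly, which the perceptron convergence theorem guarantees for linearly separable data such as the AND set above.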

B1: Write a program for back propagation algorithm.


#include<iostream>
#include<math.h>
#include<time.h>
#define eta 2.0
using namespace std;

int main()
{
    int c, i, j, hidden_unit, no_bits, k, p, no_sample;
    float input[10][10], target_out[10], weight_hidden[10][10], weight_out[10];
    float change_weight_hidden[10][10], change_weight_out[10];
    float actual_out_hidden[10], net_hidden[10], net_out, actual_out_out, in[10];
    double error;
    clock_t start, end;

    cout<<"\nEnter the no of neurons in hidden layer: ";
    cin>>hidden_unit;

    cout<<"\nEnter the no of bits of input: ";
    cin>>no_bits;

    cout<<"\nEnter the no of samples: ";
    cin>>no_sample;

    //LOOP FOR ENTERING INPUT SAMPLES FROM THE USER
    for(i=0;i<no_sample;i++)
    {
        cout<<"\nEnter the "<<i+1<<" sample: ";
        for(j=0;j<no_bits;j++)
        {
            cin>>input[i][j];
        }
        cout<<"Enter the corresponding output: ";
        cin>>target_out[i];
        input[i][no_bits]=1;                    //bias input
    }

    //initialization of weights
    for(i=0;i<no_bits+1;i++)
        for(j=0;j<hidden_unit;j++)
            weight_hidden[i][j]=1;

    for(i=0;i<hidden_unit+1;i++)
        weight_out[i]=1;

    start = clock();

    cout<<"\n#################### Learning Phase ########################\n";

    error=1;
    while(error>0.1)
    {
        error=0;
        for(p=0;p<no_sample;p++)                //loop over samples
        {
            //forward pass through the hidden layer
            for(i=0;i<hidden_unit;i++)
            {
                net_hidden[i]=0;
                for(k=0;k<no_bits+1;k++)
                    net_hidden[i]=net_hidden[i] + (input[p][k] * weight_hidden[k][i]);
            }
            for(i=0;i<hidden_unit;i++)
                actual_out_hidden[i]=1/(1+exp(-net_hidden[i]));     //output of hidden layer
            actual_out_hidden[hidden_unit]=1;                       //bias unit for the output layer

            //forward pass through the output layer
            net_out=0;
            for(k=0;k<hidden_unit+1;k++)
                net_out=net_out + (actual_out_hidden[k]*weight_out[k]);
            actual_out_out=1/(1+exp(-net_out));                     //output of output layer neuron

            error=error+(target_out[p]-actual_out_out)*(target_out[p]-actual_out_out);  //error calculation

            //change in weights between the hidden layer and output layer
            for(i=0;i<hidden_unit+1;i++)
            {
                change_weight_out[i]=eta*(target_out[p]-actual_out_out)*actual_out_out*(1-actual_out_out)*actual_out_hidden[i];
                weight_out[i]=weight_out[i]+change_weight_out[i];
            }

            //change in weights between the input layer and hidden layer
            for(j=0;j<hidden_unit;j++)
            {
                for(i=0;i<no_bits+1;i++)
                {
                    change_weight_hidden[i][j]=eta*(target_out[p]-actual_out_out)*actual_out_out*(1-actual_out_out)*actual_out_hidden[j]*(1-actual_out_hidden[j])*input[p][i]*weight_out[j];
                    weight_hidden[i][j]=weight_hidden[i][j]+change_weight_hidden[i][j];
                }
            }
        }                                       //end of sample loop

        cout<<"\nGlobal Error="<<error;
    }                                           //end of while loop

    end = clock();
    cout<<"\nLearning Time "<<(end-start)/CLOCKS_PER_SEC<<" (in seconds)";

    //FINAL WEIGHTS
    cout<<"\n################# MODIFIED WEIGHT FOR HIDDEN LAYER ###################\n";
    for(j=0;j<hidden_unit;j++)
        for(i=0;i<no_bits+1;i++)
            cout<<"\nWeight ("<<i+1<<","<<j+1<<") : "<<weight_hidden[i][j];

    cout<<"\n############## MODIFIED WEIGHT FOR OUTPUT LAYER ################\n";
    for(i=0;i<hidden_unit+1;i++)
        cout<<"\nWeight ("<<i+1<<",out) : "<<weight_out[i];

    /*USED FOR CALCULATING OUTPUT FOR A PARTICULAR SAMPLE (TESTING)*/
    cout<<"\n########################### TESTING PHASE ################################\n";
    c=0;
    while(c==0)
    {
        cout<<"\nEnter the input for testing ";
        for(i=0;i<no_bits;i++)
            cin>>in[i];
        in[no_bits]=1;                          //bias input

        for(i=0;i<hidden_unit;i++)
        {
            net_hidden[i]=0;
            for(k=0;k<no_bits+1;k++)
                net_hidden[i]=net_hidden[i] + (in[k] * weight_hidden[k][i]);
        }
        for(i=0;i<hidden_unit;i++)
            actual_out_hidden[i]=1/(1+exp(-net_hidden[i]));
        actual_out_hidden[hidden_unit]=1;

        net_out=0;
        for(k=0;k<hidden_unit+1;k++)
            net_out=net_out + (actual_out_hidden[k]*weight_out[k]);
        actual_out_out=1/(1+exp(-net_out));

        cout<<"output= "<<actual_out_out;
        cout<<"\nPress 0 to test again else 1 to exit : ";
        cin>>c;
    }
    return(0);
}
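
The weight-change expressions in the program above are the delta rule for sigmoid units: the sigmoid derivative is out*(1 - out), so the output delta is (target - out)*out*(1 - out) and each hidden delta is that value propagated back through the corresponding output weight and scaled by the hidden unit's own derivative. The sketch below isolates those two terms; it is an illustration of the formulas only, and the helper names delta_out and delta_hidden as well as the sample values in main are assumptions, not part of the program above.

#include<iostream>
#include<math.h>
using namespace std;

//Sigmoid activation used by the network above.
double sigmoid(double net) { return 1.0/(1.0+exp(-net)); }

//Delta of the single output neuron: (target - out) * out * (1 - out).
double delta_out(double target, double out)
{
    return (target - out) * out * (1.0 - out);
}

//Delta of one hidden unit: the output delta propagated back through its
//outgoing weight, times the sigmoid derivative of the hidden unit's output.
double delta_hidden(double d_out, double weight_out_j, double hidden_out_j)
{
    return d_out * weight_out_j * hidden_out_j * (1.0 - hidden_out_j);
}

int main()
{
    //illustrative values only: one hidden output, one outgoing weight, one target
    double out = sigmoid(0.5), target = 1.0, hidden_out = sigmoid(0.3), w_out = 1.0;
    double d_o = delta_out(target, out);
    double d_h = delta_hidden(d_o, w_out, hidden_out);
    cout<<"delta_out = "<<d_o<<"\tdelta_hidden = "<<d_h<<"\n";
    return 0;
}

Multiplying delta_out by eta and by a hidden-layer output reproduces change_weight_out[i] in the program above, and multiplying delta_hidden by eta and by an input bit reproduces change_weight_hidden[i][j].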
WAP FOR COMPETITIVE LEARNING

#include<iostream>
#include<math.h>
#include<stdlib.h>
#define MAX 10
#define OUT 2
#define learning_rate 0.01
using namespace std;

int main()
{
int input[MAX][MAX], classify[MAX], active[MAX], i, j, k, n_samples, n_bits, winner, in_test[MAX], wish;
double weight[MAX][MAX], sum, mul, net[MAX], change_weight;

cout<<"\n######### TRAINING PHASE ##########\n";


cout<<"Enter no. of input samples\t";
cin>>n_samples;

cout<<"\nEnter no. of bits in input\t";


cin>>n_bits;

cout<<"\nEnter input samples to classify\t";

for(i=0;i<n_samples;i++)
{
active[i] = 0;
cout<<"\nEnter \t"<< i+1 <<"\tsample\t";
for(j=0;j<n_bits;j++)
{
cin>>input[i][j];
if(input[i][j] == 1)
{
active[i] = active[i] + 1;//determine no of active elements
}
}
}
for(j=0;j<OUT;j++)
{
sum = 0;
for(k=0;k<n_bits;k++)
{
weight[j][k] = rand();//initialize weight randomly
sum = sum + weight[j][k];
}
for(k=0;k<n_bits;k++)
{
weight[j][k] = weight[j][k] / sum;//normalize weights so each unit's weights sum to 1
cout<<"\nWeight W("<<j+1<<","<<k+1<<")\t"<<weight[j][k];
}
}

for(i=0;i<n_samples;i++)
{
label1:
for(j=0;j<OUT;j++)
{
net[j] = 0;
for(k=0;k<n_bits;k++)
{
mul = input[i][k] * weight[j][k];
net[j] = net[j] + mul;//calculate net value
}
}
winner = 0;
for(j=1;j<OUT;j++)
{
if(net[j-1] < net[j])
{
winner = j;//identify winner
}
}//weight modification
for(j=0;j<n_bits;j++)
{
change_weight = (learning_rate * ((input[i][j] / active[i]) - weight[winner][j]));
if(change_weight > 0.0001)
{//stopping condition for learning
weight[winner][j] = weight[winner][j] + change_weight;
}
else{
goto label2;
}
}
goto label1;
label2:
classify[i] = winner;
}

for(i=0;i<n_samples;i++)
{
cout<<"\n"<<i+1<<"SAMPLE";
for(j=0;j<n_bits;j++)
{
cout<<"\t"<<input[i][j];
}
cout<<"\tbelongs to\t"<<classify[i]<<"\tclass";
}

cout<<"\n########### TESTING PHASE ##########\n";


do
{
cout<<"\nEnter testing data\n";
for(i=0;i<n_bits;i++)
{
cin>>in_test[i];

}
for(j=0;j<OUT;j++)
{
net[j] = 0;
for(k=0;k<n_bits;k++)
{
mul = in_test[k] * weight[j][k];
net[j] = net[j] + mul;
}
}
winner = 0;
for(j=1;j<OUT;j++)
{
if(net[j-1] < net[j])
{
winner = j;
}
}
cout<<"\nTest data belongs to\t"<<winner<<"\tclass";
cout<<"\nPress 1 to continue : ";
cin>>wish;
}while(wish == 1);
return(0);
}
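
The modification applied to the winning unit in the program above follows the competitive-learning rule change_weight = learning_rate * (input_bit/active - weight), which pulls the winner's normalized weight vector toward the normalized input pattern while the losing units stay unchanged. The sketch below shows one such update step in isolation; the helper name update_winner and the example values in main are illustrative assumptions, not part of the program above.

#include<iostream>
using namespace std;
#define MAX 10
#define learning_rate 0.01

//One competitive-learning step for the winning output unit:
//weight[j] moves toward sample[j]/active, where active is the number of 1-bits
//in the sample, so the weight vector stays approximately normalized (sum near 1).
void update_winner(double weight_winner[], int sample[], int n_bits, int active)
{
    for(int j=0;j<n_bits;j++)
    {
        double change_weight = learning_rate * (((double)sample[j] / active) - weight_winner[j]);
        weight_winner[j] = weight_winner[j] + change_weight;
    }
}

int main()
{
    //illustrative 4-bit sample with two active bits and an already-normalized weight vector
    int sample[MAX] = {1, 0, 1, 0};
    double w[MAX] = {0.25, 0.25, 0.25, 0.25};
    int n_bits = 4, active = 2;

    update_winner(w, sample, n_bits, active);
    for(int j=0;j<n_bits;j++)
        cout<<"w"<<j+1<<" = "<<w[j]<<"\t";      //weights drift toward 0.5, 0, 0.5, 0
    cout<<"\n";
    return 0;
}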
