Write A Program For Implementing Perceptron Learning Algorithm
ALGORITHM
#include<stdlib.h>
#include<iostream>
#include<cmath>
#define learn_rate 0.3
#define MAX 20
using namespace std;
int main()
{
int x[MAX][MAX], y[MAX], in[MAX], act_out, i, j, m, n, iteration = 0;
float w[MAX], calc_out = 0.0, diff_out = 1.0, change_weight, threshold = 0.5;
cout<<"\nEnter number of training samples: ";
cin>>n;
cout<<"Enter number of inputs per sample: ";
cin>>m;
for(i=0;i<n;i++)
{
cout<<"\nEnter training set\n";
for(j=0;j<m;j++)
{
cin>>x[i][j]; //Insertion of training sets
}
x[i][m] = 1; //bias input
cout<<"Enter desired output (0 or 1): ";
cin>>y[i]; //Insertion of target output
}
for(j=0;j<=m;j++)
{
w[j] = 0.0; //initial weights
}
for(i=0;i<n;i++)
{
calc_out = 0.0;
for(j=0;j<=m;j++)
{
calc_out = calc_out + (float)(x[i][j] * w[j]); //net input
}
act_out = (calc_out >= threshold) ? 1 : 0; //actual output
diff_out = (float)(y[i] - act_out); //error for this sample
if(diff_out != 0.0)
{
for(j=0;j<=m;j++)
{
change_weight = learn_rate * diff_out * x[i][j]; //perceptron weight change
w[j] = w[j] + change_weight;
}
iteration++;
i = -1; //restart the pass until every sample is classified correctly
}
}
cout<<"\nWeight updates\t"<<iteration;
cout<<"\n";
for(j=0;j<=m;j++)
{
cout<<"\tw"<<j+1<<"\t"<<w[j]; //final weights
}
cout<<"\nThreshold\t"<<threshold;
do{
cout<<"\nEnter Test set\n";
for(j=0;j<m;j++)
{
cin>>in[j];
}
calc_out=0;
for(j=0;j<m;j++)
{ //Test case insertion
calc_out = calc_out + (float)(in[j] * w[j]); //Actual output
}
if(calc_out >= threshold) //Checking for threshold value
{
cout<<"Output for this Test case is 1\n";
}
else
{
cout<<"Output for this Test case is 0\n";
}
cout<<"Press 1 for test again else 0 : ";
cin>>i;
}while(i==1);
return(0);
}
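To sanity-check the update rule used above, here is a separate minimal sketch that trains the same kind of single perceptron on the AND function (the AND training set, the zero starting weights and the fixed 0.5 threshold are assumptions chosen for illustration, not part of the original listing):

#include<iostream>
using namespace std;
#define learn_rate 0.3

int main()
{
    // AND-gate training set with a constant bias input appended (assumed example data)
    int x[4][3] = { {0,0,1}, {0,1,1}, {1,0,1}, {1,1,1} };
    int y[4]    = { 0, 0, 0, 1 };
    float w[3]  = { 0.0, 0.0, 0.0 }, threshold = 0.5;
    for(int i=0;i<4;i++)
    {
        float net = 0.0;
        for(int j=0;j<3;j++)
            net = net + x[i][j]*w[j];            //net input
        int out = (net >= threshold) ? 1 : 0;    //thresholded output
        int err = y[i] - out;                    //sample error
        if(err != 0)
        {
            for(int j=0;j<3;j++)
                w[j] = w[j] + learn_rate*err*x[i][j];   //perceptron weight update
            i = -1;                              //restart the pass after any change
        }
    }
    for(int j=0;j<3;j++)
        cout<<"w"<<j+1<<" = "<<w[j]<<"\n";
    return 0;
}

With these assumed inputs the loop settles on weights close to 0.6, 0.3 and -0.3, which put only the (1,1) pattern at or above the 0.5 threshold, as expected for AND.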
WAP FOR BACK PROPAGATION LEARNING ALGORITHM
#include<iostream>
#include<math.h>
#include<time.h>
#define eta 2.0
using namespace std;
int main()
{
int c,i,j,hidden_unit,no_bits,k,p,no_sample;
float input[10][10],target_out[10],weight_hidden[10][10],weight_out[10],change_weight_hidden[10][10],change_weight_out[10];
float actual_out_hidden[10],net_hidden[10],net_out,actual_out_out,in[10];
double error;
clock_t start,end;
cout<<"Enter the number of hidden units: ";
cin>>hidden_unit;
cout<<"Enter the number of input bits: ";
cin>>no_bits;
cout<<"Enter the number of training samples: ";
cin>>no_sample;
for (i=0;i<no_sample;i++)
{
cout<<"Enter sample "<<i+1<<": ";
for (j=0;j<no_bits;j++)
{
cin>>input[i][j];
}
cout<<"Enter the corresponding output: ";
cin>>target_out[i];
input[i][no_bits]=1; //bias input for each sample
}
for (i=0;i<no_bits+1;i++)
for (j=0;j<hidden_unit;j++)
weight_hidden[i][j]=1; //initial hidden-layer weights
for (i=0;i<hidden_unit+1;i++)
weight_out[i]=1; //initial output-layer weights
start = clock();
error=1;
while(error>0.1)
{
error=0;
for(p=0;p<no_sample;p++) //loop over the training samples
{
for(i=0;i<hidden_unit;i++)
{
net_hidden[i]=0;
for(k=0;k<no_bits+1;k++)
net_hidden[i]=net_hidden[i] + (input[p][k] * weight_hidden[k][i]);
}
for(i=0;i<hidden_unit;i++)
actual_out_hidden[i]=1/(1+exp(-net_hidden[i])); //calculation of the output of hidden layer
actual_out_hidden[hidden_unit]=1; //bias unit of the hidden layer
net_out=0;
for(k=0;k<hidden_unit+1;k++)
net_out=net_out + (actual_out_hidden[k]*weight_out[k]);
actual_out_out=1/(1+exp(-net_out)); //calculation of output of output layer neuron
error=error+(target_out[p]-actual_out_out)*(target_out[p]-actual_out_out); //error calculation
for (i=0;i<hidden_unit+1;i++)
{
change_weight_out[i]=eta*(target_out[p]-actual_out_out)*actual_out_out*(1-actual_out_out)*actual_out_hidden[i];
weight_out[i]=weight_out[i]+change_weight_out[i]; //output-layer weight update
}
for (j=0;j<hidden_unit;j++)
for (i=0;i<no_bits+1;i++)
{
change_weight_hidden[i][j]=eta*(target_out[p]-actual_out_out)*actual_out_out*(1-actual_out_out)*actual_out_hidden[j]*(1-actual_out_hidden[j])*input[p][i]*weight_out[j];
weight_hidden[i][j]=weight_hidden[i][j]+change_weight_hidden[i][j]; //hidden-layer weight update
}
}
}
cout<<"\nGlobal Error="<<error;
end = clock();
cout<<"\nLearning Time "<<(double)(end-start)/CLOCKS_PER_SEC<<" (in seconds)";
//FINAL WEIGHTS
cout<<"\n################# MODIFIED WEIGHT FOR HIDDEN LAYER ###################\n";
for (j=0;j<hidden_unit;j++)
for (i=0;i<no_bits+1;i++)
cout<<"\nWeight ("<<i+1<<","<<j+1<<") : "<<weight_hidden[i][j];
cout<<"\n################# MODIFIED WEIGHT FOR OUTPUT LAYER ###################\n";
for (i=0;i<hidden_unit+1;i++)
{
cout<<"\nWeight ("<<i+1<<","<<"out) : "<<weight_out[i];
}
c=0;
while(c==0)
{
cout<<"\nEnter Test set\n";
for(i=0;i<no_bits;i++)
cin>>in[i];
in[no_bits]=1; //bias input for the test pattern
for(i=0;i<hidden_unit;i++)
{
net_hidden[i]=0;
for(k=0;k<no_bits+1;k++)
net_hidden[i]=net_hidden[i] + (in[k] * weight_hidden[k][i]);
}
for(i=0;i<hidden_unit;i++)
actual_out_hidden[i]=1/(1+exp(-net_hidden[i]));
actual_out_hidden[hidden_unit]=1;
net_out=0;
for(k=0;k<hidden_unit+1;k++)
net_out=net_out + (actual_out_hidden[k]*weight_out[k]);
actual_out_out=1/(1+exp(-net_out));
cout<<"output= "<<actual_out_out;
cout<<"\nPress 0 to test again, any other number to exit: ";
cin>>c;
}
// getch();
return(0);
}
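To make the weight-update formulas above easier to check by hand, the following standalone sketch runs one forward pass and one weight update of the same delta rules on a tiny 2-input, 2-hidden-unit, 1-output network (the single training pair (1,0) -> 1 and the all-ones starting weights are assumptions chosen for illustration), printing the network output before and after the update:

#include<iostream>
#include<math.h>
using namespace std;
#define eta 2.0

int main()
{
    // One training step on an assumed 2-2-1 network: two inputs plus bias, target 1.
    double in[3]  = {1, 0, 1};
    double target = 1;
    double wh[3][2], wo[3], hid[3], net, out;
    for(int i=0;i<3;i++){ wo[i]=1; for(int j=0;j<2;j++) wh[i][j]=1; }   //all-ones starting weights

    for(int step=0; step<2; step++)              //show output before and after one update
    {
        for(int j=0;j<2;j++)                     //forward pass: hidden layer
        {
            net = 0;
            for(int i=0;i<3;i++) net += in[i]*wh[i][j];
            hid[j] = 1/(1+exp(-net));
        }
        hid[2] = 1;                              //hidden-layer bias unit
        net = 0;
        for(int j=0;j<3;j++) net += hid[j]*wo[j];
        out = 1/(1+exp(-net));                   //forward pass: output layer
        cout<<"output = "<<out<<"\n";
        if(step == 1) break;                     //second pass only reports the new output

        double delta_o = (target-out)*out*(1-out);            //output-layer delta
        for(int j=0;j<3;j++) wo[j] += eta*delta_o*hid[j];      //output-layer update
        for(int j=0;j<2;j++)                                   //hidden-layer update
        {
            double delta_h = delta_o*wo[j]*hid[j]*(1-hid[j]);  //uses the updated wo, as in the listing
            for(int i=0;i<3;i++) wh[i][j] += eta*delta_h*in[i];
        }
    }
    return 0;
}

With all weights at 1 the first printed output is roughly 0.94, and after one update with eta = 2.0 the second printed output moves slightly closer to the target of 1.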
WAP FOR COMPETITIVE LEARNING
#include<iostream>
#include<math.h>
#include<stdlib.h>
#define MAX 10
#define OUT 2
#define learning_rate 0.01
using namespace std;
int main()
{
int input[MAX][MAX],classify[MAX],active[MAX],i,j,k,n_samples,n_bits,winner,in_test[MAX],wish;
double weight[MAX][MAX],sum,mul,net[MAX],change_weight;
cout<<"\nEnter number of samples: ";
cin>>n_samples;
cout<<"Enter number of bits per sample: ";
cin>>n_bits;
for(i=0;i<n_samples;i++)
{
active[i] = 0;
cout<<"\nEnter \t"<< i+1 <<"\tsample\t";
for(j=0;j<n_bits;j++)
{
cin>>input[i][j];
if(input[i][j] == 1)
{
active[i] = active[i] + 1;//determine no of active elements
}
}
}
for(j=0;j<OUT;j++)
{
sum = 0;
for(k=0;k<n_bits;k++)
{
weight[j][k] = rand();//initialize weight randomly
sum = sum + weight[j][k];
}
for(k=0;k<n_bits;k++)
{
weight[j][k] = weight[j][k] / sum;//normalize weight so each unit's weights sum to 1
cout<<"\nWeight W("<<j+1<<","<<k+1<<")\t"<<weight[j][k];
}
}
for(i=0;i<n_samples;i++)
{
label1:
for(j=0;j<OUT;j++)
{
net[j] = 0;
for(k=0;k<n_bits;k++)
{
mul = input[i][k] * weight[j][k];
net[j] = net[j] + mul;//calculate net value
}
}
winner = 0;
for(j=1;j<OUT;j++)
{
if(net[winner] < net[j])
{
winner = j;//identify winner
}
}//weight modification
for(j=0;j<n_bits;j++)
{
change_weight = (learning_rate * (((double)input[i][j] / active[i]) - weight[winner][j]));
if(fabs(change_weight) > 0.0001)
{//stopping condition for learning
weight[winner][j] = weight[winner][j] + change_weight;
}
else{
goto label2;
}
}
goto label1;
label2:
classify[i] = winner;
}
for(i=0;i<n_samples;i++)
{
cout<<"\n"<<i+1<<"SAMPLE";
for(j=0;j<n_bits;j++)
{
cout<<"\t"<<input[i][j];
}
cout<<"\tbelongs to\t"<<classify[i]<<"\tclass";
}
do{
cout<<"\nEnter Test sample\t";
for(k=0;k<n_bits;k++)
{
cin>>in_test[k];
}
for(j=0;j<OUT;j++)
{
net[j] = 0;
for(k=0;k<n_bits;k++)
{
mul = in_test[k] * weight[j][k];
net[j] = net[j] + mul;
}
}
winner = 0;
for(j=1;j<OUT;j++)
{
if(net[winner] < net[j])
{
winner = j;
}
}
cout<<"\tbelongs to\t"<<winner<<"\tclass";
cout<<"press 1 to continue";
cin>>wish;
}while(wish == 1);
return(0);
}
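As a compact illustration of the winner-take-all update used above, the sketch below repeatedly presents two fixed binary patterns to two output units (the patterns, the hand-normalized starting weights and the 100-pass limit are assumptions for illustration, not part of the original listing):

#include<iostream>
using namespace std;
#define learning_rate 0.01
#define OUT 2

int main()
{
    // Two assumed binary patterns and hand-normalized starting weights
    // (each row sums to 1, with a mild preference so the units can specialize).
    int sample[2][4]  = { {1,1,0,0}, {0,0,1,1} };
    int active[2]     = { 2, 2 };                               //number of active bits per sample
    double weight[OUT][4] = { {0.3,0.3,0.2,0.2}, {0.2,0.2,0.3,0.3} };

    for(int epoch=0; epoch<100; epoch++)                        //fixed number of passes
    {
        for(int i=0;i<2;i++)
        {
            double net[OUT];
            int winner = 0;
            for(int j=0;j<OUT;j++)                              //net input of each output unit
            {
                net[j] = 0;
                for(int k=0;k<4;k++) net[j] += sample[i][k]*weight[j][k];
                if(net[j] > net[winner]) winner = j;            //winner-take-all
            }
            for(int k=0;k<4;k++)                                //move only the winner toward the pattern
                weight[winner][k] += learning_rate*((double)sample[i][k]/active[i] - weight[winner][k]);
        }
    }
    for(int j=0;j<OUT;j++)
    {
        cout<<"unit "<<j<<" weights:";
        for(int k=0;k<4;k++) cout<<" "<<weight[j][k];
        cout<<"\n";
    }
    return 0;
}

With these starting weights, unit 0 keeps winning on 1 1 0 0 and unit 1 on 0 0 1 1, so unit 0's weight vector drifts toward 0.5 0.5 0 0 and unit 1's toward 0 0 0.5 0.5, which is the clustering behaviour the main program is meant to show.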