
0201CS081076

JABALPUR ENGINEERING COLLEGE

PROGRAM NO.1
OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURON USING THE THRESHOLD FUNCTION.
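The neuron forms the weighted sum net = x1*w1 + x2*w2 + x3*w3 + x4*w4 and passes it through a hard threshold: the output is 1 if net exceeds the threshold t (here t = 50) and 0 otherwise.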
PROGRAM:

#include<iostream.h>
#include<conio.h>

void main()
{
    clrscr();
    int x1,x2,x3,x4,w1,w2,w3,w4,t,net;
    t=50;                      // threshold value
    x1=2; x2=1; x3=3; x4=2;    // fixed input values
    cout<<"enter weights\n";
    cout<<"enter value of w1\n";
    cin>>w1;
    cout<<"enter value of w2\n";
    cin>>w2;
    cout<<"enter value of w3\n";
    cin>>w3;
    cout<<"enter value of w4\n";
    cin>>w4;
    net=x1*w1+x2*w2+x3*w3+x4*w4;   // weighted sum of the inputs
    cout<<"the value of net is="<<net<<"\n";
    if(net<=t)
    {
        cout<<"value of threshold is 0";
    }
    else
    {
        cout<<"value of threshold is 1";
    }
    getch();
}

OUTPUT
enter weights
enter value of w1: 12
enter value of w2: 9
enter value of w3: 6
enter value of w4: 5
the value of net is= 61
value of threshold is 1

PROGRAM NO.2
OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURON USING THE SIGMOIDAL FUNCTION.
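The sigmoidal (logistic) activation maps the net input into the range (0, 1) through f(net) = 1 / (1 + e^(-net)); the program evaluates it for each entered net value and sketches the resulting curve in graphics mode.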
PROGRAM:

#include<iostream.h>
#include<conio.h>
#include<graphics.h>
#include<math.h>
#include<process.h>

void main()
{
    clrscr();
    int gdriver=DETECT,gmode,errorcode;
    initgraph(&gdriver,&gmode,"");
    errorcode=graphresult();
    if(errorcode!=grOk)
    {
        cout<<"graphics error\n"<<grapherrormsg(errorcode);
        cout<<"press any key to halt:";
        getch();
        exit(1);
    }
    int i,n;
    float net[10],sig[10];
    cout<<"enter no of net values\n";
    cin>>n;
    for(i=1;i<=n;i++)
    {
        cout<<"enter net value "<<i<<"\n";
        cin>>net[i];
        sig[i]=1/(1+exp(-net[i]));               // sigmoidal activation
        cout<<"sigmoidal "<<i<<"="<<sig[i]<<endl;
    }
    clearviewport();
    int xmax,ymax;
    xmax=getmaxx();
    ymax=getmaxy();
    line(0,0,0,ymax);          // y-axis
    line(xmax,ymax,0,ymax);    // x-axis
    lineto(0,ymax);            // move the current point to the plot origin
    for(i=1;i<=n;i++)
    {
        lineto(net[i],sig[i]); // join the (net, sigmoid) points
    }
    getch();
}

OUTPUT

PROGRAM NO.3
OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ARTIFICIAL NEURON USING THE HYPERBOLIC TANGENT FUNCTION.
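The hyperbolic tangent activation f(net) = tanh(net) squashes the weighted sum of the inputs into the range (-1, 1); the program prints tanh(net) for the entered pattern and can also plot the tanh curve in graphics mode.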
PROGRAM:

#include<iostream.h>
#include<conio.h>
#include<graphics.h>
#include<math.h>
#include<process.h>

void main()
{
    clrscr();
    int i,a[10],th;
    float w[10],net=0,out;
    char ch;
    cout<<"enter no of input vectors"<<endl;
    cin>>i;
    for(int j=0;j<i;j++)
    {
        cout<<"enter value of input vectors (must be 0 or 1)"<<endl;
        cin>>a[j];
        cout<<"enter value of weight of input vectors (must be between 0 and 1)"<<endl;
        cin>>w[j];
    }
    for(j=0;j<i;j++)
    {
        net=net+a[j]*w[j];         // weighted sum of the inputs
    }
    out=tanh(net);                 // hyperbolic tangent activation
    cout<<"output value ="<<out<<endl;
    cout<<"press G to view the graph"<<endl;
    cin>>ch;
    if(ch=='g' || ch=='G')
    {
        int gd,gm;
        gd=DETECT;
        initgraph(&gd,&gm,"c:\\TC\\BGI\\");
        float x,y;
        int axisx=getmaxx()/2;
        int axisy=getmaxy()/2;
        line(0+axisx,0,0+axisx,axisy*2);   // y-axis
        line(0,axisy,axisx*2,axisy);       // x-axis
        for(x=-500;x<getmaxx();x++)
        {
            y=tanh(x*(3.14)/180);          // treat x as degrees and convert to radians
            putpixel((axisx+x),axisy-(100*y)-10,6);
        }
    }
    getch();
}

OUTPUT

enter no of input vectors
enter value of input vectors (must be 0 or 1)
1
enter value of weight of input vectors (must be between 0 and 1)
.7
enter value of input vectors (must be 0 or 1)
0
enter value of weight of input vectors (must be between 0 and 1)
.4
output value =0.905148
press G to view the graph

PROGRAM NO.4
OBJECT: WRITE A PROGRAM TO IMPLEMENT AN ADALINE NETWORK.
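Each Adaline unit computes the weighted sum net of its inputs, net = sum of a[j]*w[j], and gives a binary output: 1 when net reaches the threshold and 0 otherwise. The main routine collects the outputs of the elementary units and also prints their product s.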
PROGRAM:

#include<iostream.h>
#include<conio.h>

int adaline();

void main()
{
    int n,s=1,t[5],i;
    clrscr();
    cout<<"enter no of elementary networks"<<endl;
    cin>>n;
    for(i=0;i<n;i++)
    {
        t[i]=adaline();          // output of each adaline unit
    }
    for(i=0;i<n;i++)
    {
        cout<<t[i]<<endl;
        s=s*t[i];                // combine the unit outputs
    }
    cout<<s;
    getch();
}

int adaline()
{
    int i,a[10],th,x=0;
    float w[10],net=0;
    cout<<"enter no of input vectors"<<endl;
    cin>>i;
    for(int j=0;j<i;j++)
    {
        cout<<"enter value of input vectors"<<endl;
        cin>>a[j];
        cout<<"enter value of weight of input vector (must be between 0 and 1)"<<endl;
        cin>>w[j];
    }
    cout<<"enter threshold value:"<<endl;
    cin>>th;
    for(j=0;j<i;j++)
    {
        net=net+a[j]*w[j];       // weighted sum of the inputs
    }
    if(net>=th)
    {
        cout<<"output is:1"<<endl;
        x=1;
    }
    else
    {
        cout<<"output is:0"<<endl;
        x=0;
    }
    return(x);
}

OUTPUT
enter no of elementary networks
2
enter no of input vectors
2
enter value of input vectors
1
enter value of weight of input vector (must be between 0 and 1)
.5
enter value of input vectors
1
enter value of weight of input vector (must be between 0 and 1)
.8
enter threshold value:
1
output is:1
enter no of input vectors
2
enter value of input vectors
1
enter value of weight of input vector (must be between 0 and 1)
.7
enter value of input vectors
0
enter value of weight of input vector (must be between 0 and 1)
.5
enter threshold value:
1
output is:0

PROGRAM NO.5
OBJECT: WRITE A PROGRAM TO IMPLEMENT A MADALINE NETWORK.
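A Madaline (many Adalines) combines several Adaline units; here each unit produces a binary output exactly as in Program 4, and the overall result s is the product of the unit outputs.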
PROGRAM:

#include<iostream.h>
#include<conio.h>

int madaline();

void main()
{
    int n,s=1,t[5],i;
    clrscr();
    cout<<"enter no of elementary networks"<<endl;
    cin>>n;
    for(i=0;i<n;i++)
    {
        t[i]=madaline();         // output of each elementary unit
    }
    for(i=0;i<n;i++)
    {
        cout<<t[i]<<endl;
        s=s*t[i];                // combine the unit outputs
    }
    cout<<s;
    getch();
}

int madaline()
{
    int i,a[10],th,x=0;
    float w[10],net=0;
    cout<<"enter no of input vectors"<<endl;
    cin>>i;
    for(int j=0;j<i;j++)
    {
        cout<<"enter value of input vectors"<<endl;
        cin>>a[j];
        cout<<"enter value of weight of input vector (must be between 0 and 1)"<<endl;
        cin>>w[j];
    }
    cout<<"enter threshold value:"<<endl;
    cin>>th;
    for(j=0;j<i;j++)
    {
        net=net+a[j]*w[j];       // weighted sum of the inputs
    }
    if(net>=th)
    {
        cout<<"output is:1"<<endl;
        x=1;
    }
    else
    {
        cout<<"output is:0"<<endl;
        x=0;
    }
    return(x);
}

PROGRAM NO.6
OBJECT: WRITE A PROGRAM TO TRAIN A NEURAL NETWORK USING THE BACK-PROPAGATION ALGORITHM.
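For every training pattern the output-layer error term is delta2 = (desired - actual) * actual * (1 - actual), the hidden-layer term delta1 is formed from the unit's output derivative and the summed output-layer terms, and each weight is then moved against its error gradient by the learning constant (learn = 0.1).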
PROGRAM:

function [layer1,layer2] = nn_train(data, output)
% data(N,:)   is training data set N
% output(N,:) is the corresponding output
input_count=2;
layer1_count=2;
output_count=2;
layer1=rand(2,2);
layer2=rand(2,2);
learn=0.1;                  % learning constant
[n,m]=size(data);
for data_set=1:n
    % forward pass through the network
    [out,inputs1,inputs2,outs1,outs2] = nn_eval(transpose(data(data_set,:)), layer1, layer2, 1.0);

    % compute deltas for each layer
    delta2 = zeros(output_count);
    delta1 = zeros(layer1_count);
    for i=1:output_count
        % delta2 is (desired - actual) * actual * (1 - actual)
        delta2(i) = (output(data_set,i) - out(i,1)) * out(i,1) * (1 - out(i,1));
    end
    for i=1:layer1_count
        d_sum = 0;
        % Sum up the previous deltas*inputs
        for j=1:output_count
            d_sum = d_sum - delta2(j)*inputs2(i,j);
        end
        % delta1 is output * (1 - output) * sum above
        delta1(i) = outs2(i,1) * (1 - outs2(i,1)) * d_sum;
    end
    % second layer weights
    [p,q] = size(inputs2);
    for k=1:output_count
        for j=1:p
            % Adjust the weights by -learning constant * delta * input
            layer2(j,k) = layer2(j,k) + -learn * delta2(k) * inputs2(j,k);
        end
    end
    % first layer weights
    [p,q] = size(inputs1);
    for k=1:q
        for j=1:p
            % Adjust the weights by -learning constant * output * (1 - output) * delta * input
            layer1(j,k) = layer1(j,k) + -learn * outs2(k,1) * (1 - outs2(k,1)) * delta1(k) * inputs1(j,k);
        end
    end
end
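The listing calls a helper function nn_eval that is not included in this record. The following is a minimal sketch of such a forward-pass routine, assuming logistic (sigmoidal) units and a constant bias input; the argument roles and the shapes of the returned variables are inferred from the way nn_train indexes them, so it is an illustration rather than the original helper.

function [out,inputs1,inputs2,outs1,outs2] = nn_eval(x, layer1, layer2, bias)
% Forward pass through a two-layer logistic network (assumed helper).
% x       - column vector holding one training pattern
% layer1  - hidden-layer weights, one column per hidden unit
% layer2  - output-layer weights, one column per output unit
% bias    - constant bias input fed to every unit (1.0 in nn_train)
h   = 1 ./ (1 + exp(-(layer1' * x + bias)));     % hidden-unit activations
out = 1 ./ (1 + exp(-(layer2' * h + bias)));     % output-unit activations
inputs1 = repmat(x, 1, size(layer1,2));          % input seen by each hidden unit
inputs2 = repmat(h, 1, size(layer2,2));          % input seen by each output unit
outs1 = out;                                     % network outputs
outs2 = h;                                       % hidden-layer outputs (indexed this way in nn_train)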
OUTPUT
data =
    1.0000    0.9000
    1.4000    1.2000
    1.0000    1.0000
    1.0000    1.0000

output =
    0.1000    0.0900
    0.1400    0.1200
    0.1000    0.1000
    0.1000    0.1000

ans =
    0.5248    0.5714

PROGRAM NO.7
OBJECT: WRITE A PROGRAM TO TRAIN A COUNTER-PROPAGATION NETWORK.
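Training selects the winning cluster unit J as the one whose weight vectors are closest (smallest summed squared distance D(j)) to the input pair (x, y), and then moves its weights toward the inputs: v(:,J) = v(:,J) + alpha*(x' - v(:,J)) and w(:,J) = w(:,J) + alpha*(y' - w(:,J)), with learning rate alpha = 0.3.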
PROGRAM:

clear;
% set initial weights
v=[0.6 0.2;0.6 0.2;0.2 0.6;0.2 0.6];
w=[0.4 0.3;0.4 0.3];
x=[0 1 1 0];
y=[1 0];
alpha=0.3;
% squared distance of the (x,y) pair from each cluster unit
for j=1:2
    D(j)=0;
    for i=1:4
        D(j)=D(j)+(x(i)-v(i,j))^2;
    end
    for k=1:2
        D(j)=D(j)+(y(k)-w(k,j))^2;
    end
end
% select the winning cluster unit J
for j=1:2
    if D(j)==min(D)
        J=j;
    end
end
disp('After one step the weight matrices are');
v(:,J)=v(:,J)+alpha*(x'-v(:,J))
w(:,J)=w(:,J)+alpha*(y'-w(:,J))

OUTPUT
After one step the weight matrices are
v =
    0.4200    0.2000
    0.7200    0.2000
    0.4400    0.6000
    0.1400    0.6000

w =
    0.5800    0.3000
    0.2800    0.3000

PROGRAM NO.8
OBJECT: WRITE A PROGRAM TO IMPLEMENT THE ART 1 ALGORITHM.
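The winning cluster unit J is accepted only if it passes the vigilance test nx/ns >= vp, where x = s AND t(J,:), nx = sum(x) and ns = sum(s); a unit that fails is inhibited and the search continues. On acceptance the bottom-up weights become L*x/(L-1+nx) and the top-down weights are set to x.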
PROGRAM:

clc;
clear;
% initial bottom-up (b) and top-down (t) weights
b=[0.57 0.0 0.3;0.0 0.0 0.3;0.0 0.57 0.3;0.0 0.47 0.3];
t=[1 1 0 0;1 0 0 1;1 1 1 1];
vp=0.4;                 % vigilance parameter
L=2;
x=[1 0 1 1];            % input pattern
s=x;
ns=sum(s);
y=x*b;                  % net input to each cluster unit
con=1;
while con
    % find the winning cluster unit
    for i=1:3
        if y(i)==max(y)
            J=i;
        end
    end
    x=s.*t(J,:);
    nx=sum(x);
    % vigilance test
    if nx/ns >= vp
        b(:,J)=L*x(:)/(L-1+nx);
        t(J,:)=x(1,:);
        con=0;
    else
        y(J)=-1;        % inhibit the failed unit and search again
        con=1;
    end
    if y+1==0
        con=0;
    end
end
disp('Top Down Weights');
disp(t);
disp('Bottom up Weights');
disp(b);

OUTPUT
Top Down Weights
     1     1     0     0
     1     0     0     1
     1     1     1     1

Bottom up Weights
    0.5700    0.6667    0.3000
         0         0    0.3000
         0         0    0.3000
         0    0.6667    0.3000

PROGRAM NO.9
OBJECT: WRITE A PROGRAM FOR THE DELTA LEARNING ALGORITHM.
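The delta (gradient-descent) rule changes each weight in proportion to the negative gradient of the squared output error, w = w - eta * dE/dw; here a single hidden layer of tanh units is trained on the housing data with eta = 0.05/numPatterns.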
PROGRAM:

% ---------------------
% Main Program
% ---------------------
% Load data
load housing.txt
X = housing(:,1:13);
t = housing(:,14);
% Scale to zero mean, unit variance and introduce bias on input.
xmean = mean(X);
xstd = std(X);
X = (X-ones(size(X,1),1)*xmean)./(ones(size(X,1),1)*xstd);
X = [ones(size(X,1),1) X];
tmean = mean(t);
tstd = std(t);
t = (t-tmean)/tstd;
% Iterate over a number of hidden nodes
maxHidden = 2;
numEpochs = 2000;             % number of training epochs (the plot axis runs to 2000)
for numHidden=1:maxHidden
    % Initialise random weight vector.
    % Wh are hidden weights, wo are output weights.
    randn('seed', 123456);
    Wh = 0.1*randn(size(X,2),numHidden);
    wo = 0.1*randn(numHidden+1,1);
    numPatterns = size(X,1);
    eta = 0.05/numPatterns;   % learning rate
    for i=1:numEpochs
        % Calculate outputs, errors, and gradients.
        phi = [ones(size(X,1),1) tanh(X*Wh)];
        y = phi*wo;
        err = y-t;
        go = phi'*err;
        Gh = X'*((1-phi(:,2:numHidden+1).^2).*(err*wo(2:numHidden+1)'));
        % Perform gradient descent.
        wo = wo - eta*go;
        Wh = Wh - eta*Gh;
        % Update performance statistics.
        mse(i) = var(err);
    end
    plot(1:numEpochs, mse, '-')
    hold on
end
fsize=15;
set(gca,'xtick',[0:500:2000],'FontSize',fsize)
set(gca,'ytick',[0:0.5:1],'FontSize',fsize)
xlabel('Number of Epochs','FontSize',fsize)
ylabel('Mean Squared Error','FontSize',fsize)
hold off
% ---------------------
% End of Main
% ---------------------

OUTPUT

PROGRAM NO.10
OBJECT: WRITE A PROGRAM FOR THE HEBBIAN LEARNING ALGORITHM.
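The Hebb rule strengthens a weight whenever its input and the output are active together: w(i) = w(i) + x(i)*y, which is the update w[i] += x[p][i]*y[p][j] applied for each of the four training patterns.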
PROGRAM:

#include <iostream>
using namespace std;

int main()
{
    int x[5][3],w[3],y[5][2],i,j,p;
    // start with zero weights
    for(i=0;i<=2;++i)
        w[i]=0;
    // x[p][0] is the bias input, fixed at 1 for every pattern
    for(p=1;p<=4;++p)
        x[p][0]=1;
    for(p=1;p<=4;++p)
    {
        cout<<"Enter the input Pattern number "<<p<<" : \n";
        for(i=1;i<=2;++i)
            cin>>x[p][i];
        cout<<"Enter the corresponding output \n";
        for(j=1;j<=1;++j)
            cin>>y[p][j];
        // Hebbian update: w(new) = w(old) + x*y
        for(i=0;i<=2;++i)
            for(j=1;j<=1;++j)
            {
                w[i]+=x[p][i]*y[p][j];
                cout<<endl;
            }
    }
    cout<<"The final weights = [ "<<w[0]<<" "<<w[1]<<" "<<w[2]<<" ]"<<endl;
    return 0;
}

