Code not running and only showing as busy
1 view (last 30 days)
Rajdeep Ghosh
on 3 Aug 2020
Commented: Rajdeep Ghosh on 8 Aug 2020
Dear programmers,
I have written a piece of code. After adding a convergence criterion with a while loop, the code does not run and MATLAB only shows as busy. Please help.
Regards,
Rajdeep
input= xlsread('Input');
output = xlsread('Output');
bias = ones(1,3)*0.0005; %bias values
a=-1;
b=1;
rng('default')
V= a+(b-a)*rand(6,5); %weights between the input(6 nodes) and hidden(5 nodes) layers
rng('default')
W = a+(b-a)*rand(5,1); %weights between the hidden(5 nodes) and output(1 node) layers
transposed_input = input'; %inputs
d = output'; %target output
%initialization of del[W] and del[V]
del_W1=0;
del_W2=0;
del_W3=0;
del_W4=0;
del_W5=0;
del_V1=0;
del_V2=0;
del_V3=0;
del_V4=0;
del_V5=0;
del_V6=0;
del_V7=0;
del_V8=0;
del_V9=0;
del_V10=0;
del_V11=0;
del_V12=0;
del_V13=0;
del_V14=0;
del_V15=0;
del_V16=0;
del_V17=0;
del_V18=0;
del_V19=0;
del_V20=0;
del_V21=0;
del_V22=0;
del_V23=0;
del_V24=0;
del_V25=0;
del_V26=0;
del_V27=0;
del_V28=0;
del_V29=0;
del_V30=0;
for epoch=1:6000 %number of iterations
alpha=0.1; %learning rate
Mu=0.1; %Momentum Constant
N=80; %Number of training cases
for k=1:N
%Feed-Forward Network starts
%Output to input neurons
OI(1,k)=1./(1+exp(-(transposed_input(1,k)+bias(1,1))));
OI(2,k)=1./(1+exp(-(transposed_input(2,k)+bias(1,1))));
OI(3,k)=1./(1+exp(-(transposed_input(3,k)+bias(1,1))));
OI(4,k)=1./(1+exp(-(transposed_input(4,k)+bias(1,1))));
OI(5,k)=1./(1+exp(-(transposed_input(5,k)+bias(1,1))));
OI(6,k)=1./(1+exp(-(transposed_input(6,k)+bias(1,1))));
%Input to hidden neurons
IH1(1,k)= (OI(1,k)*V(1,1))+(OI(2,k)*V(2,1))+ (OI(3,k)*V(3,1))+ (OI(4,k)*V(4,1))+ (OI(5,k)*V(5,1))+(OI(6,k)*V(6,1))+bias(1,2);
IH2(1,k)= (OI(1,k)*V(1,2))+(OI(2,k)*V(2,2))+ (OI(3,k)*V(3,2))+ (OI(4,k)*V(4,2))+ (OI(5,k)*V(5,2))+(OI(6,k)*V(6,2))+bias(1,2);
IH3(1,k)= (OI(1,k)*V(1,3))+(OI(2,k)*V(2,3))+ (OI(3,k)*V(3,3))+ (OI(4,k)*V(4,3))+ (OI(5,k)*V(5,3))+(OI(6,k)*V(6,3))+bias(1,2);
IH4(1,k)= (OI(1,k)*V(1,4))+(OI(2,k)*V(2,4))+ (OI(3,k)*V(3,4))+ (OI(4,k)*V(4,4))+ (OI(5,k)*V(5,4))+(OI(6,k)*V(6,4))+bias(1,2);
IH5(1,k)= (OI(1,k)*V(1,5))+(OI(2,k)*V(2,5))+ (OI(3,k)*V(3,5))+ (OI(4,k)*V(4,5))+ (OI(5,k)*V(5,5))+(OI(6,k)*V(6,5))+bias(1,2);
%Output to hidden neurons
OH1(1,k) = (exp(IH1(1,k)+bias(1,2)) - exp(-(IH1(1,k)+bias(1,2))))./(exp(IH1(1,k)+bias(1,2)) + exp(-(IH1(1,k)+bias(1,2))));
OH2(1,k) = (exp(IH2(1,k)+bias(1,2)) - exp(-(IH2(1,k)+bias(1,2))))./(exp(IH2(1,k)+bias(1,2)) + exp(-(IH2(1,k)+bias(1,2))));
OH3(1,k) = (exp(IH3(1,k)+bias(1,2)) - exp(-(IH3(1,k)+bias(1,2))))./(exp(IH3(1,k)+bias(1,2)) + exp(-(IH3(1,k)+bias(1,2))));
OH4(1,k) = (exp(IH4(1,k)+bias(1,2)) - exp(-(IH4(1,k)+bias(1,2))))./(exp(IH4(1,k)+bias(1,2)) + exp(-(IH4(1,k)+bias(1,2))));
OH5(1,k) = (exp(IH5(1,k)+bias(1,2)) - exp(-(IH5(1,k)+bias(1,2))))./(exp(IH5(1,k)+bias(1,2)) + exp(-(IH5(1,k)+bias(1,2))));
%Input to Output neuron
IO(1,k)=OH1(1,k)*W(1,1)+OH2(1,k)*W(2,1)+OH3(1,k)*W(3,1)+OH4(1,k)*W(4,1)+OH5(1,k)*W(5,1)+bias(1,3);
%Output to Output neuron
Out(1,k)=IO(1,k)+bias(1,3);
%forward step calculation corresponding to each training case of a batch run terminates here
%BackPropagation of [V] and [W] starts
%Finding Mean Squared Error(E)
error(1,k)=d(1,k)-Out(1,k);
diff_Out(1,k)=[error(1,k).^2];
E= sum(diff_Out(1,k))./(2*N);
end
while (E >= 0.0004) %convergence criteria
for p=1:N
delta(1,p)=Out(1,p)*(1-Out(1,p))*error(1,p);
del=sum(delta(1,p));
%summation of Output to hidden neurons for 'k' training cases
OHS1=sum(OH1(1,p));
OHS2=sum(OH2(1,p));
OHS3=sum(OH3(1,p));
OHS4=sum(OH4(1,p));
OHS5=sum(OH5(1,p));
%summation of Output to input neurons for 'k' training cases
OI1=sum(OI(1,p));
OI2=sum(OI(2,p));
OI3=sum(OI(3,p));
OI4=sum(OI(4,p));
OI5=sum(OI(5,p));
OI6=sum(OI(6,p));
end
delta2_1=OHS1*(1-OHS1)*W(1,1)*del;
delta2_2=OHS2*(1-OHS2)*W(2,1)*del;
delta2_3=OHS3*(1-OHS3)*W(3,1)*del;
delta2_4=OHS4*(1-OHS4)*W(4,1)*del;
delta2_5=OHS5*(1-OHS5)*W(5,1)*del;
%delta[W]
del_W1=-alpha*OHS1*del+Mu*del_W1;
del_W2=-alpha*OHS2*del+Mu*del_W2;
del_W3=-alpha*OHS3*del+Mu*del_W3;
del_W4=-alpha*OHS4*del+Mu*del_W4;
del_W5=-alpha*OHS5*del+Mu*del_W5;
%delta[V]
del_V1=-alpha*OI1*delta2_1+Mu*del_V1;
del_V2=-alpha*OI2*delta2_1+Mu*del_V2;
del_V3=-alpha*OI3*delta2_1+Mu*del_V3;
del_V4=-alpha*OI4*delta2_1+Mu*del_V4;
del_V5=-alpha*OI5*delta2_1+Mu*del_V5;
del_V6=-alpha*OI6*delta2_1+Mu*del_V6;
del_V7=-alpha*OI1*delta2_2+Mu*del_V7;
del_V8=-alpha*OI2*delta2_2+Mu*del_V8;
del_V9=-alpha*OI3*delta2_2+Mu*del_V9;
del_V10=-alpha*OI4*delta2_2+Mu*del_V10;
del_V11=-alpha*OI5*delta2_2+Mu*del_V11;
del_V12=-alpha*OI6*delta2_2+Mu*del_V12;
del_V13=-alpha*OI1*delta2_3+Mu*del_V13;
del_V14=-alpha*OI2*delta2_3+Mu*del_V14;
del_V15=-alpha*OI3*delta2_3+Mu*del_V15;
del_V16=-alpha*OI4*delta2_3+Mu*del_V16;
del_V17=-alpha*OI5*delta2_3+Mu*del_V17;
del_V18=-alpha*OI6*delta2_3+Mu*del_V18;
del_V19=-alpha*OI1*delta2_4+Mu*del_V19;
del_V20=-alpha*OI2*delta2_4+Mu*del_V20;
del_V21=-alpha*OI3*delta2_4+Mu*del_V21;
del_V22=-alpha*OI4*delta2_4+Mu*del_V22;
del_V23=-alpha*OI5*delta2_4+Mu*del_V23;
del_V24=-alpha*OI6*delta2_4+Mu*del_V24;
del_V25=-alpha*OI1*delta2_5+Mu*del_V25;
del_V26=-alpha*OI2*delta2_5+Mu*del_V26;
del_V27=-alpha*OI3*delta2_5+Mu*del_V27;
del_V28=-alpha*OI4*delta2_5+Mu*del_V28;
del_V29=-alpha*OI5*delta2_5+Mu*del_V29;
del_V30=-alpha*OI6*delta2_5+Mu*del_V30;
end
%updated [W] and [V] matrix
W(1,1)=W(1,1)+del_W1;
W(2,1)=W(2,1)+del_W2;
W(3,1)=W(3,1)+del_W3;
W(4,1)=W(4,1)+del_W4;
W(5,1)=W(5,1)+del_W5;
V(1,1)= V(1,1)+del_V1;
V(2,1)= V(2,1)+del_V2;
V(3,1)= V(3,1)+del_V3;
V(4,1)= V(4,1)+del_V4;
V(5,1)= V(5,1)+del_V5;
V(6,1)= V(6,1)+del_V6;
V(1,2)= V(1,2)+del_V7;
V(2,2)= V(2,2)+del_V8;
V(3,2)= V(3,2)+del_V9;
V(4,2)= V(4,2)+del_V10;
V(5,2)= V(5,2)+del_V11;
V(6,2)= V(6,2)+del_V12;
V(1,3)= V(1,3)+del_V13;
V(2,3)= V(2,3)+del_V14;
V(3,3)= V(3,3)+del_V15;
V(4,3)= V(4,3)+del_V16;
V(5,3)= V(5,3)+del_V17;
V(6,3)= V(6,3)+del_V18;
V(1,4)= V(1,4)+del_V19;
V(2,4)= V(2,4)+del_V20;
V(3,4)= V(3,4)+del_V21;
V(4,4)= V(4,4)+del_V22;
V(5,4)= V(5,4)+del_V23;
V(6,4)= V(6,4)+del_V24;
V(1,5)= V(1,5)+del_V25;
V(2,5)= V(2,5)+del_V26;
V(3,5)= V(3,5)+del_V27;
V(4,5)= V(4,5)+del_V28;
V(5,5)= V(5,5)+del_V29;
V(6,5)= V(6,5)+del_V30;
end
0 comments
Accepted Answer
Raunak Gupta
on 8 Aug 2020
Hi Rajdeep,
From the code I see that you use the first for loop inside the main for loop to update the value of E, and the while loop condition is based on E only. So the variable on which the while loop depends is never updated inside the while loop. The problem this creates: if the value of E coming out of the first for loop is >= 0.0004, the while loop will never terminate, because E does not change at all in that loop or in any subsequent pass.
I think something is missing in the implementation regarding updating E within the while loop. You may want to check again the backpropagation equations and the termination condition; a minimal sketch of one possible restructuring is shown below.
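To make the point concrete, here is a minimal sketch (an assumed structure, not your full network) of how the control flow could be arranged so that the convergence test always sees a freshly computed error. The trainOneEpoch call is a hypothetical placeholder for the forward pass, backpropagation and weight updates you already have; the essential part is that E is recomputed on every pass of the loop that tests it, and that an epoch cap keeps the loop from hanging even if it never converges.
E = Inf;            % mean squared error; Inf forces at least one pass
epoch = 0;
maxEpochs = 6000;   % hard cap so the loop cannot run forever
while E >= 0.0004 && epoch < maxEpochs
    epoch = epoch + 1;
    % --- one full pass over the N training cases goes here ---
    % [W, V, E] = trainOneEpoch(W, V, transposed_input, d, alpha, Mu);
    % trainOneEpoch is a hypothetical helper wrapping the code above;
    % the key point is that it returns an updated E every iteration.
end
Equivalently, you could keep the for epoch = 1:6000 loop and simply break once E drops below 0.0004; either way, the value tested for convergence must be recomputed in every iteration.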
Hope it helps!
More Answers (0)