Wednesday 13 January 2016

Implement fuzzy membership functions (Gamma, S, triangular, Pi, Gaussian) ......... MATLAB Code

% Fuzzy membership function demo: read the universe size and the shape
% parameters shared by the membership functions plotted below.
clc;
clear;      % 'clear' is sufficient; 'clear all' also wipes breakpoints,
            % persistents and cached functions and is discouraged
close all;

ip=input('Enter input range : ');    % universe of discourse: u = 1..ip
a=input('Enter value of alpha : ');  % left foot of the ramps
b=input('Enter value of beta : ');   % end of the rising ramp
c=input('Enter value of gamma : ');  % right foot (triangular/S) / plateau end (Pi)
d=input('Enter value of delta : ');  % right foot of the Pi function
m=input('Enter value of m : ');      % Gaussian mean
del=input('Enter value of del : ');  % Gaussian spread (standard deviation)

%Gamma function (open right shoulder): 0 up to alpha, linear rise on
%(alpha, beta], then saturated at 1.  Vectorized with logical masks
%instead of growing g one element at a time inside a loop.

u = 1:ip;                         % evaluation points
g = zeros(1, ip);                 % pre-allocate; covers u <= alpha
rise = (u > a) & (u <= b);        % linearly increasing region
g(rise) = (u(rise) - a) / (b - a);
g(u > b) = 1;                     % saturated region

subplot(2,3,1)
plot(g)
title('Gamma Function')
axis([0 ip 0 1]);

%S function: 0 up to alpha, quadratic rise on (alpha, beta], mirrored
%quadratic on (beta, gamma], then 1.  Vectorized with logical masks
%instead of growing s one element at a time inside a loop.

u = 1:ip;
s = zeros(1, ip);                 % pre-allocate; covers u <= alpha
lower = (u > a) & (u <= b);       % lower quadratic segment
s(lower) = 2 * ((u(lower) - a) / (c - a)).^2;
upper = (u > b) & (u <= c);       % upper (mirrored) quadratic segment
s(upper) = 1 - 2 * ((u(upper) - c) / (c - a)).^2;
s(u > c) = 1;                     % saturated region

subplot(2,3,2)
plot(s);
title('S Function');
axis([0 ip 0 1]);

%Triangular function: 0 outside [alpha, gamma], linear rise on
%(alpha, beta], linear fall on (beta, gamma].  Vectorized with logical
%masks instead of growing t one element at a time inside a loop.

u = 1:ip;
t = zeros(1, ip);                 % pre-allocate; covers both flat tails
up = (u > a) & (u <= b);          % rising edge
t(up) = (u(up) - a) / (b - a);
down = (u > b) & (u <= c);        % falling edge
t(down) = (c - u(down)) / (c - b);

subplot(2,3,3)
plot(t);
title('Triangular Function');
axis([0 ip 0 1]);

%Pi (trapezoidal) function: 0 outside [alpha, delta], linear rise on
%(alpha, beta], plateau of 1 on (beta, gamma], linear fall on
%(gamma, delta].  Vectorized instead of growing p inside a loop.

u = 1:ip;
p = zeros(1, ip);                 % pre-allocate; covers both flat tails
up = (u > a) & (u <= b);          % rising edge
p(up) = (u(up) - a) / (b - a);
p((u > b) & (u <= c)) = 1;        % plateau
down = (u > c) & (u <= d);        % falling edge
p(down) = (d - u(down)) / (d - c);

subplot(2,3,4)
plot(p);
title('Pie Function');
axis([0 ip 0 1]);

%Gaussian function: exp(-(u-m)^2 / (2*del^2)), peaking at u = m.
%One vectorized expression replaces the per-element loop (no repeated
%array growth).

u = 1:ip;
gs = exp(-((u - m).^2) / (2 * del^2));

subplot(2,3,5)
plot(gs);
title('Gaussian Function');
axis([0 ip 0 1]);

Monday 11 January 2016

Implement a simple linear regressor with a single neuron model .... MATLAB Code

// data.txt file


166, 54.00
195, 82.00
200, 72.00
260, 72.00
265, 90.00
335, 124.00
370, 94.00
450, 118.00


//    .m file for linear regression

%Simple linear regression (single-neuron model) via the normal equation.
%Load the data from our text file
data = load('/home/svcet/Desktop/data.txt');
% Define x (second column: cost) and y (first column: page count)
x = data(:,2);
y = data(:,1);
% Plot the raw data.  NOTE: defining a local function (plotData) in the
% middle of a MATLAB script is a syntax error -- script-local functions
% must appear at the end of the file (R2016b+) and were not allowed in
% scripts at all before that -- so the one-line helper is inlined here.
plot(x, y, 'rx', 'MarkerSize', 8);
xlabel('Cost of Book'); % Set the x-axis label
ylabel('Number of Pages'); % Set the y-axis label
fprintf('Program paused. Press enter to continue.\n');
pause;
% Count how many data points we have
m = length(x);
% Add a column of all ones (intercept term) to x
X = [ones(m, 1) x];
% Normal equation: theta = (X'X)^-1 X'y  (pinv tolerates rank deficiency)
theta = (pinv(X'*X))*X'*y
% Plot the fitted equation we got from the regression
hold on; % this keeps our previous plot of the training data visible
plot(X(:,2), X*theta, '-')
legend('Training data', 'Linear regression')
hold off % Don't put any more plots on this figure


MLP trained with Backpropagation for XOR Function.... MATLAB Code

function y=binsig(x)
%BINSIG Binary sigmoid (logistic) activation: 1/(1+exp(-x)).
%   Uses elementwise ./ so vector/matrix inputs are supported as well;
%   backward compatible -- identical result for scalar x.
y=1./(1+exp(-x));
end

function y=binsig1(x)
%BINSIG1 Derivative of the binary sigmoid: binsig(x).*(1-binsig(x)).
%   Evaluates binsig only once (the original called it twice) and uses
%   elementwise .* so vector input works; scalar behavior is unchanged.
s=binsig(x);   % reuse the single evaluation
y=s.*(1-s);
end

%Back Propagation Network for XOR function with Binary Input and Output
%Architecture: 2 inputs -> 4 hidden units (binary sigmoid) -> 1 output,
%trained by gradient descent with momentum until the summed squared
%error over the four XOR patterns drops below 0.005.
clc;
clear;
%Initialize weights and bias
v=[0.197 0.3191 -0.1448 0.3394;0.3099 0.1904 -0.0347 -0.4861]; % input->hidden weights (2x4)
v1=zeros(2,4);                      % previous v (momentum term)
b1=[-0.3378 0.2771 0.2859 -0.3329]; % hidden biases (1x4)
b2=-0.1401;                         % output bias
w=[0.4919;-0.2913;-0.3979;0.3581];  % hidden->output weights (4x1)
w1=zeros(4,1);                      % previous w (momentum term)
x=[1 1 0 0;1 0 1 0];                % four input patterns, one per column
t=[0 1 1 0];                        % XOR targets
alpha=0.02;                         % learning rate
mf=0.9;                             % momentum factor
con=1;
epoch=0;
% Pre-allocate work arrays (the original grew them inside the loops)
zin=zeros(1,4);                     % hidden net inputs
z=zeros(1,4);                       % hidden activations
y=zeros(1,4);                       % network outputs per pattern
delj=zeros(4,1);                    % hidden-layer deltas
delv=zeros(2,4);                    % input-weight updates
while con
    e=0;
    for I=1:4
        %Feed forward
        for j=1:4
            zin(j)=b1(j);
            for i=1:2
                zin(j)=zin(j)+x(i,I)*v(i,j);
            end
            z(j)=binsig(zin(j));
        end
        yin=b2+z*w;
        y(I)=binsig(yin);
        %Backpropagation of Error
        delk=(t(I)-y(I))*binsig1(yin);   % output-unit delta
        delw=alpha*delk*z'+mf*(w-w1);    % hidden->output update (with momentum)
        delb2=alpha*delk;
        delinj=delk*w;                   % error propagated back to hidden layer
        for j=1:4
            delj(j,1)=delinj(j,1)*binsig1(zin(j));
        end
        for j=1:4
            for i=1:2
                delv(i,j)=alpha*delj(j,1)*x(i,I)+mf*(v(i,j)-v1(i,j));
            end
        end
        delb1=alpha*delj;
        w1=w;                            % remember weights for next momentum term
        v1=v;
        %Weight updation
        w=w+delw;
        b2=b2+delb2;
        v=v+delv;
        b1=b1+delb1';
        e=e+(t(I)-y(I))^2;               % accumulate squared error this epoch
    end
    if e<0.005
        con=0;                           % converged
    end
    epoch=epoch+1;
end
disp('BPN for XOR function with Binary input and Output'); % typo 'funtion' fixed
disp('Total Epoch Performed');
disp(epoch);
disp('Error');
disp(e);
disp('Final Weight matrix and bias');
v
b1
w
b2