Sei sulla pagina 1di 9

January 2015, MATLAB Tutorial for ECE 614

Regression Using Neural Network


1- Using Neural Network GUI (nnstart)

2- Using Command-Line functions


% Solve an input-output fitting (regression) problem with a shallow
% neural network.  Script originally generated by NFTOOL.
%
% Uses the toolbox sample dataset house_dataset, which defines:
%   houseInputs  - input data.
%   houseTargets - target data.
load house_dataset
x = houseInputs;
t = houseTargets;

% Build a function-fitting network with one hidden layer of 10 neurons.
nHidden = 10;
net = fitnet(nHidden);

% Split the data: 70% training, 15% validation, 15% test.
net.divideParam.trainRatio = 0.70;
net.divideParam.valRatio   = 0.15;
net.divideParam.testRatio  = 0.15;

% Train the network.
[net,tr] = train(net,x,t);

% Evaluate the trained network on the full dataset.
y = net(x);
e = gsubtract(y,t);
performance = perform(net,t,y)

% Visualize the network architecture.
view(net)

% Plots -- uncomment these lines to enable various plots.
% figure, plotperform(tr)
% figure, plottrainstate(tr)
% figure, plotfit(t,y)
% figure, plotregression(t,y)
% figure, ploterrhist(e)

Classification Using Neural Network

1- Using Neural Network GUI (nnstart)

2- Using Command-Line functions


% Solve a pattern recognition (classification) problem with a neural
% network.  Script originally generated by NPRTOOL.
%
% This script uses these variables:
%   cancerInputs  - input data.
%   cancerTargets - target data.
% Load the toolbox sample dataset so the script runs standalone
% (mirrors the regression example, which loads house_dataset).
load cancer_dataset
inputs = cancerInputs;
targets = cancerTargets;

% Create a pattern recognition network with 10 hidden neurons.
hiddenLayerSize = 10;
net = patternnet(hiddenLayerSize);

% Set up division of data for training, validation, testing.
net.divideParam.trainRatio = 70/100;
net.divideParam.valRatio = 15/100;
net.divideParam.testRatio = 15/100;

% Train the network.
[net,tr] = train(net,inputs,targets);

% Test the network.
outputs = net(inputs);
errors = gsubtract(targets,outputs);
performance = perform(net,targets,outputs)

% View the network.
view(net)

% Plots
% Uncomment these lines to enable various plots.
% figure, plotperform(tr)
% figure, plottrainstate(tr)
% figure, plotconfusion(targets,outputs)
% figure, ploterrhist(errors)

Clustering Using Neural Network (Self-Organizing Maps)
1- Using Neural Network GUI (nnstart)

2- Using Command-Line functions

% Solve a clustering problem with a self-organizing map (SOM).
% Script generated by NCTOOL.
%
% NOTE(review): NCTOOL emitted this header for simpleclusterInputs,
% but the script actually loads iris_dataset and clusters irisInputs.
%
% irisInputs - input data (loaded below from iris_dataset).
load iris_dataset
inputs = irisInputs;
% Create a 10-by-10 self-organizing map layer.
dimension1 = 10;
dimension2 = 10;
net = selforgmap([dimension1 dimension2]);
% Train the network (SOM training is unsupervised: no targets given).
[net,tr] = train(net,inputs);
% Test the network on the training inputs.
outputs = net(inputs);
% View the network architecture.
view(net)
% Plots
% Uncomment these lines to enable various plots.
% figure, plotsomtop(net)
% figure, plotsomnc(net)
% figure, plotsomnd(net)
% figure, plotsomplanes(net)
% figure, plotsomhits(net,inputs)
% figure, plotsompos(net,inputs)

Examples from the Text


1-Digit Detection
%% Digit detection -- data as in e.g. 8.19, p. 558
% Each digit is a 5x5 binary pixel image written as one 25-element row:
% four lines continued with "..." plus a final (uncontinued) line.
% NOTE(review): in the extracted original, the last pixel row of each
% digit and every target row had lost its spaces (e.g. "00100"), which
% MATLAB parses as a single number and which breaks the matrix shape;
% the spaced rows are restored here.
digitsInput = [...
0 0 1 0 0 ... here goes the 1
0 0 1 0 0 ...
0 0 1 0 0 ...
0 0 1 0 0 ...
0 0 1 0 0
0 1 1 1 0 ... here goes the 2
0 0 0 1 0 ...
0 0 1 0 0 ...
0 1 0 0 0 ...
1 1 1 1 1
0 1 1 1 0 ... here goes the 3
0 0 0 1 0 ...
0 0 1 1 0 ...
0 0 0 1 0 ...
0 1 1 1 0
0 1 0 0 1 ... 4
0 1 0 0 1 ...
0 1 1 1 1 ...
0 0 0 0 1 ...
0 0 0 0 1
0 1 1 1 0 ... 5
0 1 0 0 0 ...
0 1 1 1 0 ...
0 0 0 1 0 ...
0 1 1 1 0
];
digitsInput = digitsInput'; % columns = samples (25 pixels x 5 digits)
% One-hot targets: column d is the indicator vector for digit d.
digitsTargets = [...
1 0 0 0 0
0 1 0 0 0
0 0 1 0 0
0 0 0 1 0
0 0 0 0 1
]'; %notice the transpose

%% Plot the digits: reshape each 25-element column back to a 5x5 image.
figure;
for d = 1:5
subplot(1,5,d);
imshow(reshape(digitsInput(:,d),5,5)')
end
%% Set up and train the network
%% Create a new net
% newpr is the legacy pattern-recognition constructor (superseded by
% patternnet in newer toolbox releases); 5 hidden neurons.
net = newpr(digitsInput, digitsTargets, [5]);
net.divideFcn = ''; % no data division: train on all five samples
[net,stats] = train(net,digitsInput,digitsTargets);
out = sim(net,digitsInput);
%%
plotconfusion(digitsTargets,out);
plotperf(stats);

%% Function approximation: fit y = 0.8*sin(pi*x) with a 5-neuron net
X = linspace(-1,1,21);
Y = 0.8*sin(pi*X);
%plot(X,Y)
net = newfit(X,Y,5); % legacy constructor; superseded by fitnet
net.divideFcn = '';  % no data division: train on all 21 points
XTest = -1:0.05:1;
net = train(net,X,Y);
out = sim(net,XTest);
plot(X,Y,'o',XTest,out,'-');
%% What is the network function?  Reproduce it from the weights.
% Read the layers.  NOTE(review): the bias property is lowercase net.b;
% the original's net.B is not a valid network property.
HB = net.b{1};    % hidden layer bias
HW = net.IW{1};   % hidden layer weights, stored ROW-wise (each neuron is a ROW)
OB = net.b{2};    % output layer bias
OW = net.LW{2,1}; % output layer weights
myOut = zeros(size(out));
% Apply the same input preprocessing the network uses; stored settings
% are applied with the 'apply' form of mapminmax.
% NOTE(review): the processSettings index depends on the configured
% processFcns list -- confirm which entry holds the mapminmax settings.
myIn = mapminmax('apply',XTest,net.inputs{1}.processSettings{3});
for i = 1:size(XTest,2) % iterate over all inputs
    % Use the PREPROCESSED input (the original read raw XTest here,
    % leaving myIn unused).
    in = myIn(:,i);
    % hidden layer: tansig(HW*in+HB); output layer is linear (no sigmoid)
    myOut(:,i) = OW*tansig(HW*in+HB)+OB;
end
% Undo the output preprocessing (reverse mapminmax).
myOut = mapminmax('reverse',myOut,net.outputs{1,2}.processSettings{2});

% Compare: list indices where the manual output disagrees with sim().
disp(['net out~=myOut: ' mat2str(find(abs(out(:)-myOut(:))>1e-6))]);

Potrebbero piacerti anche