%> @brief Neural Network Classifier. This is a wrapper to MATLAB's Neural Networks Toolbox
3 %> Check MATLAB's NN toolbox documentation on net.trainParam
%> This is by default tuned to finish training mostly when the net overfits:
6 %> @arg High number of epochs
%> @arg goal is zero (will only stop according to this criterion if it reaches 100% classification)
%> @arg min_grad is very small as well (1e-5)
%> Input weights are initialized to 'midpoint', and layer weights are initialized to zero. This way, the classifier will be deterministic
%> (the default MATLAB "initnw" sounds nice but has randomness in it)
13 %> @sa uip_clssr_ann.m
%> whether classes must be converted into multiple-output boolean targets.
18 %> = [1]. Number of neurons in each hidden layer.
21 % Learning parameters. Names match the ones in net.trainParam
%> =0. Stands for the goal MSE (Mean Squared Error). Training stops when achieving this error. We don't know what to expect, so it is fair to
%> expect 0 error (100% classification).
32 %> = 15 Maximum number of validation increases
36 %> Whether to weight the observations (for unbalanced classes)
38 %> =1e-5. Minimum gradient.
42 properties(SetAccess=protected)
47 function o = clssr_ann(o)
48 o.classtitle = 'Artificial Neural Network
';
53 methods(Access=protected)
55 function o = do_boot(o)
60 % TODO actually this could even be multiple-trainable
61 function o = do_train(o, data)
62 o.classlabels = data.classlabels;
% TODO I pasted this from the boot procedure and I don't know if it is working; long time not using NN
70 o.net = newpr(data.X', targets, o.hiddens);
72 o.net.trainFcn =
'trainlm';
74 o.net.trainParam.goal = o.goal;
75 o.net.trainParam.epochs = o.epochs;
76 o.net.trainParam.show = o.show;
77 o.net.trainParam.lr = o.lr;
78 o.net.trainParam.max_fail = o.max_fail;
o.net.trainParam.showWindow = o.flag_show_window; % gets rid of this annoying GUI, TODO however not quite working
86 o.net.initFcn =
'initlay'; % (net.initParam automatically becomes initlay
's default parameters.)
87 for i = 1:numel(o.net.layers)
88 o.net.layers{i}.initFcn = 'initwb
';
90 for i = 1:size(o.net.inputWeights, 1)
91 for j = 1:size(o.net.inputWeights, 2)
92 if ~isempty(o.net.inputWeights{i, j})
93 o.net.inputWeights{i, j}.initFcn = 'midpoint
';
% These instructions on how to use midpoint are from MATLAB's reference for the midpoint function.
% However, this gave errors and I opted for initializing the layer weights to zero
% for i = 1:size(o.net.layerWeights, 1)
%     for j = 1:size(o.net.layerWeights, 2)
%         if ~isempty(o.net.layerWeights{i, j})
%             o.net.layerWeights{i, j}.initFcn = 'midpoint';
115 for i = 1:size(o.net.LW, 1)
116 for j = 1:size(o.net.LW, 1)
119 o.net.LW{i, j} = zeros(size(z));
130 % % Changes the way the data is split
% o.net.divideFcn = 'divideind';
132 % o.net.divideParam.trainInd = 1:no_obs_train;
133 % o.net.divideParam.valInd = (no_obs_train+1):no_obs_total;
134 % o.net.divideParam.testInd = (no_obs_train+1):no_obs_total;
138 [o.net, tr, output, error] = train(o.net, data.X
', targets);
140 ww = data.get_weights(); % weights per class
141 weights = zeros(1, data.no); % weights per observation
144 weights(data.classes == i-1) = ww(i);
148 % train(ftdnn_net,p,t,Pi,Ai,ew1);
149 [o.net, tr, output, error] = train(o.net, data.X', targets, {}, {}, weights);
% [o.net, tr, output, error] = train(o.net, data.X', targets, [], []);
158 function est = do_use(o, data)
160 est.classlabels = o.classlabels;
161 est = est.copy_from_data(data);
164 Y = sim(o.net, data.X')
';
166 % TODO SoftMax!!!!!!!!11
function classes2boolean(in classes, in no_different)
Neural Network Classifier. This is a wrapper to MATLAB's Neural Networks Toolbox. ...
Analysis Session (AS) base class.