%> @ingroup parallelgroup
%> @brief Incremental learning curve - to test incremental classifiers
%>
%> This Analysis Session aims to raise "Incremental learning curves" (ILC)
%> for one or more classifiers. This aims at testing incremental
%> classifiers such as eClass.
%>
%> ILCs are stored directly in a dataset. Each class of the dataset
%> corresponds to a different classifier in the block_mold property.
%> When using the reptt_incr::use() method, you need to pass two datasets to it: <code>[train, test]</code>. The @c test element will always be used as-is.
%> The @ref train dataset will have its rows permuted using the @ref reptt_incr::sgs. If an SGS is not provided, the @ref train dataset
%> will be used only once.
%>
%> The block_mold objects must be of class @ref clssr_incr.
%>
%> The reptt_incr::use() method outputs one dataset per log in the @ref reptt_incr::log_mold property.
%>
%> @sa demo_reptt_incr.m
20 classdef reptt_incr < reptt
%> SGS object. Needs to be a @ref sgs_randsub_base; "bites" will be ignored and overwritten with "[1]"; "type" will be overwritten with "simple".
%> The following properties make a difference: no_reps; flag_group; flag_perclass; randomseed
%> =0. Whether to parallelize the outer "reps" loop
%> =1. Recording periodicity
31 properties(SetAccess=protected)
35 %> Cell of datasets: each corresponding to one element in reptt_incr::log_mold
40 function o = reptt_incr()
% Constructor: sets the human-readable class title, registers the
% 'extract_datasets' action, and keeps the class out of the GUI.
% NOTE(review): this extraction carries fused original line numbers and
% string literals split across lines; code left byte-identical, and the
% closing "end" is missing from the visible chunk.
41 o.classtitle = 'Incremental
';
42 o.moreactions = [o.moreactions, {'extract_datasets
'}];
43 o.flag_ui = 0; % Not published in GUI
47 methods(Access=protected)
48 %> Returns number of recordings based on internal setup
50 %> Asks the first element in block_mold
51 function nr = get_no_recordings(o, data)
% Returns the number of recordings, delegating to a classifier mold's
% own get_no_recordings() with the configured recording periodicity.
% NOTE(review): the line initializing "bl" is missing from this
% extraction -- presumably "bl = o.block_mold{1};" (the doc comment
% above says it asks the first element in block_mold); confirm against
% the original file.
53 bl.record_every = o.record_every;
54 nr = bl.get_no_recordings(data(1).no);
%> Allocates result datasets
%>
%> Allocates datasets rather than logs. The datasets are sized with zeroes in their @c X and @c classes
%>
%> @arg results: cell of datasets: (1)X(no_logs)
%> @arg each dataset: X: (no_blocks*no_reps)X(number of recordings)
%>
%> The number of recordings is currently the number of elements in the training set (one recording is taken after one row is passed
%> to the incremental-training classifier)
67 function o = allocate_results(o, data)
% Pre-allocates o.results (one dataset per log mold) with zeroed X and
% classes, per the doc comment above.
% NOTE(review): several original lines are missing from this extraction
% (else-branches, "end"s, the "for i" loop headers, and the line
% copying "d" into o.results(i)); code left byte-identical.
68 no_reps = size(o.obsidxs, 1);
69 nb = numel(o.block_mold);
70 nl = numel(o.log_mold);
% Normalize molds to cell arrays so they can be indexed uniformly.
72 if ~iscell(o.block_mold)
73 bmold = {o.block_mold};
78 if ~iscell(o.log_mold)
84 nr = o.get_no_recordings(data);
% x-axis values: training-row count at each recording; the last point
% is the full training set size data(1).no.
88 d.fea_x = [(1:nr-1)*o.record_every data(1).no];
89 d.xname = 'Number of training observations
';
91 d.yname = 'Performance
';
% Grow-to-final-index assignment pre-sizes X and classes with zeros.
93 d.X(nb*no_reps, nr) = 0;
94 d.classes(nb*no_reps, 1) = 0;
96 d.classlabels = cell(1, nb);
% Each dataset class corresponds to one classifier mold; classes are
% 0-based (NOTE(review): the enclosing "for i" line is missing here).
98 d.classlabels{i} = bmold{i}.get_description();
99 d.classes((1:no_reps)+(i-1)*no_reps) = i-1;
102 o.results = irdata.empty();
% One result dataset per log mold; title and y-unit come from the log
% mold's description and its percentage flag.
106 o.results(i).title = ['Based on
', lmold{i}.get_description()];
107 o.results(i).yunit = iif(o.log_mold{i}.get_flag_perc(), '%
', '');
111 %> Asserts that the SGS object is a randsub one
112 function o = assert_randsub(o)
% NOTE(review): the method body is missing from this extraction; per
% the preceding doc comment it should verify that o.sgs is an
% sgs_randsub_base -- confirm against the original file.
118 %> Asserts that the blocks in @ref block_mold are all clssr_incr
119 function o = assert_clssr_incr(o)
% Checks each classifier mold and aborts via irerror() when one is not
% of class "clssr_incr" (per the error message below).
120 for i = 1:numel(o.block_mold)
% NOTE(review): the "isa" test line, part of the loop body, and the
% closing "end"s are missing from this extraction -- presumably
% "if ~isa(o.block_mold{i}, 'clssr_incr')"; confirm against the
% original file.
122 irerror('All classifiers must be of
class "clssr_incr"!
');
128 %> Output is an array of datasets
129 function out = do_use(o, data)
% Runs the incremental learning-curve session: for every (repetition,
% classifier-mold) pair, trains a worker-local classifier copy on the
% permuted training split and collects its recorded rates into
% o.results.
% NOTE(review): many lines are missing from this extraction (the SGS
% setup around "sgs_", the definitions of o_data2, o_logs, t, and the
% "il" loop wrapping the final assignment); code left byte-identical --
% confirm against the original file.
130 o.assert_clssr_incr();
132 flag_sgs = ~isempty(o.sgs);
% "type" is forced to "simple", as documented on the sgs property.
137 sgs_.type = 'simple
';
138 o.obsidxs = o.sgs.get_obsidxs(data(1));
139 no_reps = size(o.obsidxs, 1);
140 % nt = size(o.obsidxs, 2)-1; % Number of test datasets
143 % nt = numel(data)-1;
146 o = o.allocate_results(data);
147 % o = o.allocate_blocks();
149 nb = numel(o.block_mold);
150 nl = numel(o.results);
151 % no_recordings = o.get_no_recordings();
154 % Will have to split the dataset before the parfor!
158 dd = data(1).split_map(o.obsidxs);
161 % Have to extrude everything from "o" that will be used by the workers
162 o_blocks = o.block_mold;
164 o_postpr_est = o.postpr_est;
165 o_postpr_test = o.postpr_test;
167 o_record_every = o.record_every;
170 % Results assigned during the parallel loop
171 tempresult = cell(1, no_reps*nb);
% Flattened (rep, block) index: i_par enumerates all pairs so parfor
% can distribute the work; i_rep/i_blk are recovered arithmetically.
181 parfor i_par = 1:no_reps*nb
183 i_rep = ceil(i_par/nb);
184 i_blk = mod(i_par-1, nb)+1;
186 % tempx = zeros(nl, no_recordings, nb);
188 % ipro = progress2_open('REPTT_INCR BLOCKS
', [], 0, nb);
190 bl = o_blocks{i_blk};
% Configure the worker-local copy; flag_rtrecord=1 turns on real-time
% recording so bl.rates is populated during training.
192 bl.data_test = o_data2;
193 bl.postpr_test = o_postpr_test;
194 bl.postpr_est = o_postpr_est;
195 bl.log_mold = o_logs;
196 bl.flag_rtrecord = 1;
197 bl.record_every = o_record_every;
200 % bl = bl.allocate(dd(i_rep).no); % dd(i_rep).no for all i_rep should be the same
202 bl = bl.train(dd(i_rep));
204 tempresult{i_par} = bl.rates;
205 % ipro = progress2_change(ipro, [], [], i_blk);
207 % progress2_close(ipro);
210 % tempresult{i_par} = tempx;
213 irverbose(sprintf('TOTAL REPTT_INCR ELLAPSED TIME: %g\n
', toc(t)));
227 % Places results inside the right slots
229 for i_par = 1:no_reps*nb
230 i_rep = ceil(i_par/nb);
231 i_blk = mod(i_par-1, nb)+1;
% Row layout: repetitions of block i_blk occupy rows
% (i_blk-1)*no_reps+1 .. i_blk*no_reps; "il" indexes the result/log,
% presumably from a missing enclosing loop -- confirm.
232 o.results(il).X(i_rep+(i_blk-1)*no_reps, :) = tempresult{i_par}(il, :);
Base Sub-dataset Generation Specification (SGS) class.
Random Sub-sampling base class.
Base class for Incremental Classifiers.
Analysis Session (AS) base class.