@@ -3,6 +3,7 @@
 #include <cmath>
 #include <forward_list>
 #include <algorithm>
+#include <numeric>
 #include "annclasses.h"
 
 using namespace std;
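The new `<numeric>` include supplies `std::accumulate`, which the vector-based constructor below uses to total the per-layer neuron counts.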
@@ -114,6 +115,8 @@ Network::Network(int n_layers, int n_neurons)
     h_activ = RELU;
     //o_activ = SIGMOID;
     o_activ = LINEAR;
+
+    neurons_number = n_layers*n_neurons;
 }
 
 Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
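Both constructors now cache the network's total neuron count: the fixed-width overload computes it as `n_layers*n_neurons`, while the vector overload in the next hunk sums the per-layer sizes with `accumulate`. Note that the unchanged context lines `h_activ = h_activ;` and `o_activ = o_activ;` below assign the constructor parameters to themselves (the parameters shadow the members of the same name), so the member fields are left untouched.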
@@ -138,15 +141,61 @@ Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ
     }
     h_activ = h_activ;
     o_activ = o_activ;
+
+    neurons_number = accumulate(n_neurons.begin(), n_neurons.end(), 0);
 }
 
-bool Network::train(const std::vector<float> &input, const std::vector<float> &target, float learning_rate, int n_episodes)
+int Network::get_neurons_number()
 {
-    for(int episode=1;episode<=n_episodes;episode++)
+    return neurons_number;
+}
+
+bool Network::train(const vector<vector<float>> &inputs, const vector<vector<float>> &targets, float learning_rate, int n_episodes, int batch_size)
+{
+    if(inputs.size() == targets.size())
+    {
+        vector<vector<float>> all_activated_outputs(get_neurons_number());
+        vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
+        bool is_constructed = false;
+        for(int episode=1 ; episode<=n_episodes ; episode++)
+        {
+            for(int index(0) ; index<inputs.size() ; index++)//batch_size not yet used
+            {
+                forward(inputs.at(index), targets.at(index));
+                set_errors(targets.at(index));
+
+                int layer_counter = 0;
+                int neurons_counter1 = 0;
+                int neurons_counter2 = 0;
+                for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+                {
+                    layer_counter++;
+                    if(layer_counter==1)
+                    {
+                        for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                        {
+                            all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+                            neurons_counter1++;
+                        }
+                    }else
+                    {
+                        for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                        {
+                            all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+                            neurons_counter1++;
+
+                            all_derrors.at(neurons_counter2).push_back( current_neuron->get_derror() );
+                            neurons_counter2++;
+                        }
+                    }
+                }
+            }
+            backward(learning_rate);
+        }
+    }else
     {
-        forward(input, target);
-        set_errors(target);
-        backward(learning_rate);
+        cerr << "Inputs and targets vectors have different size" << endl;
+        exit(-1);
     }
     return true;
 }
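As this hunk reads, `train` now takes the whole dataset at once: each episode feeds every sample forward, records activated outputs and error derivatives into `all_activated_outputs`/`all_derrors` (which, along with `is_constructed`, are filled but not yet consumed here), and calls `backward(learning_rate)` once per episode after the inner loop; `batch_size` is accepted but, per the inline comment, not yet used. A minimal usage sketch of the new signature, assuming `annclasses.h` declares the `Network`, `Activ`, `RELU`, and `LINEAR` names seen above, with a hypothetical toy dataset:

#include <vector>
#include "annclasses.h"

int main()
{
    // Hypothetical toy data: two 2-float samples, one 1-float target each.
    std::vector<std::vector<float>> inputs  = {{0.0f, 1.0f}, {1.0f, 0.0f}};
    std::vector<std::vector<float>> targets = {{1.0f}, {0.0f}};

    // Assumed topology for illustration: 2 inputs, 3 hidden, 1 output.
    Network net(std::vector<int>{2, 3, 1}, RELU, LINEAR);

    // New signature: learning_rate, n_episodes, batch_size (currently unused).
    net.train(inputs, targets, 0.05f, 100, 2);
    return 0;
}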
@@ -252,48 +301,53 @@ bool Network::backward(float learning_rate)
 
 bool neuron_cmp(Neuron a, Neuron b){return a.get_activated_output()<b.get_activated_output();}
 
-float Network::predict(const std::vector<float> &input, bool as_raw)
+vector<float> Network::predict(const vector<vector<float>> &inputs, bool as_raw)
 {
-    int layer_counter = 0;
-    for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
-    {//inside current layer
-        layer_counter++;
-        if(layer_counter==1)
-        {
-            int i=0;
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                current_neuron->set_activated_output( input.at(i) );
-                i++;
+    vector<float> results;
+    for(auto input : inputs)
+    {
+        int layer_counter = 0;
+        for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+        {//inside current layer
+            layer_counter++;
+            if(layer_counter==1)
+            {
+                int i=0;
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    current_neuron->set_activated_output( input.at(i) );
+                    i++;
+                }
+            }else if(layer_counter==layers.size())
+            {
+                list<forward_list<Neuron>>::iterator temp = current_layer;
+                temp--; //previous layer
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    forward_list<Neuron>::iterator prev_layer_it(temp->begin());
+                    current_neuron->activate(prev_layer_it, o_activ);
+                }
+            }else
+            {
+                list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
+                temp_prev_layer--; //temp_prev_layer set now at previous layer
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
+                    current_neuron->activate(prev_layer_it, h_activ);
+                }
             }
-        }else if(layer_counter==layers.size())
+        }
+        list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
+        if(as_raw)
         {
-            list<forward_list<Neuron>>::iterator temp = current_layer;
-            temp--; //previous layer
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                forward_list<Neuron>::iterator prev_layer_it(temp->begin());
-                current_neuron->activate(prev_layer_it, o_activ);
-            }
+            results.push_back( max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output() );
         }else
         {
-            list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
-            temp_prev_layer--; ////temp_prev_layer set now at previous layer
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
-                current_neuron->activate(prev_layer_it, h_activ);
-            }
+            results.push_back( distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) ) );
         }
     }
-    list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
-    if(as_raw)
-    {
-        return max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output();
-    }else
-    {
-        return distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) );
-    }
+    return results;
 }
 
 void Network::print()
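`predict` likewise became batched: it returns one float per input row, either the strongest output neuron's raw activation (`as_raw == true`) or that neuron's index within the output layer. A sketch under the same assumptions as the `train` example above:

#include <vector>
#include "annclasses.h"

int main()
{
    Network net(std::vector<int>{2, 3, 1}, RELU, LINEAR);  // assumed topology
    std::vector<std::vector<float>> inputs = {{0.0f, 1.0f}, {1.0f, 0.0f}};

    std::vector<float> raw    = net.predict(inputs, true);   // max activated output per sample
    std::vector<float> labels = net.predict(inputs, false);  // index of that neuron per sample
    return 0;
}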