
annclasses.cpp 19KB

#include <iostream>
#include <ctime>
#include <cmath>
#include <forward_list>
#include <algorithm>
#include <numeric>
#include "annclasses.h"

using namespace std;
Neuron::Neuron(int prev_layer_size)
{
    // One incoming weight per neuron of the previous layer, randomly
    // initialised in [0,1]. The input layer passes prev_layer_size == 0
    // and therefore carries no weights.
    for(int i(1) ; i<=prev_layer_size ; i++)
    {
        weights.push_front(Tools::get_random(0.0, 1.0));
    }
    bias = 0.1;
    weighted_sum = 0.0;
    activated_output = 0.0;
    derror = 0.0;
}
void Neuron::set_bias(float value)
{
    bias = value;
}

float Neuron::get_bias()
{
    return bias;
}

// n is 1-based; forward_list has no random access, so walk n-1 steps.
void Neuron::set_nth_weight(int n, float value)
{
    int i = 1;
    forward_list<float>::iterator current_weight(weights.begin());
    while(i < n)
    {
        current_weight++;
        i++;
    }
    *current_weight = value;
}

float Neuron::get_nth_weight(int n)
{
    int i = 1;
    forward_list<float>::iterator current_weight(weights.begin());
    while(i < n)
    {
        current_weight++;
        i++;
    }
    return *current_weight;
}

float Neuron::get_weighted_sum()
{
    return weighted_sum;
}

void Neuron::set_activated_output(float value)
{
    activated_output = value;
}

float Neuron::get_activated_output()
{
    return activated_output;
}

void Neuron::set_derror(float value)
{
    derror = value;
}

float Neuron::get_derror()
{
    return derror;
}
// Computes z = bias + sum(w_i * a_i) over the previous layer, then applies
// the activation function. prev_layer_it is advanced in step with the
// weights, which is why it is taken by reference.
void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
{
    weighted_sum = bias;
    for(forward_list<float>::iterator it(weights.begin()) ; it!=weights.end() ; ++it)
    {
        weighted_sum += (*it) * (prev_layer_it->activated_output);
        prev_layer_it++;
    }
    activated_output = Tools::activation_function(activ_function, weighted_sum);
}
// Builds n_layers layers of n_neurons each, with default activations.
Network::Network(int n_layers, int n_neurons)
{
    for(int i(1) ; i<=n_layers ; i++)
    {
        forward_list<Neuron> current_layer;
        for(int j(1) ; j<=n_neurons ; j++)
        {
            // The input layer (i==1) has no incoming weights; every other
            // layer has one weight per neuron of the previous layer.
            current_layer.push_front( Neuron(i==1 ? 0 : n_neurons) );
        }
        layers.push_back(current_layer);
    }
    h_activ = RELU;
    o_activ = LINEAR;
    neurons_number = n_layers*n_neurons;
}
// Builds one layer per entry of n_neurons, with the given sizes and activations.
Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
{
    for(size_t i(0) ; i<n_neurons.size() ; i++)
    {
        forward_list<Neuron> current_layer;
        for(int j(1) ; j<=n_neurons[i] ; j++)
        {
            current_layer.push_front( Neuron(i==0 ? 0 : n_neurons[i-1]) );
        }
        layers.push_back(current_layer);
    }
    // The parameters shadow the members here, so a plain "h_activ = h_activ;"
    // would assign the parameters to themselves and leave the members unset.
    this->h_activ = h_activ;
    this->o_activ = o_activ;
    neurons_number = accumulate(n_neurons.begin(), n_neurons.end(), 0);
}
int Network::get_neurons_number()
{
    return neurons_number;
}
bool Network::train(const vector<vector<float>> &inputs, const vector<vector<float>> &targets, float learning_rate, int n_episodes, int batch_size)
{
    if(inputs.size() != targets.size())
    {
        cerr << "Inputs and targets vectors have different sizes" << endl;
        exit(-1);
    }
    for(int episode=1 ; episode<=n_episodes ; episode++)
    {
        for(size_t batch_index(0) ; batch_index<inputs.size() ; batch_index+=batch_size)
        {
            // Per-neuron history over the current mini-batch: one vector of
            // activations per neuron, and one vector of error terms per
            // non-input neuron (the input layer carries no derror).
            vector<vector<float>> all_activated_outputs(get_neurons_number());
            vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
            int layer_counter;
            int neurons_counter1;
            int neurons_counter2;
            // Forward pass on every sample of the batch, recording each
            // neuron's activation and error term.
            for(size_t index(batch_index) ; index<inputs.size() && index<batch_index+batch_size ; index++)
            {
                forward(inputs.at(index), targets.at(index));
                set_errors(targets.at(index));
                layer_counter = 0;
                neurons_counter1 = 0;
                neurons_counter2 = 0;
                for(list<forward_list<Neuron>>::iterator cur_layer(layers.begin()) ; cur_layer!=layers.end() ; ++cur_layer)
                {
                    layer_counter++;
                    if(layer_counter==1)
                    {
                        for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
                        {
                            all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
                            neurons_counter1++;
                        }
                    }else
                    {
                        for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
                        {
                            all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
                            neurons_counter1++;
                            all_derrors.at(neurons_counter2).push_back( cur_neuron->get_derror() );
                            neurons_counter2++;
                        }
                    }
                }
            }
            // Average each neuron's activation and error term over the batch,
            // then take a single gradient step. Note the 0.0f seed:
            // accumulating into an int (the original 0) would truncate the
            // floating-point sums to integers.
            layer_counter = 0;
            neurons_counter1 = 0;
            neurons_counter2 = 0;
            for(list<forward_list<Neuron>>::iterator cur_layer(layers.begin()) ; cur_layer!=layers.end() ; ++cur_layer)
            {
                layer_counter++;
                if(layer_counter==1)
                {
                    for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
                    {
                        cur_neuron->set_activated_output( accumulate(all_activated_outputs.at(neurons_counter1).begin(),
                            all_activated_outputs.at(neurons_counter1).end(), 0.0f)/all_activated_outputs.at(neurons_counter1).size() );
                        neurons_counter1++;
                    }
                }else
                {
                    for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
                    {
                        cur_neuron->set_activated_output( accumulate(all_activated_outputs.at(neurons_counter1).begin(),
                            all_activated_outputs.at(neurons_counter1).end(), 0.0f)/all_activated_outputs.at(neurons_counter1).size() );
                        neurons_counter1++;
                        cur_neuron->set_derror( accumulate(all_derrors.at(neurons_counter2).begin(),
                            all_derrors.at(neurons_counter2).end(), 0.0f)/all_derrors.at(neurons_counter2).size() );
                        neurons_counter2++;
                    }
                }
            }
            backward(learning_rate);
        }
    }
    return true;
}
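// Worked example of the batch averaging above (illustrative numbers only):
// with batch_size = 2, suppose a given output neuron ends the two forward
// passes of a batch with derror = 0.2f and derror = 0.4f. The accumulate()
// call then stores (0.2 + 0.4) / 2 = 0.3 as that neuron's derror, and
// backward() takes one gradient step with this averaged value, so each
// mini-batch triggers a single weight update rather than one per sample.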
// Propagates one input through the network. The target parameter is unused
// here; error terms are computed separately by set_errors().
bool Network::forward(const std::vector<float> &input, const std::vector<float> &target)
{
    int layer_counter = 0;
    for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
    {
        layer_counter++;
        if(layer_counter==1)
        {
            // Input layer: activations are the raw input values.
            int i = 0;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {
                current_neuron->set_activated_output( input.at(i) );
                i++;
            }
        }else
        {
            // Hidden and output layers: activate from the previous layer,
            // using the output activation on the last layer only.
            list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer;
            temp_prev_layer--; // previous layer
            Activ activ = (layer_counter==(int)layers.size()) ? o_activ : h_activ;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {
                forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
                current_neuron->activate(prev_layer_it, activ);
            }
        }
    }
    return true;
}
// Backpropagates the error terms (derror), starting from the output layer.
bool Network::set_errors(const std::vector<float> &target)
{
    int layer_counter = layers.size()+1;
    for(list<forward_list<Neuron>>::reverse_iterator current_layer(layers.rbegin()) ; current_layer!=layers.rend() ; ++current_layer)
    {
        layer_counter--;
        if(layer_counter==(int)layers.size())
        {
            // Output layer: derror = (output - target) * f'(z).
            int i = 0;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {
                current_neuron->set_derror( (current_neuron->get_activated_output()-target.at(i))*Tools::activation_function_derivative(o_activ,current_neuron->get_weighted_sum()) );
                i++;
            }
        }else if(layer_counter>1) // hidden layers; the input layer carries no error
        {
            list<forward_list<Neuron>>::reverse_iterator temp_next_layer = current_layer;
            temp_next_layer--; // next layer (decrementing a reverse iterator moves forward)
            int neuron_counter = 0;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {
                // Hidden neuron: derror = f'(z) * sum over the next layer of
                // (next neuron's derror * weight connecting this neuron to it).
                neuron_counter++;
                current_neuron->set_derror(0.0);
                for(forward_list<Neuron>::iterator next_layer_current_neuron(temp_next_layer->begin()) ; next_layer_current_neuron!=temp_next_layer->end() ; ++next_layer_current_neuron)
                {
                    current_neuron->set_derror( current_neuron->get_derror()+next_layer_current_neuron->get_derror()*next_layer_current_neuron->get_nth_weight(neuron_counter) );
                }
                current_neuron->set_derror( current_neuron->get_derror()*Tools::activation_function_derivative(h_activ,current_neuron->get_weighted_sum()) );
            }
        }
    }
    return true;
}
// Gradient-descent update: w -= lr * derror * prev_activation, b -= lr * derror.
bool Network::backward(float learning_rate)
{
    int layer_counter = layers.size()+1;
    for(list<forward_list<Neuron>>::reverse_iterator current_layer(layers.rbegin()) ; current_layer!=layers.rend() ; ++current_layer)
    {
        layer_counter--;
        if(layer_counter>1) // every layer except the input layer
        {
            list<forward_list<Neuron>>::reverse_iterator temp_prev_layer = current_layer;
            temp_prev_layer++; // previous layer (incrementing a reverse iterator moves backward)
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {
                int neuron_counter = 0;
                for(forward_list<Neuron>::iterator prev_layer_current_neuron(temp_prev_layer->begin()) ; prev_layer_current_neuron!=temp_prev_layer->end() ; ++prev_layer_current_neuron)
                {
                    neuron_counter++;
                    current_neuron->set_nth_weight( neuron_counter, current_neuron->get_nth_weight(neuron_counter)-learning_rate*current_neuron->get_derror()*prev_layer_current_neuron->get_activated_output() );
                }
                current_neuron->set_bias( current_neuron->get_bias()-learning_rate*current_neuron->get_derror() );
            }
        }
    }
    return true;
}
// Comparator for max_element; takes references to avoid copying whole neurons.
bool neuron_cmp(Neuron &a, Neuron &b){ return a.get_activated_output() < b.get_activated_output(); }

// Runs the forward pass on each input and returns either the raw value of the
// strongest output neuron (as_raw) or its index, i.e. an argmax classification.
vector<float> Network::predict(const vector<vector<float>> &inputs, bool as_raw)
{
    vector<float> results;
    for(const auto &input : inputs)
    {
        // Same propagation as forward(), repeated here for each input.
        int layer_counter = 0;
        for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
        {
            layer_counter++;
            if(layer_counter==1)
            {
                int i = 0;
                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
                {
                    current_neuron->set_activated_output( input.at(i) );
                    i++;
                }
            }else
            {
                list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer;
                temp_prev_layer--; // previous layer
                Activ activ = (layer_counter==(int)layers.size()) ? o_activ : h_activ;
                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
                {
                    forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
                    current_neuron->activate(prev_layer_it, activ);
                }
            }
        }
        list<forward_list<Neuron>>::iterator output_layer = layers.end();
        output_layer--;
        if(as_raw)
        {
            results.push_back( max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output() );
        }else
        {
            results.push_back( distance( output_layer->begin(), max_element(output_layer->begin(), output_layer->end(), neuron_cmp) ) );
        }
    }
    return results;
}
void Network::print()
{
    cout << endl << "#>>==========================================<<#" << endl;
    cout << "#                NEURAL NETWORK                #" << endl;
    cout << "#>>==========================================<<#" << endl;
    cout << ">> Number of layers : " << layers.size() << endl;
    cout << "------------------------------------------------" << endl;
    int layer_counter = 0;
    int prev_layer_size_temp = 0, params_counter = 0;
    for(list<forward_list<Neuron>>::iterator it1(layers.begin()) ; it1!=layers.end() ; ++it1)
    {
        layer_counter++;
        // forward_list has no size(), so count the neurons by hand.
        int current_layer_size = 0;
        for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2)
        {
            current_layer_size++;
        }
        // Each non-input neuron carries one weight per previous-layer neuron
        // plus a bias, hence (prev_size+1)*current_size parameters per layer.
        if(layer_counter>1)
        {
            params_counter += (prev_layer_size_temp+1)*current_layer_size;
        }
        prev_layer_size_temp = current_layer_size;
        if(layer_counter==1)
        {
            cout << ">> Input layer" << endl;
        }else if(layer_counter==(int)layers.size())
        {
            cout << ">> Output layer" << endl;
        }else
        {
            cout << ">> Hidden layer " << layer_counter-1 << endl;
        }
        cout << "size : " << current_layer_size << endl;
        cout << "neurons' activations : ";
        for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){ cout << it2->get_activated_output() << " "; }
        cout << endl;
        cout << "------------------------------------------------" << endl;
    }
    cout << "Number of parameters : " << params_counter << endl;
    cout << "#>>==========================================<<#" << endl << endl;
}
void Tools::activate_randomness()
{
    // Seed rand() once; call this before building a network so the initial
    // weights differ between runs.
    srand(time(NULL));
}

float Tools::get_random(float mini, float maxi)
{
    // Uniform random float in [mini, maxi].
    return mini + ((float)rand()/(float)RAND_MAX) * (maxi-mini);
}
float Tools::activation_function(Activ activ, float value)
{
    Tools t;
    switch(activ)
    {
    case RELU:
        return t.relu(value);
    case SIGMOID:
        return t.sigmoid(value);
    case TANH:
        return tanh(value);
    case LINEAR:
        return value;
    default:
        exit(-1);
    }
}

float Tools::activation_function_derivative(Activ activ, float value)
{
    Tools t;
    switch(activ)
    {
    case RELU:
        return t.relu_derivative(value);
    case SIGMOID:
        return t.sigmoid_derivative(value);
    case TANH:
        return t.tanh_derivative(value);
    case LINEAR:
        return 1.0;
    default:
        exit(-1);
    }
}
float Tools::relu(float value)
{
    return (value > 0.0) ? value : 0.0;
}

float Tools::sigmoid(float value)
{
    return 1.0 / (1.0 + exp(-value));
}

float Tools::relu_derivative(float value)
{
    return (value > 0.0) ? 1.0 : 0.0;
}

// sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
float Tools::sigmoid_derivative(float value)
{
    return sigmoid(value) * (1.0 - sigmoid(value));
}

// tanh'(x) = 1 - tanh(x)^2
float Tools::tanh_derivative(float value)
{
    return 1.0 - (tanh(value) * tanh(value));
}
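
For context, a minimal usage sketch follows. It assumes that annclasses.h (not shown on this page) declares the Network and Tools classes and the Activ enum exactly as they are used above, including Tools::activate_randomness() being callable statically like Tools::get_random; the training data is made up for illustration, and the file name main.cpp is hypothetical.

// main.cpp -- minimal sketch, assuming the declarations in annclasses.h
#include <iostream>
#include <vector>
#include "annclasses.h"

int main()
{
    Tools::activate_randomness(); // seed rand() so initial weights vary per run

    // 2 inputs -> 3 hidden neurons -> 1 output; ReLU hidden, linear output
    std::vector<int> sizes = {2, 3, 1};
    Network net(sizes, RELU, LINEAR);

    // Toy regression data: learn y = x0 + x1 (illustrative values only)
    std::vector<std::vector<float>> inputs  = {{0.f,0.f},{0.f,1.f},{1.f,0.f},{1.f,1.f}};
    std::vector<std::vector<float>> targets = {{0.f},{1.f},{1.f},{2.f}};

    net.train(inputs, targets, 0.01f, 1000, 2); // learning rate, episodes, batch size
    net.print();

    // as_raw = true returns the strongest output neuron's raw value;
    // false would return its index (argmax) for classification.
    for(float r : net.predict(inputs, true))
    {
        std::cout << r << std::endl;
    }
    return 0;
}

Compiled together with annclasses.cpp (e.g. g++ main.cpp annclasses.cpp), this exercises the full train/print/predict cycle on a single-output network, where predict's argmax over one neuron degenerates to reading that neuron's value.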