// Implementation of the Neuron, Network and Tools classes declared in annclasses.h.
#include <iostream>
#include <cmath>
#include <cstdlib>
#include <ctime>
#include <algorithm>
#include <forward_list>
#include <iterator>
#include <list>
#include <numeric>
#include <vector>
#include "annclasses.h"
-
- using namespace std;
-
- Neuron::Neuron(int prev_layer_size)
- {
- for(int i(1) ; i<=prev_layer_size ; i++)
- {
- weights.push_front(Tools::get_random(0.0, 1.0));
- //weights.push_front(1.0);
- }
- bias = 0.1;
- weighted_sum = 0.0;
- activated_output = 0.0;
- derror = 0.0;
- }
-
- void Neuron::set_bias(float value)
- {
- bias = value;
- }
-
- float Neuron::get_bias()
- {
- return bias;
- }
-
- void Neuron::set_nth_weight(int n, float value)
- {
- int i=1;
- forward_list<float>::iterator current_weight(weights.begin());
- while(i<n)
- {
- current_weight++;
- i++;
- }
- *current_weight = value;
- }
-
- float Neuron::get_nth_weight(int n)
- {
- int i=1;
- forward_list<float>::iterator current_weight(weights.begin());
- while(i<n)
- {
- current_weight++;
- i++;
- }
- return *current_weight;
- }
-
- float Neuron::get_weighted_sum()
- {
- return weighted_sum;
- }
-
- void Neuron::set_activated_output(float value)
- {
- activated_output = value;
- }
-
- float Neuron::get_activated_output()
- {
- return activated_output;
- }
-
- void Neuron::set_derror(float value)
- {
- derror = value;
- }
-
- float Neuron::get_derror()
- {
- return derror;
- }
-
- void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
- {
- weighted_sum = bias;
- for(forward_list<float>::iterator it(weights.begin()) ; it!=weights.end() ; ++it)
- {
- weighted_sum += (*it) * (prev_layer_it->activated_output);
- prev_layer_it++;
- }
- activated_output = Tools::activation_function(activ_function, weighted_sum);
- }
-
-
- Network::Network(int n_layers, int n_neurons)
- {
- for(int i(1) ; i<=n_layers ; i++)
- {
- forward_list<Neuron> current_layer;
- for(int j(1) ; j<=n_neurons ; j++)
- {
- if(i==1)
- {
- current_layer.push_front( Neuron(0) );
- }else if(i==n_layers)
- {
- current_layer.push_front( Neuron(n_neurons) );
- }else
- {
- current_layer.push_front( Neuron(n_neurons) );
- }
- }
- layers.push_back(current_layer);
- }
- h_activ = RELU;
- //o_activ = SIGMOID;
- o_activ = LINEAR;
-
- neurons_number = n_layers*n_neurons;
- }
-
- Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
- {
- for(int i(0) ; i<n_neurons.size() ; i++)
- {
- forward_list<Neuron> current_layer;
- for(int j(1) ; j<=n_neurons[i] ; j++)
- {
- if(i==0)
- {
- current_layer.push_front( Neuron(0) );
- }else if(i==n_neurons.size()-1)
- {
- current_layer.push_front( Neuron(n_neurons[i-1]) );
- }else
- {
- current_layer.push_front( Neuron(n_neurons[i-1]) );
- }
- }
- layers.push_back(current_layer);
- }
- h_activ = h_activ;
- o_activ = o_activ;
-
- neurons_number = accumulate(n_neurons.begin(), n_neurons.end(), 0);
- }
-
- int Network::get_neurons_number()
- {
- return neurons_number;
- }
-
- bool Network::train(const vector<vector<float>> &inputs, const vector<vector<float>> &targets, float learning_rate, int n_episodes, int batch_size)
- {
- if(inputs.size() == targets.size())
- {
- //vector<vector<float>> all_activated_outputs(get_neurons_number());
- //vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
- for(int episode=1 ; episode<=n_episodes ; episode++)
- {
- for(int batch_index(0) ; batch_index<inputs.size() ; batch_index+=batch_size)
- {
- vector<vector<float>> all_activated_outputs(get_neurons_number());
- vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
- int layer_counter;
- int neurons_counter1;
- int neurons_counter2;
- for(int index(batch_index) ; index<inputs.size() && index<batch_index+batch_size ; index++)//batch_size not yet used
- {
- forward(inputs.at(index), targets.at(index));
- set_errors(targets.at(index));
-
- layer_counter = 0;
- neurons_counter1 = 0;
- neurons_counter2 = 0;
- for(list<forward_list<Neuron>>::iterator cur_layer(layers.begin()) ; cur_layer!=layers.end() ; ++cur_layer)
- {
- layer_counter++;
- if(layer_counter==1)
- {
- for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
- {
- all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
- neurons_counter1++;
- }
- }else
- {
- for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
- {
- all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
- neurons_counter1++;
-
- all_derrors.at(neurons_counter2).push_back( cur_neuron->get_derror() );
- neurons_counter2++;
- }
- }
- }
- }
-
- layer_counter = 0;
- neurons_counter1 = 0;
- neurons_counter2 = 0;
- for(list<forward_list<Neuron>>::iterator cur_layer(layers.begin()) ; cur_layer!=layers.end() ; ++cur_layer)
- {
- layer_counter++;
- if(layer_counter==1)
- {
- for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
- {
- cur_neuron->set_activated_output( accumulate(all_activated_outputs.at(neurons_counter1).begin(),
- all_activated_outputs.at(neurons_counter1).end(),0)/all_activated_outputs.at(neurons_counter1).size() );
- //all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
- neurons_counter1++;
- }
- }else
- {
- for(forward_list<Neuron>::iterator cur_neuron(cur_layer->begin()) ; cur_neuron!=cur_layer->end() ; ++cur_neuron)
- {
- cur_neuron->set_activated_output( accumulate(all_activated_outputs.at(neurons_counter1).begin(),
- all_activated_outputs.at(neurons_counter1).end(),0)/all_activated_outputs.at(neurons_counter1).size() );
- //all_activated_outputs.at(neurons_counter1).push_back( cur_neuron->get_activated_output() );
- neurons_counter1++;
-
- cur_neuron->set_derror( accumulate(all_derrors.at(neurons_counter2).begin(),
- all_derrors.at(neurons_counter2).end(),0)/all_derrors.at(neurons_counter2).size() );
- //all_derrors.at(neurons_counter2).push_back( cur_neuron->get_derror() );
- neurons_counter2++;
- }
- }
- }
- backward(learning_rate);
- }
- //backward(learning_rate);
- }
- }else
- {
- cerr << "Inputs and targets vectors have different size" << endl;
- exit(-1);
- }
- return true;
- }
-
- bool Network::forward(const std::vector<float> &input, const std::vector<float> &target)
- {
- int layer_counter = 0;
- for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
- {//inside current layer
- layer_counter++;
- if(layer_counter==1)
- {
- int i=0;
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- current_neuron->set_activated_output( input.at(i) );
- i++;
- }
- }else if(layer_counter==layers.size())
- {
- list<forward_list<Neuron>>::iterator temp = current_layer;
- temp--; //previous layer
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- forward_list<Neuron>::iterator prev_layer_it(temp->begin());
- current_neuron->activate(prev_layer_it, o_activ);
- }
- }else
- {
- list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
- temp_prev_layer--; ////temp_prev_layer set now at previous layer
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
- current_neuron->activate(prev_layer_it, h_activ);
- }
- }
- }
- //set_errors(target);
- return true;
- }
-
// Backpropagates the error terms (derror) from the output layer towards the
// input layer. Must be called after forward() so weighted sums and
// activations are current. The input layer gets no error term.
bool Network::set_errors(const std::vector<float> &target)
{
    int layer_counter = layers.size()+1;
    // Walk the layers in reverse (output first).
    for(list<forward_list<Neuron>>::reverse_iterator current_layer(layers.rbegin()) ; current_layer!=layers.rend() ; ++current_layer)
    {//inside current layer
        layer_counter--;
        if(layer_counter==layers.size())
        {
            // Output layer: derror = (output - target) * f'(weighted_sum),
            // i.e. the gradient of a squared-error loss through o_activ.
            int i=0;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {//inside current neuron
                current_neuron->set_derror( (current_neuron->get_activated_output()-target.at(i))*Tools::activation_function_derivative(o_activ,current_neuron->get_weighted_sum()) );
                i++;
            }
        }else if(layer_counter>1) //all hidden layers
        {
            // Decrementing a reverse_iterator moves it towards rbegin(), i.e.
            // towards the *next* (deeper) layer in forward order.
            list<forward_list<Neuron>>::reverse_iterator temp_next_layer = current_layer; //temp_next_layer set at current layer
            temp_next_layer--; //temp_next_layer set now at next layer
            int neuron_counter=0;
            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
            {//inside current neuron
                neuron_counter++;
                current_neuron->set_derror(0.0);
                // Accumulate each next-layer neuron's derror weighted by its
                // connection to *this* neuron: the neuron_counter-th weight,
                // matching the pairing order used in Neuron::activate().
                for(forward_list<Neuron>::iterator next_layer_current_neuron(temp_next_layer->begin()) ; next_layer_current_neuron!=temp_next_layer->end() ; ++next_layer_current_neuron)
                {
                    current_neuron->set_derror( current_neuron->get_derror()+next_layer_current_neuron->get_derror()*next_layer_current_neuron->get_nth_weight(neuron_counter) );
                }
                // Finally scale by the derivative of the hidden activation.
                current_neuron->set_derror( current_neuron->get_derror()*Tools::activation_function_derivative(h_activ,current_neuron->get_weighted_sum()) );
            }
        }
        // layer_counter==1 is the input layer: no error term to compute.
    }
    return true;
}
-
- bool Network::backward(float learning_rate)
- {
- int layer_counter = layers.size()+1;
- for(list<forward_list<Neuron>>::reverse_iterator current_layer(layers.rbegin()) ; current_layer!=layers.rend() ; ++current_layer)
- {//inside current layer
- layer_counter--;
- if(layer_counter>1) //all layers except input layer
- {
- list<forward_list<Neuron>>::reverse_iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
- temp_prev_layer++; //temp_prev_layer set now at previous layer
-
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- int neuron_counter=0;
-
- for(forward_list<Neuron>::iterator prev_layer_current_neuron(temp_prev_layer->begin()) ; prev_layer_current_neuron!=temp_prev_layer->end() ; ++prev_layer_current_neuron)
- {
- neuron_counter++;
- current_neuron->set_nth_weight( neuron_counter, current_neuron->get_nth_weight(neuron_counter)-learning_rate*current_neuron->get_derror()*prev_layer_current_neuron->get_activated_output() );
- }
- current_neuron->set_bias( current_neuron->get_bias()-learning_rate*current_neuron->get_derror() );
- }
- }
- }
- return true;
- }
-
- bool neuron_cmp(Neuron a, Neuron b){return a.get_activated_output()<b.get_activated_output();}
-
- vector<float> Network::predict(const vector<vector<float>> &inputs, bool as_raw)
- {
- vector<float> results;
- for(auto input : inputs)
- {
- int layer_counter = 0;
- for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
- {//inside current layer
- layer_counter++;
- if(layer_counter==1)
- {
- int i=0;
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- current_neuron->set_activated_output( input.at(i) );
- i++;
- }
- }else if(layer_counter==layers.size())
- {
- list<forward_list<Neuron>>::iterator temp = current_layer;
- temp--; //previous layer
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- forward_list<Neuron>::iterator prev_layer_it(temp->begin());
- current_neuron->activate(prev_layer_it, o_activ);
- }
- }else
- {
- list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
- temp_prev_layer--; //temp_prev_layer set now at previous layer
- for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
- {//inside current neuron
- forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
- current_neuron->activate(prev_layer_it, h_activ);
- }
- }
- }
- list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
- if(as_raw)
- {
- results.push_back( max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output() );
- }else
- {
- results.push_back( distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) ) );
- }
- }
- return results;
- }
-
- void Network::print()
- {
- cout << endl << "#>>==========================================<<#" << endl;
- cout << "# NEURAL NETWORK #" << endl;
- cout << "#>>==========================================<<#" << endl;
- cout << ">> Number of layers : " << layers.size() << endl;
- cout << "------------------------------------------------" << endl;
- int layer_counter = 0;
- int prev_layer_size_temp = 0, params_counter = 0;
- for(list<forward_list<Neuron>>::iterator it1(layers.begin()) ; it1!=layers.end() ; ++it1)
- {
- layer_counter++;
- int current_layer_size = 0;
- for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2)
- {
- current_layer_size++;
- }
- if(layer_counter==1)
- {
- prev_layer_size_temp = current_layer_size;
- }
- else
- {
- params_counter += (prev_layer_size_temp+1)*current_layer_size;
- prev_layer_size_temp = current_layer_size;
- }
- if(layer_counter==1)
- {
- cout << ">> Input layer" << endl;
- cout << "size : " << current_layer_size << endl;
- cout << "neurons' activations : ";
- for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " ";}
- cout << endl;
- }else if(layer_counter==layers.size())
- {
- cout << (">> Output layer\n");
- cout << "size : " << current_layer_size << endl;
- cout << ("neurons' activations : ");
- //for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " ";}
- for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " " << it2->get_derror() << endl; for(int i=1;i<=3;i++){cout << it2->get_nth_weight(i) << " ";}cout<<endl;}//to be deleted
- cout << endl;
- }else
- {
- cout << ">> Hidden layer " << layer_counter-1 << endl;
- cout << "size : " << current_layer_size << endl;
- for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " " << it2->get_derror() << endl;}//to be deleted
- }
- cout << "------------------------------------------------" << endl;
- }
- cout << "Number of parameters : ";
- cout << params_counter << endl;
- cout << "#>>==========================================<<#" << endl << endl;
- }
-
- void Tools::activate_randomness()
- {
- srand(time(NULL));
- }
-
- float Tools::get_random(float mini, float maxi)
- {
- return mini + ((float)rand()/(float)RAND_MAX) * (maxi-mini);
- }
-
- float Tools::activation_function(Activ activ, float value)
- {
- Tools t;
- switch(activ)
- {
- case RELU:
- return t.relu(value);
-
- case SIGMOID:
- return t.sigmoid(value);
-
- case TANH:
- return tanh(value);
-
- case LINEAR:
- return value;
- default:
- exit(-1);
- }
- }
-
- float Tools::activation_function_derivative(Activ activ, float value)
- {
- Tools t;
- switch(activ)
- {
- case RELU:
- return t.relu_derivative(value);
-
- case SIGMOID:
- return t.sigmoid_derivative(value);
-
- case TANH:
- return t.tanh_derivative(value);
-
- case LINEAR:
- return 1.0;
- default:
- exit(-1);
- }
- }
-
- float Tools::relu(float value)
- {
- return (value > 0.0) ? value : 0.0;
- }
-
- float Tools::sigmoid(float value)
- {
- return 1.0 / (1.0 + exp(-value));
- }
-
- float Tools::relu_derivative(float value)
- {
- return (value > 0.0) ? 1.0 : 0.0;
- }
-
- float Tools::sigmoid_derivative(float value)
- {
- return sigmoid(value) * (1.0 - sigmoid(value));
- }
-
- float Tools::tanh_derivative(float value)
- {
- return 1.0 - (tanh(value) * tanh(value));
- }
|