diff --git a/annclasses.cpp b/annclasses.cpp
index 0002a40..7608ee3 100755
--- a/annclasses.cpp
+++ b/annclasses.cpp
@@ -3,6 +3,7 @@
 #include <iostream>
 #include <algorithm>
 #include <cstdlib>
+#include <numeric>
 #include "annclasses.h"
 
 using namespace std;
@@ -114,6 +115,8 @@ Network::Network(int n_layers, int n_neurons)
 	h_activ = RELU;
 	//o_activ = SIGMOID;
 	o_activ = LINEAR;
+
+	neurons_number = n_layers*n_neurons;
 }
 
 Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
@@ -138,15 +141,61 @@ Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ
 	}
 	this->h_activ = h_activ;
 	this->o_activ = o_activ;
+
+	neurons_number = accumulate(n_neurons.begin(), n_neurons.end(), 0);
 }
 
-bool Network::train(const std::vector<float> &input, const std::vector<float> &target, float learning_rate, int n_episodes)
+int Network::get_neurons_number()
 {
-	for(int episode=1;episode<=n_episodes;episode++)
+	return neurons_number;
+}
+
+bool Network::train(const vector<vector<float>> &inputs, const vector<vector<float>> &targets, float learning_rate, int n_episodes, int batch_size)
+{
+	if(inputs.size() == targets.size())
 	{
-		forward(input, target);
-		set_errors(target);
-		backward(learning_rate);
+		vector<vector<float>> all_activated_outputs(get_neurons_number());
+		vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
+		bool is_constructed = false;
+		for(int episode=1 ; episode<=n_episodes ; episode++)
+		{
+			for(int index(0) ; index<inputs.size() ; index++)
+			{
+				forward(inputs.at(index), targets.at(index));
+				set_errors(targets.at(index));
+
+				int layer_counter = 0;
+				int neurons_counter1 = 0;
+				int neurons_counter2 = 0;
+				for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+				{
+					layer_counter++;
+					if(layer_counter==1)
+					{
+						for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+						{
+							all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+							neurons_counter1++;
+						}
+					}else
+					{
+						for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+						{
+							all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+							neurons_counter1++;
+
+							all_derrors.at(neurons_counter2).push_back( current_neuron->get_derror() );
+							neurons_counter2++;
+						}
+					}
+				}
+			}
+			backward(learning_rate);
+		}
+	}else
+	{
+		cerr << "Inputs and targets vectors have different size" << endl;
+		exit(-1);
 	}
 	return true;
 }
@@ -252,48 +301,53 @@ bool Network::backward(float learning_rate)
 
 bool neuron_cmp(Neuron a, Neuron b){return a.get_activated_output()<b.get_activated_output();}
 
-float Network::predict(const std::vector<float> &input, bool as_raw)
+vector<float> Network::predict(const vector<vector<float>> &inputs, bool as_raw)
 {
-	int layer_counter = 0;
-	for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
-	{//inside current layer
-		layer_counter++;
-		if(layer_counter==1)
-		{
-			int i=0;
-			for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-			{//inside current neuron
-				current_neuron->set_activated_output( input.at(i) );
-				i++;
-			}
-		}else if(layer_counter==layers.size())
-		{
-			list<forward_list<Neuron>>::iterator temp = current_layer;
-			temp--; //previous layer
-			for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-			{//inside current neuron
-				forward_list<Neuron>::iterator prev_layer_it(temp->begin());
-				current_neuron->activate(prev_layer_it, o_activ);
-			}
-		}else
-		{
-			list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
-			temp_prev_layer--; ////temp_prev_layer set now at previous layer
-			for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-			{//inside current neuron
-				forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
-				current_neuron->activate(prev_layer_it, h_activ);
+	vector<float> results;
+	for(auto input : inputs)
+	{
+		int layer_counter = 0;
+		for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+		{//inside current layer
+			layer_counter++;
+			if(layer_counter==1)
+			{
+				int i=0;
+				for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+				{//inside current neuron
+					current_neuron->set_activated_output( input.at(i) );
+					i++;
+				}
+			}else if(layer_counter==layers.size())
+			{
+				list<forward_list<Neuron>>::iterator temp = current_layer;
+				temp--; //previous layer
+				for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+				{//inside current neuron
+					forward_list<Neuron>::iterator prev_layer_it(temp->begin());
+					current_neuron->activate(prev_layer_it, o_activ);
+				}
+			}else
+			{
+				list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
+				temp_prev_layer--; //temp_prev_layer set now at previous layer
+				for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+				{//inside current neuron
+					forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
+					current_neuron->activate(prev_layer_it, h_activ);
+				}
 			}
 		}
+		list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
+		if(as_raw)
+		{
+			results.push_back( max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output() );
+		}else
+		{
+			results.push_back( distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) ) );
+		}
 	}
-	list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
-	if(as_raw)
-	{
-		return max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output();
-	}else
-	{
-		return distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) );
-	}
+	return results;
 }
 
 void Network::print()
diff --git a/annclasses.h b/annclasses.h
index f8694e8..01c1a09 100755
--- a/annclasses.h
+++ b/annclasses.h
@@ -1,5 +1,5 @@
-#ifndef MYCLASSES_H
-#define MYCLASSES_H
+#ifndef ANNCLASSES_H
+#define ANNCLASSES_H
 
 #include <vector>
 #include <list>
@@ -39,10 +39,12 @@ class Network
 public:
 	Network(int n_layers, int n_neurons);
 	Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);
-	
-	bool train(const std::vector<float> &input, const std::vector<float> &target, float learning_rate, int n_episodes);
-	float predict(const std::vector<float> &input, bool as_raw=true);
+	int get_neurons_number();
+	
+	bool train(const std::vector<std::vector<float>> &inputs, const std::vector<std::vector<float>> &targets, float learning_rate=0.001, int n_episodes=30, int batch_size=32);
+	
+	std::vector<float> predict(const std::vector<std::vector<float>> &inputs, bool as_raw=true);
 
 	void print(); //to be deleted
 
@@ -51,6 +53,7 @@ public:
 	//bool backward(float learning_rate);
 private:
 	std::list<std::forward_list<Neuron>> layers;
+	int neurons_number;
 	
 	Activ h_activ;
 	Activ o_activ;
diff --git a/main.cpp b/main.cpp
index 0cfc028..e094acd 100755
--- a/main.cpp
+++ b/main.cpp
@@ -11,14 +11,21 @@ int main(int argc, char *argv[])
 
 	cout << "Bonjour et bienvenu" << endl;
 
-	Network network(15, 3);
+	Network network(15, 3);/*
 	network.print();
 	cout << endl << endl;
 	network.train({1.0,1.0,1.0}, {1.0,2.0,3.0}, 0.001, 100000);
 	//network.print();
 	cout << endl << endl;
-	network.print();
-	cout << "verdict : " << network.predict({1.0,1.0,1.0},false) << endl;
+	network.print();*/
+	cout << "Network has " << network.get_neurons_number() << " neurons" << endl;
+	for(auto e : network.predict({{1.0,1.0,1.0},{2.0,1.3,0.0}},false))
+	{
+		cout << e << " " << endl;
+	}
+
+	//vector<vector<float>> v(5);
+	//cout << v.at(0).size() << endl;
 
 	return 0;
 }
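
Usage note: a minimal sketch of how the new batched train()/predict() API might be driven, using only the signatures and defaults introduced in this patch. The {3, 4, 2} topology and the sample data are illustrative assumptions, not part of the patch; note also that batch_size is accepted by train() but does not appear to be consumed yet.

	#include <iostream>
	#include <vector>
	#include "annclasses.h"

	using namespace std;

	int main()
	{
		// Hypothetical topology (illustrative): 3 inputs, one hidden layer of 4 neurons, 2 outputs.
		Network net({3, 4, 2}, RELU, SIGMOID);

		// Two samples with one-hot targets (illustrative data only).
		vector<vector<float>> inputs  = {{1.0f, 1.0f, 1.0f}, {2.0f, 1.3f, 0.0f}};
		vector<vector<float>> targets = {{1.0f, 0.0f}, {0.0f, 1.0f}};

		// learning_rate=0.001, n_episodes=30, batch_size=32 come from the new defaults in annclasses.h.
		net.train(inputs, targets);

		// With as_raw=false, predict returns, per sample, the index of the most active output neuron.
		for(float e : net.predict(inputs, false))
		{
			cout << e << endl;
		}

		return 0;
	}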