diff --git a/main.cpp b/main.cpp
index 3f68015..3165d4e 100644
--- a/main.cpp
+++ b/main.cpp
@@ -16,27 +16,18 @@ int main(int argc, char *argv[])
 
     cout << "Bonjour et bienvenu" << endl;
 
-    Network network(3, 3);
-    network.forward({1.0,1.0,1.0}, {1.0,2.0,3.0});
+    Network network(15, 3);
     network.print();
-
-    /*Neuron n(3), n1(1), n2(1), n3(1);
-    forward_list<Neuron> fl;
-    fl.push_front(n1);
-    fl.push_front(n2);
-    fl.push_front(n3);
-    forward_list<Neuron>::iterator it(fl.begin());
-    n.activate(it, LINEAR);
-    cout << "weighted sum = " << n.get_weighted_sum() << endl;*/
-
-    /*list<float> l;
-    l.push_back(1.0);
-    l.push_back(2.0);
-    l.push_back(3.0);
-    for(list<float>::reverse_iterator it(l.rbegin()) ; it!=l.rend() ; ++it)
+    cout << endl << endl;
+    for(int episode=1;episode<=100000;episode++)
     {
-        cout << *it << endl;
-    }*/
+        network.forward({1.0,1.0,1.0}, {1.0,2.0,3.0});
+        network.backward(0.001);
+    }
+    //network.print();
+    cout << endl << endl;
+    network.print();
+    cout << "verdict : " << network.predict({1.0,1.0,1.0},false) << endl;
 
     return 0;
 }
diff --git a/myclasses.cpp b/myclasses.cpp
index cb15479..fdf0341 100644
--- a/myclasses.cpp
+++ b/myclasses.cpp
@@ -2,6 +2,7 @@
 #include <iostream>
 #include <forward_list>
 #include <list>
+#include <algorithm>
 #include "myclasses.h"
 
 using namespace std;
@@ -19,29 +20,14 @@ Neuron::Neuron(int prev_layer_size)
     derror = 0.0;
 }
 
-void Neuron::set_activated_output(float value)
+void Neuron::set_bias(float value)
 {
-    activated_output = value;
+    bias = value;
 }
 
-float Neuron::get_weighted_sum()
+float Neuron::get_bias()
 {
-    return weighted_sum;
-}
-
-float Neuron::get_activated_output()
-{
-    return activated_output;
-}
-
-void Neuron::set_derror(float value)
-{
-    derror = value;
-}
-
-float Neuron::get_derror()
-{
-    return derror;
+    return bias;
 }
 
 void Neuron::set_nth_weight(int n, float value)
@@ -68,6 +54,31 @@ float Neuron::get_nth_weight(int n)
     return *current_weight;
 }
 
+float Neuron::get_weighted_sum()
+{
+    return weighted_sum;
+}
+
+void Neuron::set_activated_output(float value)
+{
+    activated_output = value;
+}
+
+float Neuron::get_activated_output()
+{
+    return activated_output;
+}
+
+void Neuron::set_derror(float value)
+{
+    derror = value;
+}
+
+float Neuron::get_derror()
+{
+    return derror;
+}
+
 void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
 {
     weighted_sum = bias;
@@ -211,24 +222,67 @@ bool Network::backward(float learning_rate)
         {
             list<forward_list<Neuron>>::reverse_iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
             temp_prev_layer++; //temp_prev_layer set now at previous layer
-            int neuron_counter=0;
+
             for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
             {//inside current neuron
-                neuron_counter++;
+                int neuron_counter=0;
+
                 for(forward_list<Neuron>::iterator prev_layer_current_neuron(temp_prev_layer->begin()) ; prev_layer_current_neuron!=temp_prev_layer->end() ; ++prev_layer_current_neuron)
                 {
-                    //current_neuron->set_nth_weight()
-                    current_neuron->set_derror( current_neuron->get_derror()+prev_layer_current_neuron->get_derror()*prev_layer_current_neuron->get_nth_weight(neuron_counter) );
+                    neuron_counter++;
+                    current_neuron->set_nth_weight( neuron_counter, current_neuron->get_nth_weight(neuron_counter)-learning_rate*current_neuron->get_derror()*prev_layer_current_neuron->get_activated_output() );
                 }
+                current_neuron->set_bias( current_neuron->get_bias()-learning_rate*current_neuron->get_derror() );
             }
         }
     }
     return true;
 }
 
-float Network::predict(const std::vector<float> &input)
+bool neuron_cmp(Neuron a, Neuron b){return a.get_activated_output()<b.get_activated_output();}
+
+float Network::predict(const std::vector<float> &input, bool as_raw)
 {
-    return 0.0;
+    int layer_counter = 0;
+    for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+    {//inside current layer
+        layer_counter++;
+        if(layer_counter==1)
+        {
+            int i=0;
+            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+            {//inside current neuron
+                current_neuron->set_activated_output( input.at(i) );
+                i++;
+            }
+        }else if(layer_counter==layers.size())
+        {
+            list<forward_list<Neuron>>::iterator temp = current_layer;
+            temp--; //previous layer
+            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+            {//inside current neuron
+                forward_list<Neuron>::iterator prev_layer_it(temp->begin());
+                current_neuron->activate(prev_layer_it, o_activ);
+            }
+        }else
+        {
+            list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
+            temp_prev_layer--; //temp_prev_layer set now at previous layer
+            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+            {//inside current neuron
+                forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
+                current_neuron->activate(prev_layer_it, h_activ);
+            }
+        }
+    }
+    list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
+    if(as_raw)
+    {
+        return max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output();
+    }else
+    {
+        return distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) );
+    }
 }
 
 void Network::print()
diff --git a/myclasses.h b/myclasses.h
index a34a196..e65b590 100644
--- a/myclasses.h
+++ b/myclasses.h
@@ -15,15 +15,15 @@ class Neuron
 {
 public:
     Neuron(int prev_layer_size); //prev_layer_size = number of weights
-    //void set_weighted_sum(float weighted_sum);
+    void set_bias(float value);
+    float get_bias();
+    void set_nth_weight(int n, float value);
+    float get_nth_weight(int n);
     float get_weighted_sum();
     void set_activated_output(float value);
     float get_activated_output();
     void set_derror(float value);
     float get_derror();
-    void set_nth_weight(int n, float value);
-    float get_nth_weight(int n);
-    //std::forward_list<float> &get_weights();
     void activate(std::forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function=LINEAR);
 private:
     std::forward_list<float> weights;
@@ -40,12 +40,14 @@ public:
 
     Network(int n_layers, int n_neurons);
     Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);
-    float predict(const std::vector<float> &input);
+
+    float predict(const std::vector<float> &input, bool as_raw=true);
     void print(); //to be deleted
 
     bool forward(const std::vector<float> &input, const std::vector<float> &target);
     bool set_errors(const std::vector<float> &target);
+    bool backward(float learning_rate);
 private:
     std::list<std::forward_list<Neuron>> layers;
     Activ h_activ;
@@ -53,7 +55,7 @@ private:
 
     //bool forward(const std::vector<float> &input, const std::vector<float> &target);
     //bool set_errors(const std::vector<float> &target);
-    bool backward(float learning_rate);
+    //bool backward(float learning_rate);
 };
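A note on what the patch does, plus a standalone sketch. `Network::backward` now applies a plain gradient-descent step per neuron: each weight moves against the neuron's error signal scaled by the corresponding activation from the previous layer, and the bias moves against the error signal alone. `Network::predict` then reads the answer off the output layer with `max_element` (hence the new `#include <algorithm>` in myclasses.cpp), returning either the strongest activation (`as_raw=true`) or its position, i.e. the predicted class (`as_raw=false`). The sketch below mirrors those two rules with plain `std::vector` in place of the repo's `Neuron`/`forward_list` plumbing; every name and value in it is illustrative, not part of the patch.

```cpp
// Sketch only: reproduces the update rule from Network::backward and the
// argmax read-out from Network::predict on hand-rolled data.
#include <algorithm>
#include <iostream>
#include <iterator>
#include <vector>

int main()
{
    const float learning_rate = 0.001f; // the value main.cpp passes to backward()

    // One neuron's state, standing in for Neuron's private fields.
    float bias = 0.5f;
    std::vector<float> weights = {0.2f, -0.1f, 0.4f};
    std::vector<float> prev_activations = {1.0f, 1.0f, 1.0f}; // previous layer's activated outputs
    float derror = 0.3f; // error signal, as produced earlier by set_errors()

    // Per-neuron step taken by Network::backward:
    //   weight_n <- weight_n - lr * derror * prev_activation_n
    //   bias     <- bias     - lr * derror
    for (std::size_t n = 0; n < weights.size(); ++n)
        weights[n] -= learning_rate * derror * prev_activations[n];
    bias -= learning_rate * derror;

    // Read-out performed by Network::predict on the output layer:
    //   as_raw=true  -> activated output of the strongest neuron
    //   as_raw=false -> its index, i.e. the predicted class
    std::vector<float> outputs = {0.1f, 0.7f, 0.2f};
    auto best = std::max_element(outputs.begin(), outputs.end());
    std::cout << "raw: " << *best
              << ", class: " << std::distance(outputs.begin(), best) << '\n';
    return 0;
}
```

Compiled with `g++ -std=c++11`, this should print `raw: 0.7, class: 1`, matching how main.cpp interprets the `verdict` line it prints after training.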