Update the ANN error-backpropagation function
This commit is contained in:
parent
f7193b3e33
commit
77be210265
3 changed files with 58 additions and 5 deletions
4
main.cpp
4
main.cpp
|
@ -16,8 +16,8 @@ int main(int argc, char *argv[])
|
||||||
|
|
||||||
cout << "Bonjour et bienvenu" << endl;
|
cout << "Bonjour et bienvenu" << endl;
|
||||||
|
|
||||||
Network network(2, 5);
|
Network network(3, 3);
|
||||||
network.forward({1.0,1.0,1.0,1.0,1.0}, {1.0,1.0,1.0,1.0,1.0});
|
network.forward({1.0,1.0,1.0}, {1.0,2.0,3.0});
|
||||||
network.print();
|
network.print();
|
||||||
|
|
||||||
/*Neuron n(3), n1(1), n2(1), n3(1);
|
/*Neuron n(3), n1(1), n2(1), n3(1);
|
||||||
|
|
|
@ -44,6 +44,30 @@ float Neuron::get_derror()
|
||||||
return derror;
|
return derror;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Neuron::set_nth_weight(int n, float value)
|
||||||
|
{
|
||||||
|
int i=1;
|
||||||
|
forward_list<float>::iterator current_weight(weights.begin());
|
||||||
|
while(i<n)
|
||||||
|
{
|
||||||
|
current_weight++;
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
*current_weight = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
float Neuron::get_nth_weight(int n)
|
||||||
|
{
|
||||||
|
int i=1;
|
||||||
|
forward_list<float>::iterator current_weight(weights.begin());
|
||||||
|
while(i<n)
|
||||||
|
{
|
||||||
|
current_weight++;
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
return *current_weight;
|
||||||
|
}
|
||||||
|
|
||||||
void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
|
void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
|
||||||
{
|
{
|
||||||
weighted_sum = bias;
|
weighted_sum = bias;
|
||||||
|
@ -139,6 +163,7 @@ bool Network::forward(const std::vector<float> &input, const std::vector<float>
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
set_errors(target);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -160,13 +185,16 @@ bool Network::set_errors(const std::vector<float> &target)
|
||||||
{
|
{
|
||||||
list<forward_list<Neuron>>::reverse_iterator temp_next_layer = current_layer; //temp_next_layer set at current layer
|
list<forward_list<Neuron>>::reverse_iterator temp_next_layer = current_layer; //temp_next_layer set at current layer
|
||||||
temp_next_layer--; //temp_next_layer set now at next layer
|
temp_next_layer--; //temp_next_layer set now at next layer
|
||||||
|
int neuron_counter=0;
|
||||||
for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
|
for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
|
||||||
{//inside current neuron
|
{//inside current neuron
|
||||||
|
neuron_counter++;
|
||||||
current_neuron->set_derror(0.0);
|
current_neuron->set_derror(0.0);
|
||||||
for(forward_list<Neuron>::iterator next_layer_current_neuron(temp_next_layer->begin()) ; next_layer_current_neuron!=temp_next_layer->end() ; ++next_layer_current_neuron)
|
for(forward_list<Neuron>::iterator next_layer_current_neuron(temp_next_layer->begin()) ; next_layer_current_neuron!=temp_next_layer->end() ; ++next_layer_current_neuron)
|
||||||
{
|
{
|
||||||
//
|
current_neuron->set_derror( current_neuron->get_derror()+next_layer_current_neuron->get_derror()*next_layer_current_neuron->get_nth_weight(neuron_counter) );
|
||||||
}
|
}
|
||||||
|
current_neuron->set_derror( current_neuron->get_derror()*Tools::activation_function_derivative(h_activ,current_neuron->get_weighted_sum()) );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -175,6 +203,26 @@ bool Network::set_errors(const std::vector<float> &target)
|
||||||
|
|
||||||
bool Network::backward(float learning_rate)
|
bool Network::backward(float learning_rate)
|
||||||
{
|
{
|
||||||
|
int layer_counter = layers.size()+1;
|
||||||
|
for(list<forward_list<Neuron>>::reverse_iterator current_layer(layers.rbegin()) ; current_layer!=layers.rend() ; ++current_layer)
|
||||||
|
{//inside current layer
|
||||||
|
layer_counter--;
|
||||||
|
if(layer_counter>1) //all layers except input layer
|
||||||
|
{
|
||||||
|
list<forward_list<Neuron>>::reverse_iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
|
||||||
|
temp_prev_layer++; //temp_prev_layer set now at previous layer
|
||||||
|
int neuron_counter=0;
|
||||||
|
for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
|
||||||
|
{//inside current neuron
|
||||||
|
neuron_counter++;
|
||||||
|
for(forward_list<Neuron>::iterator prev_layer_current_neuron(temp_prev_layer->begin()) ; prev_layer_current_neuron!=temp_prev_layer->end() ; ++prev_layer_current_neuron)
|
||||||
|
{
|
||||||
|
//current_neuron->set_nth_weight()
|
||||||
|
current_neuron->set_derror( current_neuron->get_derror()+prev_layer_current_neuron->get_derror()*prev_layer_current_neuron->get_nth_weight(neuron_counter) );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -221,12 +269,14 @@ void Network::print()
|
||||||
cout << (">> Output layer\n");
|
cout << (">> Output layer\n");
|
||||||
cout << "size : " << current_layer_size << endl;
|
cout << "size : " << current_layer_size << endl;
|
||||||
cout << ("neurons' activations : ");
|
cout << ("neurons' activations : ");
|
||||||
for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " ";}
|
//for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " ";}
|
||||||
|
for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " " << it2->get_derror() << endl; for(int i=1;i<=3;i++){cout << it2->get_nth_weight(i) << " ";}cout<<endl;}//to be deleted
|
||||||
cout << endl;
|
cout << endl;
|
||||||
}else
|
}else
|
||||||
{
|
{
|
||||||
cout << ">> Hidden layer " << layer_counter-1 << endl;
|
cout << ">> Hidden layer " << layer_counter-1 << endl;
|
||||||
cout << "size : " << current_layer_size << endl;
|
cout << "size : " << current_layer_size << endl;
|
||||||
|
for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){cout << it2->get_activated_output() << " " << it2->get_derror() << endl;}//to be deleted
|
||||||
}
|
}
|
||||||
cout << "------------------------------------------------" << endl;
|
cout << "------------------------------------------------" << endl;
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,9 @@ public:
|
||||||
float get_activated_output();
|
float get_activated_output();
|
||||||
void set_derror(float value);
|
void set_derror(float value);
|
||||||
float get_derror();
|
float get_derror();
|
||||||
|
void set_nth_weight(int n, float value);
|
||||||
|
float get_nth_weight(int n);
|
||||||
|
//std::forward_list<float> &get_weights();
|
||||||
void activate(std::forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function=LINEAR);
|
void activate(std::forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function=LINEAR);
|
||||||
private:
|
private:
|
||||||
std::forward_list<float> weights;
|
std::forward_list<float> weights;
|
||||||
|
|
Loading…
Reference in a new issue