Updating Network functions
This commit is contained in: parent 756056f55c, commit 8fd19c2d8f
3 changed files with 115 additions and 51 deletions
annclasses.cpp (140 lines changed)

@@ -3,6 +3,7 @@
 #include <cmath>
 #include <forward_list>
 #include <algorithm>
+#include <numeric>
 #include "annclasses.h"

 using namespace std;

@@ -114,6 +115,8 @@ Network::Network(int n_layers, int n_neurons)
     h_activ = RELU;
     //o_activ = SIGMOID;
     o_activ = LINEAR;
+
+    neurons_number = n_layers*n_neurons;
 }

 Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)

@@ -138,15 +141,61 @@ Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
     }
     h_activ = h_activ;
     o_activ = o_activ;
+
+    neurons_number = accumulate(n_neurons.begin(), n_neurons.end(), 0);
 }

-bool Network::train(const std::vector<float> &input, const std::vector<float> &target, float learning_rate, int n_episodes)
+int Network::get_neurons_number()
 {
-    for(int episode=1;episode<=n_episodes;episode++)
+    return neurons_number;
+}
+
+bool Network::train(const vector<vector<float>> &inputs, const vector<vector<float>> &targets, float learning_rate, int n_episodes, int batch_size)
+{
+    if(inputs.size() == targets.size())
     {
-        forward(input, target);
-        set_errors(target);
-        backward(learning_rate);
+        vector<vector<float>> all_activated_outputs(get_neurons_number());
+        vector<vector<float>> all_derrors(get_neurons_number()-inputs.at(0).size());
+        bool is_constructed = false;
+        for(int episode=1 ; episode<=n_episodes ; episode++)
+        {
+            for(int index(0) ; index<inputs.size() ; index++)//batch_size not yet used
+            {
+                forward(inputs.at(index), targets.at(index));
+                set_errors(targets.at(index));
+
+                int layer_counter = 0;
+                int neurons_counter1 = 0;
+                int neurons_counter2 = 0;
+                for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+                {
+                    layer_counter++;
+                    if(layer_counter==1)
+                    {
+                        for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                        {
+                            all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+                            neurons_counter1++;
+                        }
+                    }else
+                    {
+                        for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                        {
+                            all_activated_outputs.at(neurons_counter1).push_back( current_neuron->get_activated_output() );
+                            neurons_counter1++;
+
+                            all_derrors.at(neurons_counter2).push_back( current_neuron->get_derror() );
+                            neurons_counter2++;
+                        }
+                    }
+                }
+            }
+            backward(learning_rate);
+        }
+    }else
+    {
+        cerr << "Inputs and targets vectors have different size" << endl;
+        exit(-1);
     }
     return true;
 }

@@ -252,48 +301,53 @@ bool Network::backward(float learning_rate)

 bool neuron_cmp(Neuron a, Neuron b){return a.get_activated_output()<b.get_activated_output();}

-float Network::predict(const std::vector<float> &input, bool as_raw)
+vector<float> Network::predict(const vector<vector<float>> &inputs, bool as_raw)
 {
-    int layer_counter = 0;
-    for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
-    {//inside current layer
-        layer_counter++;
-        if(layer_counter==1)
-        {
-            int i=0;
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                current_neuron->set_activated_output( input.at(i) );
-                i++;
-            }
-        }else if(layer_counter==layers.size())
-        {
-            list<forward_list<Neuron>>::iterator temp = current_layer;
-            temp--; //previous layer
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                forward_list<Neuron>::iterator prev_layer_it(temp->begin());
-                current_neuron->activate(prev_layer_it, o_activ);
-            }
-        }else
-        {
-            list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
-            temp_prev_layer--; ////temp_prev_layer set now at previous layer
-            for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
-            {//inside current neuron
-                forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
-                current_neuron->activate(prev_layer_it, h_activ);
+    vector<float> results;
+    for(auto input : inputs)
+    {
+        int layer_counter = 0;
+        for(list<forward_list<Neuron>>::iterator current_layer(layers.begin()) ; current_layer!=layers.end() ; ++current_layer)
+        {//inside current layer
+            layer_counter++;
+            if(layer_counter==1)
+            {
+                int i=0;
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    current_neuron->set_activated_output( input.at(i) );
+                    i++;
+                }
+            }else if(layer_counter==layers.size())
+            {
+                list<forward_list<Neuron>>::iterator temp = current_layer;
+                temp--; //previous layer
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    forward_list<Neuron>::iterator prev_layer_it(temp->begin());
+                    current_neuron->activate(prev_layer_it, o_activ);
+                }
+            }else
+            {
+                list<forward_list<Neuron>>::iterator temp_prev_layer = current_layer; //temp_prev_layer set at current layer
+                temp_prev_layer--; //temp_prev_layer set now at previous layer
+                for(forward_list<Neuron>::iterator current_neuron(current_layer->begin()) ; current_neuron!=current_layer->end() ; ++current_neuron)
+                {//inside current neuron
+                    forward_list<Neuron>::iterator prev_layer_it(temp_prev_layer->begin());
+                    current_neuron->activate(prev_layer_it, h_activ);
+                }
             }
         }
+        list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
+        if(as_raw)
+        {
+            results.push_back( max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output() );
+        }else
+        {
+            results.push_back( distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) ) );
+        }
     }
-    list<forward_list<Neuron>>::iterator output_layer = layers.end(); output_layer--;
-    if(as_raw)
-    {
-        return max_element(output_layer->begin(), output_layer->end(), neuron_cmp)->get_activated_output();
-    }else
-    {
-        return distance( output_layer->begin(), max_element(output_layer->begin(),output_layer->end(),neuron_cmp) );
-    }
+    return results;
 }

 void Network::print()
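A note on the second constructor in the hunk above: the parameters are named h_activ and o_activ, just like the class members, so `h_activ = h_activ;` and `o_activ = o_activ;` assign each parameter to itself and leave the members untouched. The sketch below is not part of this commit; it uses a reduced stand-in class (Net, with the same member names) only to show the usual `this->` disambiguation next to the accumulate-based neuron count:

#include <vector>
#include <numeric>
#include <iostream>

// Hypothetical stand-ins for the project's Activ enum and Network constructor,
// trimmed down to the assignment question only.
enum Activ { RELU, SIGMOID, LINEAR };

class Net
{
public:
    Net(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
    {
        // Written as "h_activ = h_activ;" the parameter is assigned to itself;
        // qualifying with this-> lets the member receive the argument.
        this->h_activ = h_activ;
        this->o_activ = o_activ;
        neurons_number = std::accumulate(n_neurons.begin(), n_neurons.end(), 0);
    }
    int get_neurons_number() { return neurons_number; }
private:
    int neurons_number;
    Activ h_activ;
    Activ o_activ;
};

int main()
{
    Net net({3, 5, 3}, RELU, SIGMOID);
    std::cout << net.get_neurons_number() << std::endl; // prints 11
    return 0;
}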
annclasses.h (13 lines changed)

@@ -1,5 +1,5 @@
-#ifndef MYCLASSES_H
-#define MYCLASSES_H
+#ifndef ANNCLASSES_H
+#define ANNCLASSES_H

 #include <forward_list>
 #include <list>

@@ -39,10 +39,12 @@ class Network
 public:
     Network(int n_layers, int n_neurons);
     Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);

-    bool train(const std::vector<float> &input, const std::vector<float> &target, float learning_rate, int n_episodes);
-
-    float predict(const std::vector<float> &input, bool as_raw=true);
+    int get_neurons_number();
+
+    bool train(const std::vector<std::vector<float>> &inputs, const std::vector<std::vector<float>> &targets, float learning_rate=0.001, int n_episodes=30, int batch_size=32);
+
+    std::vector<float> predict(const std::vector<std::vector<float>> &inputs, bool as_raw=true);
     void print();

     //to be deleted

@@ -51,6 +53,7 @@ public:
     //bool backward(float learning_rate);
 private:
     std::list<std::forward_list<Neuron>> layers;
+    int neurons_number;
     Activ h_activ;
     Activ o_activ;

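The header now exposes the batched interface with default hyper-parameters (learning_rate=0.001, n_episodes=30, batch_size=32), so callers can pass only the data. A minimal usage sketch against these declarations, assuming annclasses.h is on the include path; the layer sizes and training pairs are invented for illustration:

#include <iostream>
#include <vector>
#include "annclasses.h"

int main()
{
    // Layer sizes chosen arbitrarily for illustration: 3 inputs, 5 hidden, 2 outputs.
    Network network({3, 5, 2});

    std::vector<std::vector<float>> inputs  = {{1.0f, 1.0f, 1.0f},
                                               {2.0f, 1.3f, 0.0f}};
    std::vector<std::vector<float>> targets = {{1.0f, 0.0f},
                                               {0.0f, 1.0f}};

    // The header's defaults apply: learning_rate=0.001, n_episodes=30, batch_size=32.
    network.train(inputs, targets);

    // One entry per input row; as_raw=false asks for the index of the most
    // activated output neuron rather than its raw activation.
    for(float p : network.predict(inputs, false))
    {
        std::cout << p << std::endl;
    }
    return 0;
}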
main.cpp (13 lines changed)

@@ -11,14 +11,21 @@ int main(int argc, char *argv[])

     cout << "Bonjour et bienvenu" << endl;

-    Network network(15, 3);
+    Network network(15, 3);/*
     network.print();
     cout << endl << endl;
     network.train({1.0,1.0,1.0}, {1.0,2.0,3.0}, 0.001, 100000);
     //network.print();
     cout << endl << endl;
-    network.print();
-    cout << "verdict : " << network.predict({1.0,1.0,1.0},false) << endl;
+    network.print();*/
+    cout << "Network has " << network.get_neurons_number() << " neurons" << endl;
+    for(auto e : network.predict({{1.0,1.0,1.0},{2.0,1.3,0.0}},false))
+    {
+        cout << e << " " << endl;
+    }
+
+    //vector<vector<float>> v(5);
+    //cout << v.at(0).size() << endl;

     return 0;
 }
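In the loop added to main.cpp, predict is called with as_raw=false, so the printed values are output-neuron indices, not activations; with as_raw=true the new predict instead returns the largest activated output itself (max_element over the output layer with neuron_cmp, versus distance to that element). A short sketch contrasting the two modes, reusing the illustrative network shape and inputs from main.cpp (the network is untrained here, so the raw values only show the shape of the result):

#include <cstddef>
#include <iostream>
#include <vector>
#include "annclasses.h"

int main()
{
    Network network(15, 3);
    std::vector<std::vector<float>> samples = {{1.0f, 1.0f, 1.0f}, {2.0f, 1.3f, 0.0f}};

    std::vector<float> raw     = network.predict(samples, true);  // strongest raw output activation per row
    std::vector<float> classes = network.predict(samples, false); // index of that output neuron per row

    for(std::size_t i = 0; i < samples.size(); ++i)
    {
        std::cout << "sample " << i << ": raw=" << raw.at(i)
                  << ", class=" << classes.at(i) << std::endl;
    }
    return 0;
}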