Adding ANN print function
parent 26ae13b72e
commit 639694d05c
3 changed files with 44 additions and 46 deletions

main.cpp

@@ -4,7 +4,7 @@
 #include "myclasses.h"
 
 #include <vector>
-
+#include <iterator>
 using namespace std;
 
 
@@ -26,6 +26,8 @@ int main(int argc, char *argv[])
 
     n0.activate(it);
     cout << "is = " << n0.get_output() << endl;*/
-    Network(4, 5);
+
+    Network network(4, 5);
+    network.print();
     return 0;
 }

(second changed file: Neuron and Network implementations)

@@ -6,13 +6,12 @@
 
 using namespace std;
 
-Neuron::Neuron(int prev_layer_size, Activ activ_function)
+Neuron::Neuron(int prev_layer_size)
 {
     for(int i(1) ; i<=prev_layer_size ; i++)
     {
         weights.push_front(Tools::get_random(0.0, 1.0));
     }
-    activ = activ_function;
     bias = 0.1;
     output = 0.0;
     derror = 0.0;
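The constructor above draws its initial weights from Tools::get_random(0.0, 1.0), a helper that this commit does not touch. For context only, a minimal sketch of what such a helper might look like, assuming a uniform distribution over [min, max) and the Tools namespace implied by the call site; the real implementation may differ:

// Hypothetical sketch of the Tools::get_random helper used above -- not from the
// commit; it only mirrors the call Tools::get_random(0.0, 1.0) seen in the diff.
#include <random>

namespace Tools
{
    // Returns a uniformly distributed float in [min, max).
    inline float get_random(float min, float max)
    {
        static std::mt19937 engine{std::random_device{}()};
        std::uniform_real_distribution<float> dist(min, max);
        return dist(engine);
    }
}
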
@@ -23,16 +22,16 @@ void Neuron::set_output(float value)
     output = value;
 }
 
-void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it)
+void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function)
 {
-    set_output(bias);
+    output = bias;
     for(forward_list<float>::iterator it(weights.begin()) ; it!=weights.end() ; ++it)
     {
         output += (*it) * ((*prev_layer_it).output);
         prev_layer_it++;
     }
 
-    switch(activ)
+    switch(activ_function)
     {
         case RELU:
             output = (output > 0.0) ? output : 0.0;
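To make the change above concrete: activate() now receives the activation as an argument instead of reading a stored member. Below is a standalone sketch, not project code, of the same computation: a weighted sum of the previous layer's outputs plus the bias, passed through the chosen activation. Only the RELU branch is visible in this hunk, so the sigmoid case here uses the standard logistic function as an assumption.

// Standalone illustration (not from the commit): weighted sum + bias, then activation.
#include <cmath>
#include <iostream>
#include <vector>

enum Activ { RELU, TANH, SIGMOID, LINEAR, SOFTMAX };

float activate_value(const std::vector<float> &weights,
                     const std::vector<float> &prev_outputs,
                     float bias, Activ activ_function)
{
    float output = bias;                            // start from the bias, as in the new activate()
    for(std::size_t i = 0 ; i < weights.size() ; ++i)
        output += weights[i] * prev_outputs[i];     // accumulate w_i * previous output_i

    switch(activ_function)
    {
        case RELU:    return (output > 0.0f) ? output : 0.0f;
        case SIGMOID: return 1.0f / (1.0f + std::exp(-output)); // assumed logistic form
        default:      return output;                // LINEAR and the rest left as identity here
    }
}

int main()
{
    std::vector<float> w{0.2f, 0.5f, 0.3f}, x{1.0f, -1.0f, 2.0f};
    std::cout << activate_value(w, x, 0.1f, RELU) << std::endl; // 0.1 + 0.2 - 0.5 + 0.6 = 0.4
    return 0;
}
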
@@ -66,13 +65,13 @@ Network::Network(int n_layers, int n_neurons)
     {
         if(i==1)
         {
-            current_layer.push_front( Neuron(0, LINEAR) );
+            current_layer.push_front( Neuron(0) );
         }else if(i==n_layers)
         {
-            current_layer.push_front( Neuron(n_neurons, SIGMOID) );
+            current_layer.push_front( Neuron(n_neurons) );
         }else
         {
-            current_layer.push_front( Neuron(n_neurons, RELU) );
+            current_layer.push_front( Neuron(n_neurons) );
         }
     }
     layers.push_back(current_layer);
@@ -90,13 +89,13 @@ Network::Network(const std::vector<int> &n_neurons, Activ h_activ, Activ o_activ)
     {
         if(i==0)
         {
-            current_layer.push_front( Neuron(0, LINEAR) );
+            current_layer.push_front( Neuron(0) );
         }else if(i==n_neurons.size()-1)
         {
-            current_layer.push_front( Neuron(n_neurons[i-1], o_activ) );
+            current_layer.push_front( Neuron(n_neurons[i-1]) );
         }else
         {
-            current_layer.push_front( Neuron(n_neurons[i-1], h_activ) );
+            current_layer.push_front( Neuron(n_neurons[i-1]) );
         }
     }
     layers.push_back(current_layer);
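As an aside, not part of the commit: the vector-based constructor above takes the per-layer sizes plus optional hidden/output activations (declared with defaults in the header). A minimal call might look like this, assuming the rest of the class behaves as shown in the surrounding hunks:

// Hypothetical usage of the vector-based constructor (topology 3-5-2 chosen arbitrarily).
#include <vector>
#include "myclasses.h"

int main()
{
    std::vector<int> sizes{3, 5, 2};
    Network net(sizes);          // h_activ defaults to RELU, o_activ to SIGMOID
    net.print();                 // should report (3+1)*5 + (5+1)*2 = 32 parameters
    return 0;
}
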
@@ -112,50 +111,48 @@ void Network::print()
     cout << "#>>==========================================<<#" << endl;
     cout << ">> Number of layers : " << layers.size() << endl;
     cout << "------------------------------------------------" << endl;
+    int layer_counter = 0;
+    int prev_layer_size_temp = 0, params_counter = 0;
     for(list<forward_list<Neuron>>::iterator it1(layers.begin()) ; it1!=layers.end() ; ++it1)
     {
+        layer_counter++;
         int current_layer_size = 0;
-        for(forward_list<Neuron>::iterator it2(it1) ; it2!=it1.end() ; ++it2)
+        for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2)
         {
             current_layer_size++;
         }
-        if(i==0)
+        if(layer_counter==1)
+        {
+            prev_layer_size_temp = current_layer_size;
+        }
+        else
+        {
+            params_counter += (prev_layer_size_temp+1)*current_layer_size;
+            prev_layer_size_temp = current_layer_size;
+        }
+        if(layer_counter==1)
         {
             cout << ">> Input layer" << endl;
-            cout << "size : " << layers << endl;
+            cout << "size : " << current_layer_size << endl;
             cout << "neurons' outputs : ";
-            temp = network->layers_first_neurons[i];
-            while(temp != NULL)
-            {
-                cout << ("%f ", temp->output);
-                temp = temp->same_layer_next_neuron;
-            }
-            cout << ("\n");
-        }else if(i==layers.size()-1)
+            for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){it2->get_output();}
+            cout << endl;
+        }else if(layer_counter==layers.size())
         {
             cout << (">> Output layer\n");
-            cout << ("size : %d\n", network->neurons_per_layer[i]);
+            cout << "size : " << current_layer_size << endl;
             cout << ("neurons' outputs : ");
-            temp = network->layers_first_neurons[i];
-            while(temp != NULL)
-            {
-                cout << ("%f ", temp->output);
-                temp = temp->same_layer_next_neuron;
-            }
-            cout << ("\n");
+            for(forward_list<Neuron>::iterator it2(it1->begin()) ; it2!=it1->end() ; ++it2){it2->get_output();}
+            cout << endl;
         }else
         {
-            cout << (">> Hidden layer %d\n", i);
-            cout << ("size : %d\n", network->neurons_per_layer[i]);
+            cout << ">> Hidden layer " << layer_counter-1 << endl;
+            cout << "size : " << current_layer_size << endl;
         }
-        cout << ("------------------------------------------------\n");
+        cout << "------------------------------------------------" << endl;
     }
-    cout << ("Number of parameters : ");
-    for(i=1 ; i<network->n_layers ; i++)
-    {
-        n_params += network->neurons_per_layer[i] * (network->neurons_per_layer[i-1] + 1);
-    }
-    cout << ("%d\n", n_params);
+    cout << "Number of parameters : ";
+    cout << params_counter << endl;
     cout << "#>>==========================================<<#" << endl << endl;
 }
 
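The new params_counter accumulates (previous layer size + 1) * current layer size over consecutive layers, i.e. one weight per incoming connection plus one bias per neuron. Assuming Network(4, 5) builds four layers of five neurons each (the constructor hunks suggest this but do not show the full loop), the print() call added in main.cpp should report 3 * (5 + 1) * 5 = 90 parameters. A standalone re-computation of that count:

// Standalone re-computation of params_counter for an assumed 5-5-5-5 topology.
#include <iostream>
#include <vector>

int main()
{
    std::vector<int> layer_sizes{5, 5, 5, 5};   // assumed sizes for Network(4, 5)
    int params_counter = 0;
    for(std::size_t i = 1 ; i < layer_sizes.size() ; ++i)
        params_counter += (layer_sizes[i-1] + 1) * layer_sizes[i]; // weights + biases
    std::cout << "Number of parameters : " << params_counter << std::endl; // prints 90
    return 0;
}
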
(third changed file: myclasses.h)

@@ -8,22 +8,21 @@
 
 enum Activ
 {
-    RELU, TANH, SIGMOID, LINEAR
+    RELU, TANH, SIGMOID, LINEAR, SOFTMAX
 };
 
 class Neuron
 {
     public:
-        Neuron(int prev_layer_size, Activ activ_function);
+        Neuron(int prev_layer_size); //prev_layer_size = number of weights
         void set_output(float value);
         float get_output();//to be deleted
-        void activate(std::forward_list<Neuron>::iterator &prev_layer_it);
+        void activate(std::forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function=LINEAR);
     private:
        std::forward_list<float> weights;
        float bias;
        float output;
        float derror;
-        Activ activ;
 };
 
 
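A short usage sketch against the revised header, not part of the commit: the activation is no longer stored in the neuron, so callers pick the Activ when activating (or rely on the LINEAR default). It assumes the declarations above plus the existing Tools::get_random helper at link time.

// Usage sketch for the revised Neuron API (hypothetical example, not project code).
#include <forward_list>
#include <iostream>
#include "myclasses.h"

int main()
{
    std::forward_list<Neuron> prev_layer;       // a previous layer of three neurons
    for(int i = 0 ; i < 3 ; ++i)
    {
        Neuron n(0);                            // input-style neuron: no incoming weights
        n.set_output(1.0f);
        prev_layer.push_front(n);
    }

    Neuron hidden(3);                           // three weights, one per previous neuron
    std::forward_list<Neuron>::iterator it = prev_layer.begin();
    hidden.activate(it, RELU);                  // explicit activation; omit for the LINEAR default
    std::cout << hidden.get_output() << std::endl;
    return 0;
}
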
@@ -32,7 +31,7 @@ class Network
     public:
         Network(int n_layers, int n_neurons);
         Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);
-        void print() const;
+        void print();
         bool forward(const std::vector<float> &input, const std::vector<float> &target);
         bool backward();
     private: