Add Neuron, Network and Tools classes

This commit is contained in:
chabisik 2022-01-01 21:58:15 +01:00
parent e1413298de
commit 5534661b91
4 changed files with 148 additions and 2 deletions

View file

@ -5,7 +5,7 @@ all : myprogram
clean :
rm -f *.o myprogram
myprogram : main.o
myprogram : myclasses.o main.o
g++ -Wall $^ -o myprogram
%.o : %.c

View file

@ -1,10 +1,33 @@
#include <iostream>
#include <forward_list>
//#include <cmath>
#include "myclasses.h"
#include <vector>
using namespace std;
int main(int argc, char *argv[])
{
    // Seed the PRNG once so the randomly initialized Neuron weights
    // differ between runs.
    Tools::activate_randomness();

    // NOTE(fix): a leftover debug `return 0;` here made everything below
    // unreachable; removed so the demo actually executes.
    cout << "Bonjour et bienvenu" << endl;

    vector<int> v = {1,2,3,4};
    cout << "size = " << v.size() << endl;
    cout << "size of bool = " << sizeof(bool) << endl;

    // Smoke test: one sigmoid neuron (3 inputs) fed by three fixed-output
    // RELU neurons acting as an input layer.
    Neuron n0(3,SIGMOID);
    Neuron n1(3,RELU);n1.set_output(1.0);
    Neuron n2(3,RELU);n2.set_output(2.0);
    Neuron n3(3,RELU);n3.set_output(-3.0);

    // push_front reverses insertion order: iteration sees n3, n2, n1.
    forward_list<Neuron> fl;
    fl.push_front(n1);fl.push_front(n2);fl.push_front(n3);
    forward_list<Neuron>::iterator it(fl.begin());

    // activate() consumes one previous-layer neuron per weight and
    // advances `it` by reference.
    n0.activate(it);
    cout << "is = " << n0.get_output() << endl;
    return 0;
}

68
myclasses.cpp Normal file
View file

@ -0,0 +1,68 @@
#include <iostream>
#include <ctime>
#include <cmath>
#include <forward_list>
#include "myclasses.h"
using namespace std;
// Build a neuron wired to `prev_layer_size` upstream neurons: one random
// weight in [0, 1] per input, a small fixed bias, and zeroed output/error.
Neuron::Neuron(int prev_layer_size, Activ activ_function)
{
    // One weight per previous-layer neuron.
    int remaining = prev_layer_size;
    while(remaining-- > 0)
    {
        weights.push_front(Tools::get_random(0.0, 1.0));
    }
    activ  = activ_function;
    bias   = 0.1;
    output = 0.0;
    derror = 0.0;
}
void Neuron::set_output(float value)
{
output = value;
}
/**
 * Compute this neuron's output from the previous layer.
 *
 * Forms bias + sum(weight_i * prev_neuron_i.output), then applies the
 * configured activation (RELU / SIGMOID / TANH; LINEAR keeps the raw sum).
 *
 * @param prev_layer_it iterator into the previous layer's neuron list.
 *        Advanced BY REFERENCE: on return it points weights-count elements
 *        past where it started. The caller must guarantee the previous
 *        layer holds at least as many neurons as this neuron has weights.
 */
void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it)
{
    set_output(bias);
    // Weighted sum: pair each weight with the matching upstream neuron.
    for(const float w : weights)
    {
        output += w * prev_layer_it->output;
        ++prev_layer_it; // pre-increment: no iterator copy per step
    }
    switch(activ)
    {
        case RELU:
            output = (output > 0.0) ? output : 0.0;
            break;
        case SIGMOID:
            output = 1.0 / (1.0 + exp(-output));
            break;
        case TANH:
            output = tanh(output);
            break;
        default:
            //LINEAR (output=direct weighted sum) as base behavior
            break;
    }
}
float Neuron::get_output()//to be deleted later
{
return output;
}
// Seed the C PRNG from the current time; call once at program start so
// Tools::get_random produces a different sequence each run.
void Tools::activate_randomness()
{
    // srand takes unsigned int; cast explicitly instead of narrowing
    // time_t implicitly. nullptr over NULL per modern C++.
    srand(static_cast<unsigned int>(time(nullptr)));
}
/**
 * Uniform pseudo-random float in [mini, maxi].
 *
 * rand()-based: coarse granularity (RAND_MAX steps), not suitable for
 * cryptography; fine for weight initialization. Named casts replace the
 * original C-style casts.
 */
float Tools::get_random(float mini, float maxi)
{
    return mini + (static_cast<float>(rand()) / static_cast<float>(RAND_MAX)) * (maxi - mini);
}

55
myclasses.h Normal file
View file

@ -0,0 +1,55 @@
#ifndef MYCLASSES_H
#define MYCLASSES_H
#include <forward_list>
#include <list>
#include <vector>
// Activation functions a Neuron can apply to its weighted sum.
// LINEAR means "no transformation" (see Neuron::activate's default case).
enum Activ
{
RELU, TANH, SIGMOID, LINEAR
};
// A single artificial neuron: holds one weight per previous-layer neuron,
// a bias, and its last computed output.
class Neuron
{
public:
// Initializes prev_layer_size random weights and the given activation.
Neuron(int prev_layer_size, Activ activ_function);
// Forces the output value (used for input-layer neurons).
void set_output(float value);
float get_output();//to be deleted
// Weighted sum over the previous layer + activation; advances the
// iterator by reference, one step per weight.
void activate(std::forward_list<Neuron>::iterator &prev_layer_it);
private:
std::forward_list<float> weights; // one per previous-layer neuron, random-initialized
float bias; // additive bias term (0.1 at construction)
float output; // last computed activation value
float derror; // zeroed in the ctor; not read anywhere in this commit
Activ activ; // which activation activate() applies
};
// Feed-forward network: a list of layers, each layer a forward_list of
// Neurons. Only declared here — no member is defined in this commit, so
// the notes below are interface intent to be confirmed when implemented.
class Network
{
public:
Network(int n_neurons); // presumably a single-layer network — confirm once implemented
// n_neurons: size of each layer; h_activ/o_activ: hidden/output activations.
Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);
// NOTE(review): takes a target as well — likely computes loss during the pass; verify.
bool forward(const std::vector<float> &input, const std::vector<float> &target);
bool backward();
private:
std::list<std::forward_list<Neuron>> layers;
Activ h_activ; // activation for hidden layers
Activ o_activ; // activation for the output layer
bool _set_errors();
};
// Stateless utility helpers (all static; never instantiated).
class Tools
{
public:
static void activate_randomness(); // seed the PRNG; call once at startup
static float get_random(float mini, float maxi); // uniform float in [mini, maxi]
private:
};
#endif