diff --git a/Makefile b/Makefile
index 2af4a7f..13f8244 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@ all : myprogram
 clean :
 	rm -f *.o myprogram
 
-myprogram : main.o
+myprogram : myclasses.o main.o
 	g++ -Wall $^ -o myprogram
 
 %.o : %.c
diff --git a/main.cpp b/main.cpp
index 46d3819..d6bfd24 100644
--- a/main.cpp
+++ b/main.cpp
@@ -1,10 +1,33 @@
 #include <iostream>
 #include <vector>
+//#include <list>
+#include "myclasses.h"
+
+#include <forward_list>
 
 using namespace std;
 
+
 int main(int argc, char *argv[])
 {
+    Tools::activate_randomness();
-return 0;
+
+    cout << "Hello and welcome" << endl;
+    vector<int> v = {1,2,3,4};
+    cout << "size = " << v.size() << endl;
+    cout << "size of bool = " << sizeof(bool) << endl;
+
+    Neuron n0(3,SIGMOID);
+
+    Neuron n1(3,RELU); n1.set_output(1.0);
+    Neuron n2(3,RELU); n2.set_output(2.0);
+    Neuron n3(3,RELU); n3.set_output(-3.0);
+    forward_list<Neuron> fl;
+    fl.push_front(n1); fl.push_front(n2); fl.push_front(n3);
+    forward_list<Neuron>::iterator it(fl.begin());
+
+    n0.activate(it);
+    cout << "is = " << n0.get_output() << endl;
+    return 0;
 }
diff --git a/myclasses.cpp b/myclasses.cpp
new file mode 100644
index 0000000..f15f17d
--- /dev/null
+++ b/myclasses.cpp
@@ -0,0 +1,68 @@
+#include <cmath>
+#include <cstdlib>
+#include <ctime>
+#include <forward_list>
+#include "myclasses.h"
+
+using namespace std;
+
+Neuron::Neuron(int prev_layer_size, Activ activ_function)
+{
+    for(int i(1) ; i<=prev_layer_size ; i++)
+    {
+        weights.push_front(Tools::get_random(0.0, 1.0)); //one random weight per neuron of the previous layer
+    }
+    activ = activ_function;
+    bias = 0.1;
+    output = 0.0;
+    derror = 0.0;
+}
+
+void Neuron::set_output(float value)
+{
+    output = value;
+}
+
+void Neuron::activate(forward_list<Neuron>::iterator &prev_layer_it)
+{
+    set_output(bias); //start from the bias, then add the weighted inputs
+    for(forward_list<float>::iterator it(weights.begin()) ; it!=weights.end() ; ++it)
+    {
+        output += (*it) * ((*prev_layer_it).output);
+        prev_layer_it++; //consumes one previous-layer neuron per weight
+    }
+
+    switch(activ)
+    {
+        case RELU:
+            output = (output > 0.0) ? output : 0.0;
+            break;
+
+        case SIGMOID:
+            output = 1.0 / (1.0 + exp(-output));
+            break;
+
+        case TANH:
+            output = tanh(output);
+            break;
+
+        default:
+            //LINEAR (output=direct weighted sum) as base behavior
+            break;
+    }
+}
+
+float Neuron::get_output()//to be deleted later
+{
+    return output;
+}
+
+void Tools::activate_randomness()
+{
+    srand(time(NULL));
+}
+
+float Tools::get_random(float mini, float maxi)
+{
+    return mini + ((float)rand()/(float)RAND_MAX) * (maxi-mini);
+}
\ No newline at end of file
diff --git a/myclasses.h b/myclasses.h
new file mode 100644
index 0000000..fb6f37f
--- /dev/null
+++ b/myclasses.h
@@ -0,0 +1,55 @@
+#ifndef MYCLASSES_H
+#define MYCLASSES_H
+
+#include <forward_list>
+#include <vector>
+#include <list>
+
+
+enum Activ
+{
+    RELU, TANH, SIGMOID, LINEAR
+};
+
+class Neuron
+{
+public:
+    Neuron(int prev_layer_size, Activ activ_function);
+    void set_output(float value);
+    float get_output();//to be deleted
+    void activate(std::forward_list<Neuron>::iterator &prev_layer_it);
+private:
+    std::forward_list<float> weights;
+    float bias;
+    float output;
+    float derror;
+    Activ activ;
+};
+
+
+class Network
+{
+public:
+    Network(int n_neurons);
+    Network(const std::vector<int> &n_neurons, Activ h_activ=RELU, Activ o_activ=SIGMOID);
+    bool forward(const std::vector<float> &input, const std::vector<float> &target);
+    bool backward();
+private:
+    std::list<std::forward_list<Neuron>> layers;
+    Activ h_activ;
+    Activ o_activ;
+
+    bool _set_errors();
+};
+
+
+class Tools
+{
+public:
+    static void activate_randomness();
+    static float get_random(float mini, float maxi);
+
+private:
+};
+
+#endif
\ No newline at end of file
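
Note on Network::forward, which myclasses.h declares but this patch does not implement: below is a minimal sketch (not part of the patch) of how the declared pieces could compose. It assumes the layer layout from the header (layers.front() being the input layer) and one weight per previous-layer neuron; the traversal logic is a hypothetical illustration, not the author's implementation.

    //Hypothetical sketch: feed inputs into the first layer, then let each
    //subsequent layer consume the outputs of the layer before it.
    #include <iterator>
    #include "myclasses.h"

    bool Network::forward(const std::vector<float> &input, const std::vector<float> &target)
    {
        //write the input values into the first layer's outputs
        std::forward_list<Neuron> &in_layer = layers.front();
        std::vector<float>::const_iterator x(input.begin());
        for(std::forward_list<Neuron>::iterator n(in_layer.begin()) ; n!=in_layer.end() && x!=input.end() ; ++n, ++x)
        {
            n->set_output(*x);
        }

        //activate each later layer against the one before it
        std::list<std::forward_list<Neuron>>::iterator prev(layers.begin());
        for(std::list<std::forward_list<Neuron>>::iterator cur(std::next(prev)) ; cur!=layers.end() ; ++prev, ++cur)
        {
            for(std::forward_list<Neuron>::iterator neuron(cur->begin()) ; neuron!=cur->end() ; ++neuron)
            {
                std::forward_list<Neuron>::iterator it(prev->begin()); //fresh iterator for each neuron
                neuron->activate(it);
            }
        }

        (void)target; //the target only matters once backward() exists
        return true;
    }

Because activate() advances the iterator it receives, each neuron must be handed a fresh copy pointing at the head of the previous layer, which is why the sketch recreates it inside the inner loop.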