#ifndef MYCLASSES_H
#define MYCLASSES_H

#include <forward_list>
#include <list>
#include <vector>

// Supported activation functions.
enum Activ { RELU, TANH, SIGMOID, LINEAR, SOFTMAX };

// A single neuron: one weight per neuron of the previous layer, a bias,
// and the cached values needed for backpropagation.
class Neuron
{
public:
    Neuron(int prev_layer_size); // prev_layer_size = number of weights
    void set_bias(float value);
    float get_bias();
    void set_nth_weight(int n, float value);
    float get_nth_weight(int n);
    float get_weighted_sum();
    void set_activated_output(float value);
    float get_activated_output();
    void set_derror(float value);
    float get_derror();
    // Computes the weighted sum over the previous layer and applies activ_function.
    void activate(std::forward_list<Neuron>::iterator &prev_layer_it, Activ activ_function = LINEAR);

private:
    std::forward_list<float> weights;
    float bias;
    float weighted_sum;
    float activated_output;
    float derror;
};

// A fully connected feed-forward network: a list of layers, each a forward_list of neurons.
class Network
{
public:
    Network(int n_layers, int n_neurons);
    Network(const std::vector<int> &n_neurons, Activ h_activ = RELU, Activ o_activ = SIGMOID);
    float predict(const std::vector<float> &input, bool as_raw = true);
    void print(); // to be deleted
    bool forward(const std::vector<float> &input, const std::vector<float> &target);
    bool set_errors(const std::vector<float> &target);
    bool backward(float learning_rate);

private:
    std::list<std::forward_list<Neuron>> layers;
    Activ h_activ; // activation of the hidden layers
    Activ o_activ; // activation of the output layer
};

// Stateless helpers: random initialization plus the activation functions and
// their derivatives. The private helpers are static so that the static public
// dispatchers can call them without a Tools instance.
class Tools
{
public:
    static void activate_randomness();
    static float get_random(float mini, float maxi);
    // Activation functions and their derivatives
    static float activation_function(Activ activ, float value);
    static float activation_function_derivative(Activ activ, float value);

private:
    static float relu(float value);
    static float sigmoid(float value);
    static float relu_derivative(float value);
    static float sigmoid_derivative(float value);
    static float tanh_derivative(float value);
};

#endif // MYCLASSES_H
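
/*
 * Example usage (a minimal sketch, not part of this header): it assumes the
 * corresponding .cpp implements the declarations above as their names
 * suggest. The file name "myclasses.h", the {2, 3, 1} topology, the 0.1f
 * learning rate, and the training pair below are illustrative only.
 *
 *     #include "myclasses.h"
 *     #include <vector>
 *
 *     int main()
 *     {
 *         Tools::activate_randomness();            // seed the RNG before weight init
 *         Network net({2, 3, 1}, RELU, SIGMOID);   // 2 inputs, 3 hidden, 1 output
 *
 *         std::vector<float> input  = {0.0f, 1.0f};
 *         std::vector<float> target = {1.0f};
 *
 *         for (int epoch = 0; epoch < 1000; ++epoch) {
 *             net.forward(input, target);  // forward pass, caching weighted sums/outputs
 *             net.set_errors(target);      // per-neuron error terms (derror)
 *             net.backward(0.1f);          // gradient step on weights and biases
 *         }
 *
 *         float out = net.predict(input);
 *         (void)out;
 *         return 0;
 *     }
 */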