diff --git a/main.c b/main.c
index 5495800..30bc0a6 100644
--- a/main.c
+++ b/main.c
@@ -5,8 +5,8 @@
 #include "neurons.h"
 #include "network.h"
 #include "activations.h"
-#include "training.h"
 #include "preprocessing.h"
+#include "training.h"
 
 int main(int argc, char *argv[])
 {
diff --git a/training.c b/training.c
index 731b1a8..1ad00a3 100644
--- a/training.c
+++ b/training.c
@@ -5,20 +5,57 @@
 #include "activations.h"
 #include "neurons.h"
 #include "network.h"
+#include "preprocessing.h"
+#include "training.h"
 
-void forward(Network *network, float sample[])
+void forward(Network *network, Sample *sample)
+{
+    Feature *current_feature;
+    Neuron *current_neuron, *prev_layer_current_neuron;
+    Weight *current_weight;
+    int i;
+    for(i=0 ; i<network->n_layers ; i++)
+    {
+        if(i==0) // first layer: copy the sample's feature values into the neurons' outputs
+        {
+            current_feature = sample->first_feature;
+            current_neuron = network->layers_first_neurons[i];
+            while(current_neuron != NULL)
+            {
+                current_neuron->output = current_feature->value;
+                current_feature = current_feature->next_feature;
+                current_neuron = current_neuron->same_layer_next_neuron;
+            }
+        }else // later layers: dot product with the previous layer's outputs, plus bias
+        {
+            current_neuron = network->layers_first_neurons[i];
+            while(current_neuron != NULL)
+            {
+                prev_layer_current_neuron = network->layers_first_neurons[i-1];
+                current_neuron->output = current_neuron->bias;
+                current_weight = current_neuron->weights;
+                while(prev_layer_current_neuron != NULL)
+                {
+                    current_neuron->output += prev_layer_current_neuron->output*current_weight->value;
+                    current_weight = current_weight->next;
+                    prev_layer_current_neuron = prev_layer_current_neuron->same_layer_next_neuron;
+                }
+                current_neuron->output = current_neuron->activation( current_neuron->output ); // apply activation function
+                current_neuron = current_neuron->same_layer_next_neuron;
+            }
+        }
+    }
+}
+
+void errors_propagate(Network *network, Sample *sample)
 {
 
 }
 
-void errors_backpropagate(Network *network, float label[])
+void backpropagate(Network *network, float learning_rate)
 {
 
 }
-
-void apply_backpropagate(Network *network, float learning_rate)
-{
-
-}
\ No newline at end of file
diff --git a/training.h b/training.h
index fdc0a10..84a40ef 100644
--- a/training.h
+++ b/training.h
@@ -1,8 +1,8 @@
 #ifndef TRAINING_H
 #define TRAINING_H
 
-void forward(Network *network, float sample[]);
-void errors_backpropagate(Network *network, float label[]);
-void apply_backpropagate(Network *network, float learning_rate);
+void forward(Network *network, Sample *sample);
+void errors_propagate(Network *network, Sample *sample);
+void backpropagate(Network *network, float learning_rate);
 
 #endif
\ No newline at end of file
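
Note for reviewers: the new forward() leans entirely on linked-list structures defined in neurons.h, network.h, and preprocessing.h, none of which appear in this diff. The sketch below is a hypothetical reconstruction of what those definitions would have to look like, inferred purely from the field accesses in forward(); the real headers may differ.

/* Hypothetical reconstruction of the structs forward() traverses; the
 * actual definitions live in headers outside this diff. */
typedef struct Feature {
    float value;                       /* one input value of the sample */
    struct Feature *next_feature;
} Feature;

typedef struct Sample {
    Feature *first_feature;            /* head of the feature list */
} Sample;

typedef struct Weight {
    float value;
    struct Weight *next;               /* one node per previous-layer neuron */
} Weight;

typedef struct Neuron {
    float output;
    float bias;
    Weight *weights;                   /* head of the incoming-weight list */
    float (*activation)(float);        /* installed when the network is built */
    struct Neuron *same_layer_next_neuron;
} Neuron;

typedef struct Network {
    int n_layers;
    Neuron **layers_first_neurons;     /* one list head per layer */
} Network;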
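
What the inner loop of forward() computes per neuron is a plain weighted sum. Below is a standalone check of that arithmetic, with sigmoid standing in for whatever activation the project actually installs (an assumption; this diff never shows what activations.h provides). Compile with -lm.

#include <stdio.h>
#include <math.h>

/* Sigmoid is assumed here; the real activation pointer is set elsewhere. */
static float sigmoid(float x) { return 1.0f / (1.0f + expf(-x)); }

int main(void)
{
    float prev_outputs[3] = {0.5f, -1.0f, 2.0f};  /* previous layer's outputs */
    float weights[3]      = {0.1f,  0.4f, -0.2f}; /* this neuron's weight list */
    float bias = 0.3f;
    int i;

    /* Same accumulation as the inner while loop of forward(): start from
     * the bias, add one output*weight product per previous-layer neuron. */
    float sum = bias;
    for (i = 0; i < 3; i++)
        sum += prev_outputs[i] * weights[i];

    /* pre-activation: -0.45, output: sigmoid(-0.45) ~ 0.3894 */
    printf("pre-activation: %f, output: %f\n", sum, sigmoid(sum));
    return 0;
}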
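
errors_propagate() and backpropagate() are left as empty stubs in this revision, so anything about their eventual contents is speculation. For orientation only, here is the conventional gradient-descent step a backpropagate() of this shape would apply per weight, shown standalone:

#include <stdio.h>

/* Sketch of the usual gradient-descent update; the stub is empty in this
 * patch, so this rule is an assumption, not the author's code. */
int main(void)
{
    float weight = 0.5f;            /* current weight                    */
    float input = 0.8f;             /* upstream neuron's output          */
    float delta = 0.1f;             /* error term from errors_propagate  */
    float learning_rate = 0.01f;

    weight -= learning_rate * delta * input;   /* w -= eta * delta * x  */
    printf("updated weight: %f\n", weight);    /* 0.5 - 0.0008 = 0.4992 */
    return 0;
}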