Completing network forward function

This commit is contained in:
Abdel Kader Chabi Sika Boni 2021-12-12 15:40:53 +01:00
parent a8d30e4c60
commit 1ee5cc0af4
3 changed files with 47 additions and 10 deletions

2
main.c
View file

@@ -5,8 +5,8 @@
 #include "neurons.h"
 #include "network.h"
 #include "activations.h"
-#include "training.h"
 #include "preprocessing.h"
+#include "training.h"
 int main(int argc, char *argv[])
 {

View file

@@ -5,20 +5,57 @@
 #include "activations.h"
 #include "neurons.h"
 #include "network.h"
+#include "preprocessing.h"
+#include "training.h"
void forward(Network *network, float sample[]) void forward(Network *network, Sample *sample)
{
Feature *current_feature;
Neuron *current_neuron, *prev_layer_current_neuron;
Weight *current_weight;
int i;
for(i=0 ; i<network->n_layers ; i++)
{
if(i==0) //set first layer neurons' output equal to sample's features
{
current_feature = sample->first_feature;
current_neuron = network->layers_first_neurons[i];
while(current_neuron != NULL)
{
current_neuron->output = current_feature->value;
current_feature = current_feature->next_feature;
current_neuron = current_neuron->same_layer_next_neuron;
}
}else //when layer not first one, do dot product sum with bias
{
current_neuron = network->layers_first_neurons[i];
while(current_neuron != NULL)
{
prev_layer_current_neuron = network->layers_first_neurons[i-1];
current_neuron->output = current_neuron->bias;
current_weight = current_neuron->weights;
while(prev_layer_current_neuron != NULL)
{
current_neuron->output += prev_layer_current_neuron->output*current_weight->value;
current_weight = current_weight->next;
prev_layer_current_neuron = prev_layer_current_neuron->same_layer_next_neuron;
}
current_neuron->output = current_neuron->activation( current_neuron->output ); //apply activation function
current_neuron = current_neuron->same_layer_next_neuron;
}
}
}
}
/*
 * errors_propagate: compute per-neuron error terms for `sample`.
 * Not yet implemented — placeholder added alongside the forward pass.
 */
void errors_propagate(Network *network, Sample *sample)
{
}
/*
 * backpropagate: apply accumulated gradients to weights/biases, scaled by
 * `learning_rate`. Not yet implemented — placeholder stub.
 */
void backpropagate(Network *network, float learning_rate)
{
}
/* apply_backpropagate: weight-update step (empty stub, removed by this commit
   in favor of backpropagate(Network *, float)). */
void apply_backpropagate(Network *network, float learning_rate)
{
}

View file

@@ -1,8 +1,8 @@
 #ifndef TRAINING_H
 #define TRAINING_H
-void forward(Network *network, float sample[]);
-void errors_backpropagate(Network *network, float label[]);
-void apply_backpropagate(Network *network, float learning_rate);
+void forward(Network *network, Sample *sample);
+void errors_propagate(Network *network, Sample *sample);
+void backpropagate(Network *network, float learning_rate);
 #endif