#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "randomness.h"
#include "activations.h"
#include "neurons.h"
#include "network.h"
#include "preprocessing.h"
#include "training.h"

/*
 * Forward pass: copies the sample's features into the input layer, then
 * propagates activations layer by layer (bias + weighted sum of the previous
 * layer's outputs, passed through each neuron's activation function).
 * Assumes the input layer holds exactly one neuron per sample feature.
 */
void forward(Network *network, Sample *sample)
{
    Feature *current_feature;
    Neuron *current_neuron, *prev_layer_current_neuron;
    Weight *current_weight;
    int i;

    for(i = 0; i < network->n_layers; i++)
    {
        if(i == 0) //input layer: set each neuron's output to the matching sample feature
        {
            current_feature = sample->first_feature;
            current_neuron = network->layers_first_neurons[i];
            while(current_neuron != NULL)
            {
                current_neuron->output = current_feature->value;
                current_feature = current_feature->next_feature;
                current_neuron = current_neuron->same_layer_next_neuron;
            }
        }
        else //later layers: bias plus dot product with the previous layer's outputs
        {
            current_neuron = network->layers_first_neurons[i];
            while(current_neuron != NULL)
            {
                prev_layer_current_neuron = network->layers_first_neurons[i-1];
                current_neuron->output = current_neuron->bias;
                current_weight = current_neuron->weights;
                while(prev_layer_current_neuron != NULL)
                {
                    current_neuron->output += prev_layer_current_neuron->output * current_weight->value;
                    current_weight = current_weight->next;
                    prev_layer_current_neuron = prev_layer_current_neuron->same_layer_next_neuron;
                }
                current_neuron->output = current_neuron->activation(current_neuron->output); //apply activation function
                current_neuron = current_neuron->same_layer_next_neuron;
            }
        }
    }
}
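
/*
 * A minimal sketch (not in the original source) of how a caller might read a
 * prediction back out after forward(): walk the output layer and return the
 * position of the neuron with the largest output. It only touches fields
 * already used above; the helper name itself is hypothetical.
 */
static int predicted_index(Network *network)
{
    Neuron *current_neuron = network->layers_first_neurons[network->n_layers - 1];
    int index = 0, best_index = 0;
    float best_output = current_neuron->output; //assumes a non-empty output layer

    while(current_neuron != NULL)
    {
        if(current_neuron->output > best_output)
        {
            best_output = current_neuron->output;
            best_index = index;
        }
        index++;
        current_neuron = current_neuron->same_layer_next_neuron;
    }
    return best_index;
}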

void errors_propagate(Network *network, Sample *sample)
{
}
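
/*
 * Sketch of the error propagation this stub is meant to perform, left as a
 * comment because it relies on hypothetical members the current structs may
 * not expose (a per-neuron `error` slot, the sample's target values, and an
 * activation derivative f'):
 *
 *   output layer:  n->error = (n->output - target) * f'(n->output);
 *   hidden layers: n->error = f'(n->output) * sum_k (k->error * w_nk->value),
 *                  where k runs over the next layer's neurons and w_nk is the
 *                  weight connecting n to k;
 *
 * iterating from the last layer back to the first, mirroring forward().
 */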

void backpropagate(Network *network, float learning_rate)
{
}
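
/*
 * Sketch of the gradient-descent update this stub is meant to apply once
 * errors_propagate() has filled in the (hypothetical) per-neuron errors:
 *
 *   for each neuron n in every layer after the input layer:
 *       n->bias -= learning_rate * n->error;
 *       for each weight w of n, paired with previous-layer neuron p:
 *           w->value -= learning_rate * n->error * p->output;
 */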