|
@@ -5,20 +5,57 @@
|
5
|
5
|
#include "activations.h"
|
6
|
6
|
#include "neurons.h"
|
7
|
7
|
#include "network.h"
|
|
8
|
+#include "preprocessing.h"
|
|
9
|
+#include "training.h"
|
8
|
10
|
|
9
|
11
|
|
10
|
12
|
|
11
|
|
-void forward(Network *network, float sample[])
|
|
13
|
+void forward(Network *network, Sample *sample)
|
12
|
14
|
{
|
13
|
|
-
|
|
15
|
+ Feature *current_feature;
|
|
16
|
+ Neuron *current_neuron, *prev_layer_current_neuron;
|
|
17
|
+ Weight *current_weight;
|
|
18
|
+ int i;
|
|
19
|
+ for(i=0 ; i<network->n_layers ; i++)
|
|
20
|
+ {
|
|
21
|
+ if(i==0) //set first layer neurons' output equal to sample's features
|
|
22
|
+ {
|
|
23
|
+ current_feature = sample->first_feature;
|
|
24
|
+ current_neuron = network->layers_first_neurons[i];
|
|
25
|
+ while(current_neuron != NULL)
|
|
26
|
+ {
|
|
27
|
+ current_neuron->output = current_feature->value;
|
|
28
|
+ current_feature = current_feature->next_feature;
|
|
29
|
+ current_neuron = current_neuron->same_layer_next_neuron;
|
|
30
|
+ }
|
|
31
|
+ }else //when layer not first one, do dot product sum with bias
|
|
32
|
+ {
|
|
33
|
+ current_neuron = network->layers_first_neurons[i];
|
|
34
|
+ while(current_neuron != NULL)
|
|
35
|
+ {
|
|
36
|
+ prev_layer_current_neuron = network->layers_first_neurons[i-1];
|
|
37
|
+ current_neuron->output = current_neuron->bias;
|
|
38
|
+ current_weight = current_neuron->weights;
|
|
39
|
+ while(prev_layer_current_neuron != NULL)
|
|
40
|
+ {
|
|
41
|
+ current_neuron->output += prev_layer_current_neuron->output*current_weight->value;
|
|
42
|
+ current_weight = current_weight->next;
|
|
43
|
+ prev_layer_current_neuron = prev_layer_current_neuron->same_layer_next_neuron;
|
|
44
|
+ }
|
|
45
|
+ current_neuron->output = current_neuron->activation( current_neuron->output ); //apply activation function
|
|
46
|
+ current_neuron = current_neuron->same_layer_next_neuron;
|
|
47
|
+ }
|
|
48
|
+ }
|
|
49
|
+ }
|
14
|
50
|
}
|
15
|
51
|
|
16
|
|
/*
 * NOTE(review): stub — no implementation yet.  Presumably intended to
 * compute/propagate per-neuron error terms for `sample` (renamed from
 * errors_backpropagate in this change); confirm the intended contract
 * before implementing.
 */
void errors_propagate(Network *network, Sample *sample)
{

}
|
20
|
56
|
|
21
|
|
/*
 * NOTE(review): stub — no implementation yet.  Presumably intended to
 * apply accumulated gradient updates to the network's weights/biases,
 * scaled by `learning_rate` (renamed from apply_backpropagate in this
 * change); confirm the intended contract before implementing.
 */
void backpropagate(Network *network, float learning_rate)
{

}
|
|
61
|
+
|