
Completing network forward function

Parent commit: 1ee5cc0af4

3 changed files with 46 additions and 9 deletions:

  1. main.c      (+1, -1)
  2. training.c  (+42, -5)
  3. training.h  (+3, -3)

main.c (+1, -1)

@@ -5,8 +5,8 @@
 #include "neurons.h"
 #include "network.h"
 #include "activations.h"
-#include "training.h"
 #include "preprocessing.h"
+#include "training.h"
 
 int main(int argc, char *argv[])
 {

training.c (+42, -5)

@@ -5,20 +5,57 @@
 #include "activations.h"
 #include "neurons.h"
 #include "network.h"
+#include "preprocessing.h"
+#include "training.h"
 
 
 
-void forward(Network *network, float sample[])
+void forward(Network *network, Sample *sample)
 {
-
+    Feature *current_feature;
+    Neuron *current_neuron, *prev_layer_current_neuron;
+    Weight *current_weight;
+    int i;
+    for(i=0 ; i<network->n_layers ; i++)
+    {
+        if(i==0) //set first layer neurons' output equal to sample's features
+        {
+            current_feature = sample->first_feature;
+            current_neuron = network->layers_first_neurons[i];
+            while(current_neuron != NULL)
+            {
+                current_neuron->output = current_feature->value;
+                current_feature = current_feature->next_feature;
+                current_neuron = current_neuron->same_layer_next_neuron;
+            }
+        }else //when layer not first one, do dot product sum with bias
+        {
+            current_neuron = network->layers_first_neurons[i];
+            while(current_neuron != NULL)
+            {
+                prev_layer_current_neuron = network->layers_first_neurons[i-1];
+                current_neuron->output = current_neuron->bias;
+                current_weight = current_neuron->weights;
+                while(prev_layer_current_neuron != NULL)
+                {
+                    current_neuron->output += prev_layer_current_neuron->output*current_weight->value;
+                    current_weight = current_weight->next;
+                    prev_layer_current_neuron = prev_layer_current_neuron->same_layer_next_neuron;
+                }
+                current_neuron->output = current_neuron->activation( current_neuron->output ); //apply activation function
+                current_neuron = current_neuron->same_layer_next_neuron;
+            }
+        }
+    }
 }
 
-void errors_backpropagate(Network *network, float label[])
+void errors_propagate(Network *network, Sample *sample)
 {
 
 }
 
-void apply_backpropagate(Network *network, float learning_rate)
+void backpropagate(Network *network, float learning_rate)
 {
 
-}
+}
+
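In the new forward(), layer 0 simply copies the sample's feature values into the first layer's neuron outputs; every later layer computes, for each neuron, its bias plus the dot product of the previous layer's outputs with that neuron's weight list, then applies the neuron's activation function. Written out (notation mine, not part of the commit), for a neuron j in layer i > 0:

    output_j(i) = activation_j( bias_j + sum over k of w_jk * output_k(i-1) )

where k runs over the neurons of layer i-1 in the same linked-list order as the neuron's weights.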

training.h (+3, -3)

@@ -1,8 +1,8 @@
 #ifndef TRAINING_H
 #define TRAINING_H
 
-void forward(Network *network, float sample[]);
-void errors_backpropagate(Network *network, float label[]);
-void apply_backpropagate(Network *network, float learning_rate);
+void forward(Network *network, Sample *sample);
+void errors_propagate(Network *network, Sample *sample);
+void backpropagate(Network *network, float learning_rate);
 
 #endif
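With the header updated, a hypothetical single training step built on these three declarations could look like the sketch below. The train_step() wrapper and the idea that a Sample comes from the preprocessing code are assumptions for illustration only; errors_propagate() and backpropagate() are still empty stubs in this commit.

    #include "network.h"
    #include "preprocessing.h"
    #include "training.h"

    /* Hypothetical helper, not part of the commit: one training step
       using the three functions declared in training.h. */
    void train_step(Network *network, Sample *sample, float learning_rate)
    {
        forward(network, sample);              /* fills every neuron's output  */
        errors_propagate(network, sample);     /* empty stub as of this commit */
        backpropagate(network, learning_rate); /* empty stub as of this commit */
    }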
