#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "randomness.h"
#include "neurons.h"
#include "activations.h"

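/*
 * For reference, the types used below are declared in neurons.h. A plausible
 * sketch, reconstructed purely from the fields this file accesses (the actual
 * header may differ):
 *
 *   typedef struct Weight {
 *       float value;
 *       struct Weight *next;
 *   } Weight;
 *
 *   typedef struct Neuron {
 *       Weight *weights;
 *       float bias;
 *       float output;
 *       float delta_error;
 *       float (*activation)(float);
 *       float (*activation_derivative)(float);
 *       struct Neuron *same_layer_next_neuron;
 *   } Neuron;
 */
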
Weight *init_weight_list(int n_weights)
{
    /* Build a singly linked list of n_weights weights, each initialised to a
       random value in [0.0, 1.0]. Returns NULL when n_weights <= 0. */
    Weight *first = NULL, *last = NULL;
    int i;
    for(i=1 ; i<=n_weights ; i++)
    {
        Weight *current = (Weight*)malloc(sizeof(Weight));
        if(current == NULL)
        {
            fprintf(stderr, "init_weight_list: out of memory\n");
            exit(EXIT_FAILURE);
        }
        current->value = random_float(0.0 , 1.0);
        current->next = NULL;
        if(first == NULL)
        {
            first = current;      /* first node becomes the list head */
        }else
        {
            last->next = current; /* append after the current tail */
        }
        last = current;
    }
    return first;
}

Neuron *init_neuron(int n_weights, char *activation_function)
{
    Neuron *neuron = (Neuron*)malloc(sizeof(Neuron));
    if(neuron == NULL)
    {
        fprintf(stderr, "init_neuron: out of memory\n");
        exit(EXIT_FAILURE);
    }
    if(n_weights == 0)
    {
        neuron->weights = NULL; /* e.g. an input neuron with no incoming weights */
    }else
    {
        neuron->weights = init_weight_list(n_weights);
    }
    neuron->output = 0.0;
    if(strcmp(activation_function, "sigmoid") == 0)
    {
        neuron->activation = sigmoid;
        neuron->activation_derivative = sigmoid_derivative;
        neuron->bias = 1.0;
    }
    else if(strcmp(activation_function, "tanh") == 0)
    {
        neuron->activation = tan_hyp;
        neuron->activation_derivative = tan_hyp_derivative;
        neuron->bias = 1.0;
    }else if(strcmp(activation_function, "linear") == 0)
    {
        neuron->activation = linear;
        neuron->activation_derivative = linear_derivative;
        neuron->bias = 1.0;
    }
    else /* ReLU is the default activation function */
    {
        neuron->activation = relu;
        neuron->activation_derivative = relu_derivative;
        neuron->bias = 0.1; /* a small positive bias (0.1) is commonly recommended
                               with ReLU, versus 1.0 for the other functions */
    }
    neuron->delta_error = 0.0;
    neuron->same_layer_next_neuron = NULL;

    return neuron;
}

void destroy_neuron(Neuron *neuron)
{
    Weight *temp = NULL;
    if(neuron == NULL)
    {
        return; /* nothing to free */
    }
    /* Free the weight list node by node, then the neuron itself. */
    while(neuron->weights != NULL)
    {
        temp = neuron->weights;
        neuron->weights = neuron->weights->next;
        free(temp);
    }
    free(neuron);
}
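
/* A minimal usage sketch (an assumption, not part of the original module;
   it presumes random_float is seeded elsewhere, e.g. by randomness.h).
   Compile with -DNEURONS_DEMO to try it standalone: */
#ifdef NEURONS_DEMO
int main(void)
{
    /* A neuron with 3 incoming weights and the default ReLU activation. */
    Neuron *n = init_neuron(3, "relu");
    Weight *w;
    for(w = n->weights; w != NULL; w = w->next)
    {
        printf("weight: %f\n", w->value);
    }
    printf("bias: %f\n", n->bias);
    destroy_neuron(n);
    return 0;
}
#endif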