Adding more activation functions
This commit is contained in:
parent 7c646330f0
commit 13ca1e16ef

8 changed files with 72 additions and 20 deletions
activations.c
@@ -5,10 +5,40 @@
 float sigmoid(float weighted_sum)
 {
-    return 1.0 / (1 + exp(-weighted_sum));
+    return 1.0 / (1.0 + exp(-weighted_sum));
 }
 
 float sigmoid_derivative(float output)
 {
-    return sigmoid(output) * (1 - sigmoid(output));
+    return sigmoid(output) * (1.0 - sigmoid(output));
 }
+
+float tan_hyp(float weighted_sum)
+{
+    return tanh(weighted_sum);
+}
+
+float tan_hyp_derivative(float output)
+{
+    return 1.0 - (tan_hyp(output) * tan_hyp(output));
+}
+
+float relu(float weighted_sum)
+{
+    return (weighted_sum > 0.0) ? weighted_sum : 0.0;
+}
+
+float relu_derivative(float output)
+{
+    return (output > 0.0) ? 1.0 : 0.0;
+}
+
+float linear(float weighted_sum)
+{
+    return weighted_sum;
+}
+
+float linear_derivative(float output)
+{
+    return 1.0;
+}
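As a quick sanity check of the new functions, here is a standalone snippet (not part of this commit; it only assumes the prototypes above are exposed through activations.h and that the program is linked against activations.c with -lm). Note that, as written, sigmoid_derivative and tan_hyp_derivative re-apply the activation internally, so they expect the raw weighted sum rather than an already-activated output.

/* Hypothetical usage sketch, not repository code. */
#include <stdio.h>
#include "activations.h"

int main(void)
{
    float z = 0.5f; /* an arbitrary weighted sum */
    printf("sigmoid: %f (derivative %f)\n", sigmoid(z), sigmoid_derivative(z));
    printf("tanh:    %f (derivative %f)\n", tan_hyp(z), tan_hyp_derivative(z));
    printf("relu:    %f (derivative %f)\n", relu(z),    relu_derivative(z));
    printf("linear:  %f (derivative %f)\n", linear(z),  linear_derivative(z));
    return 0;
}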
activations.h
@@ -3,5 +3,11 @@
 float sigmoid(float weighted_sum);
 float sigmoid_derivative(float output);
+float tan_hyp(float weighted_sum);
+float tan_hyp_derivative(float output);
+float relu(float weighted_sum);
+float relu_derivative(float output);
+float linear(float weighted_sum);
+float linear_derivative(float output);
 
 #endif
main.c (7 changed lines)
@@ -23,10 +23,9 @@ int main(int argc, char *argv[])
     float a=5.0;
     printf("%f\n", (float)exp((double)a));*/
 
     //int n_neurons[] = {3,10,52,52,6};
-    int n_neurons[] = {15,120,220,120};
-    char *activations[] = {"sigmoid","sigmoid","sigmoid","sigmoid"};
-    Network *net = init_network(n_neurons, 4, activations);
+    int n_neurons[] = {15,120,220,120,200,25};
+    char *activations[] = {"relu","relu","relu","relu","relu","relu"};
+    Network *net = init_network(n_neurons, 6, activations);
     print_network(net);
     destroy_network(net);
network.c
@@ -53,7 +53,7 @@ Network *init_network(int n_neurons_per_layer[], int n_layers, char *activation_
     return network;
 }
 
-void print_network(Network *network)
+void print_network(const Network *network)
 {
     int i, n_params=0;
     printf("#>>==========================================<<#\n");
network.h
@@ -12,7 +12,7 @@ struct network
 Neuron *generate_layer(int n_neurons, int n_neurons_prev_layer, char *activation_function);
 Network *init_network(int n_neurons_per_layer[], int n_layers, char *activation_function_per_layer[]);
-void print_network(Network *network);
+void print_network(const Network *network);
 void destroy_network(Network *network);
 
 #endif
neurons.c (25 changed lines)
@@ -37,16 +37,29 @@ Neuron *init_neuron(int n_weights, char *activation_function)
     {
         neuron->weights = init_weight_list(n_weights);
     }
     neuron->bias = random_float(0.0 , 1.0);
     neuron->output = 0.0;
     if(strcmp(activation_function, "nothing") == 0)
     {
         //to be completed later with tanh, relu, etc : for now only sigmoid is supported and will be the default function
     }
     else
     if(strcmp(activation_function, "sigmoid") == 0)
     {
         neuron->activation = sigmoid;
         neuron->activation_derivative = sigmoid_derivative;
+        neuron->bias = 1.0;
     }
+    else if(strcmp(activation_function, "tanh") == 0)
+    {
+        neuron->activation = tan_hyp;
+        neuron->activation_derivative = tan_hyp_derivative;
+        neuron->bias = 1.0;
+    }else if(strcmp(activation_function, "linear") == 0)
+    {
+        neuron->activation = linear;
+        neuron->activation_derivative = linear_derivative;
+        neuron->bias = 1.0;
+    }
+    else //relu is assumed as default activation function
+    {
+        neuron->activation = relu;
+        neuron->activation_derivative = relu_derivative;
+        neuron->bias = 0.1; //as suggested by scientific articles (advice of setting bias to 0.1 when using ReLU and 1.0 with other functions)
+    }
     neuron->delta_error = 0.0;
     neuron->same_layer_next_neuron = NULL;
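Outside the diff, here is a self-contained sketch (hypothetical types and values, not the repository's Neuron struct or init_neuron itself) of what this function-pointer selection buys later on: once a neuron stores an activation and its derivative, forward and backward code can call them without caring which function was chosen.

/* Toy stand-in with only the fields used here. */
#include <stdio.h>

typedef float (*act_fn)(float);

struct toy_neuron {
    float bias;
    float output;
    act_fn activation;
    act_fn activation_derivative;
};

static float toy_relu(float z)       { return (z > 0.0f) ? z : 0.0f; }
static float toy_relu_deriv(float z) { return (z > 0.0f) ? 1.0f : 0.0f; }

int main(void)
{
    /* bias 0.1 mirrors the ReLU default chosen in init_neuron above */
    struct toy_neuron n = { 0.1f, 0.0f, toy_relu, toy_relu_deriv };

    float weighted_sum = n.bias + 0.8f;        /* pretend the dot product is already done */
    n.output = n.activation(weighted_sum);     /* forward pass */
    float grad = n.activation_derivative(weighted_sum); /* local gradient for backprop */

    printf("output = %f, local gradient = %f\n", n.output, grad);
    return 0;
}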
training.c (12 changed lines)
@@ -1,10 +1,12 @@
-/*#include <stdio.h>
+#include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 #include "randomness.h"
-#include "network.h"
-#include "neurons.h"
+#include "activations.h"
+#include "neurons.h"
+#include "network.h"
 
 
 
 void forward(Network *network, float sample[])
 {
@@ -16,7 +18,7 @@ void errors_backpropagate(Network *network, float label[])
 
 }
 
-void apply_backpropagate(Network *network, float sample[])
+void apply_backpropagate(Network *network, float learning_rate)
 {
 
-}*/
+}
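apply_backpropagate now takes a learning rate instead of a sample, but its body is still an empty stub in this commit. Purely as a hypothetical illustration of the plain gradient-descent step such a function would typically perform per weight (using the delta_error each neuron already stores):

/* Hypothetical sketch, not repository code: one gradient-descent update
 * for a single weight, assuming delta_error holds dE/dz for the neuron
 * and input is the value that weight multiplies. */
float update_weight(float weight, float learning_rate,
                    float delta_error, float input)
{
    return weight - learning_rate * delta_error * input;
}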
training.h
@@ -1,6 +1,8 @@
 #ifndef TRAINING_H
 #define TRAINING_H
 
 
+void forward(Network *network, float sample[]);
+void errors_backpropagate(Network *network, float label[]);
+void apply_backpropagate(Network *network, float learning_rate);
 
 #endif