Updating/Adding functions

Abdel Kader Chabi Sika Boni 2021-11-27 23:34:10 +01:00
parent 04539de3f0
commit 4df6aa7150
18 changed files with 117 additions and 13 deletions

Makefile

@@ -1,3 +1,16 @@
default : myprogram
all : myprogram
clean :
	rm *.o myprogram
myprogram : main.o activations.o randomness.o training.o network.o neurons.o
	gcc -Wall $^ -o myprogram -lm
%.o : %.c
	gcc -Wall $< -c
send :
	git add .
	git commit
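With these targets, running "make" (or "make all") compiles each listed object through the %.o : %.c pattern rule and links myprogram; "make clean" removes the objects and the binary. The -lm flag at link time is what resolves exp() from <math.h>, which the new sigmoid code relies on.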

activations.c

@@ -1,13 +1,14 @@
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
+#include "activations.h"
float sigmoid(float weighted_sum)
{
-	return 1.0 / (1 + exp(-(float)weighted_sum));
+	return 1.0 / (1 + exp(-weighted_sum));
}
float sigmoid_derivative(float output)
{
-	return 1.0;
+	return sigmoid(output) * (1 - sigmoid(output));
}
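A note on the new derivative: as written, sigmoid_derivative treats its argument as the raw pre-activation input and recomputes sigmoid internally. If a caller instead passes a neuron's stored output, which is already sigmoid(x), the usual shortcut is output * (1 - output). A minimal standalone check of the two forms (a sketch, not part of the commit):

#include <stdio.h>
#include <math.h>

/* local copy of the committed sigmoid so the check compiles alone */
static float sigmoid(float x) { return 1.0f / (1.0f + expf(-x)); }

int main(void)
{
	float x = 0.8f;          /* pre-activation input */
	float y = sigmoid(x);    /* what a neuron would store as its output */
	/* the form in this commit: derivative computed from the input */
	printf("from input : %f\n", sigmoid(x) * (1 - sigmoid(x)));
	/* equivalent shortcut once the sigmoid output is already at hand */
	printf("from output: %f\n", y * (1 - y));
	return 0;	/* both lines print 0.213910 */
}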

activations.h

@@ -2,6 +2,6 @@
#define ACTIVATIONS_H
float sigmoid(float weighted_sum);
-float sigmoid_derivative(float output)
+float sigmoid_derivative(float output);
#endif

BIN activations.o (normal file)
Binary file not shown.

BIN exe
Binary file not shown.

main.c

@@ -4,10 +4,12 @@
#include "randomness.h"
#include "neurons.h"
#include "network.h"
#include "activations.h"
#include "training.h"
int main(int argc, char *argv[])
{
init_randomness();
//init_randomness();
printf("Bonjour et bienvenu ;)\n");
int i;
@@ -15,6 +17,11 @@ int main(int argc, char *argv[])
	{
		printf("%dth generated fload = %f\n", i, random_float(0.0 , 1.0));
	}*/
+	printf("sigmoid(%f) = %f\n",0.5, sigmoid(0.5));
+	printf("sigmoid_derivative(%f) = %f\n",0.8, sigmoid_derivative(0.8));
+	/*float a=5.0;
+	printf("%f\n", (float)exp((double)a));*/
	return 0;
}
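For reference, with the new activations these two calls should print approximately sigmoid(0.500000) = 0.622459 and sigmoid_derivative(0.800000) = 0.213910.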

BIN main.o (normal file)
Binary file not shown.

BIN myprogram (executable file)
Binary file not shown.

network.c

@@ -5,15 +5,70 @@
#include "neurons.h"
#include "network.h"
-Neuron *generate_layer(int n_neurons, int n_neurons_prev_layer)
+Neuron *generate_layer(int n_neurons, int n_neurons_prev_layer, char *activation_function)
{
	Neuron *first=NULL, *last=NULL;
	int i;
	for(i=1 ; i<=n_neurons ; i++)
	{
		Neuron *n = init_neuron(n_neurons_prev_layer, activation_function);
		if(i==1)
		{
			first = n;
			last = n;
		}else
		{
			last->same_layer_next_neuron = n;
			last = n;
		}
	}
	return first;
}
-Network *init_network(int n_neurons_per_layer[])
+Network *init_network(int n_neurons_per_layer[], int n_layers, char *activation_function_per_layer[])
{
	/* initialize the network based on array n_neurons_per_layer :
-	- size of n_neurons_per_layer is the number of layers
+	- n_layers is simply the size of array n_neurons_per_layer
	- each ith number in array n_neurons_per_layer is the number of neurons in ith layer
	- array activation_function_per_layer must be of same size as n_neurons_per_layer
	*/
	Network *network = (Network*)malloc(sizeof(Network));
	network->n_layers = n_layers;
	network->neurons_per_layer = (int*)malloc(n_layers * sizeof(int));
	network->layers_first_neurons = (Neuron**)malloc(n_layers * sizeof(Neuron*));
	int i;
	for(i=0 ; i<n_layers ; i++)
	{
		network->neurons_per_layer[i] = n_neurons_per_layer[i];
		if(i==0)
		{
			network->layers_first_neurons[i] = generate_layer(n_neurons_per_layer[i], 0, activation_function_per_layer[i]);
		}else
		{
			network->layers_first_neurons[i] = generate_layer(n_neurons_per_layer[i], n_neurons_per_layer[i-1], activation_function_per_layer[i]);
		}
	}
	return network;
}
void print_network(Network *network)
{
}
void destroy_network(Network *network)
{
	int i;
	Neuron *temp;
	for(i=0 ; i<network->n_layers ; i++)
	{
		while(network->layers_first_neurons[i] != NULL)
		{
			temp = network->layers_first_neurons[i];
			network->layers_first_neurons[i] = temp->same_layer_next_neuron;
			destroy_neuron(temp);
		}
	}
}
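A hypothetical usage sketch of the new API; the layer sizes and activation names here are illustrative, not from the commit:

#include <stdio.h>
#include "network.h"

int main(void)
{
	int layer_sizes[] = {2, 3, 1};	/* illustrative: input, hidden, output */
	char *activations[] = {"sigmoid", "sigmoid", "sigmoid"};
	Network *net = init_network(layer_sizes, 3, activations);
	printf("layers: %d\n", net->n_layers);
	destroy_network(net);	/* frees the neurons in every layer */
	return 0;
}

Note that destroy_network as committed frees the neurons but not the two arrays inside Network or the Network struct itself; freeing those as well would avoid a small leak.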

network.h

@@ -4,12 +4,15 @@
typedef struct network Network;
struct network
{
-	Neuron *layers_first_neurons; //first neuron of each layer of the network
-	//Neuron *layers_last_neurons; //last neuron of each layer of the network
+	int n_layers;
+	int *neurons_per_layer; //keeps track of number of layers' neurons
+	Neuron **layers_first_neurons; //pointers on first neuron of each layer of the network
+	//Neuron *layers_last_neurons; //last neuron of each layer of the network
};
-Neuron *generate_layer(int n_neurons, int n_neurons_prev_layer);
-Network *init_network(int n_neurons_per_layer[]);
+Neuron *generate_layer(int n_neurons, int n_neurons_prev_layer, char *activation_function);
+Network *init_network(int n_neurons_per_layer[], int n_layers, char *activation_function_per_layer[]);
void print_network(Network *network);
void destroy_network(Network *network);
#endif

BIN network.o (normal file)
Binary file not shown.

neurons.c

@@ -3,8 +3,9 @@
#include <string.h>
#include "randomness.h"
#include "neurons.h"
+#include "activations.h"
-Neuron *init_neuron(int n_weights)
+Neuron *init_neuron(int n_weights, char *activation_function)
{
	Neuron *neuron = (Neuron*)malloc(sizeof(Neuron));
	if(n_weights == 0)
@@ -21,6 +22,15 @@ Neuron *init_neuron(int n_weights)
	}
	neuron->bias = random_float(0.0 , 1.0);
	neuron->output = 0.0;
+	if(strcmp(activation_function, "nothing") == 0)
+	{
+		//to be completed later with tanh, relu, etc : for now only sigmoid is supported and will be the default function
+	}
+	else
+	{
+		neuron->activation = sigmoid;
+		neuron->activation_derivative = sigmoid_derivative;
+	}
	neuron->delta_error = 0.0;
	neuron->same_layer_next_neuron = NULL;
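One caveat in the block above: when activation_function is "nothing", the two function pointers are never assigned, so they keep whatever garbage malloc left there, and calling them is undefined behavior. A hedged sketch of one way to keep them valid until more activations land (the identity placeholder is an assumption, not in the commit):

/* illustrative only: a do-nothing activation as a safe placeholder */
static float identity(float x)            { return x; }
static float identity_derivative(float x) { (void)x; return 1.0f; }

if(strcmp(activation_function, "nothing") == 0)
{
	neuron->activation = identity;
	neuron->activation_derivative = identity_derivative;
}
else
{
	neuron->activation = sigmoid;	/* default until tanh, relu, etc. arrive */
	neuron->activation_derivative = sigmoid_derivative;
}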

neurons.h

@@ -7,11 +7,13 @@ struct neuron
	float *weights; //weights associated to the neuron
	float bias; //neuron's bias
	float output; //output of the neuron
+	float (*activation)(float);
+	float (*activation_derivative)(float);
	float delta_error; //the delta error for updating current weights
	Neuron *same_layer_next_neuron;
};
-Neuron *init_neuron(int n_weights);
+Neuron *init_neuron(int n_weights, char *activation_function);
void destroy_neuron(Neuron *neuron);
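The two new function pointers are what let the forward pass stay generic over activations. A hypothetical forward step for a single neuron (neuron_forward and n_inputs are illustrative names, not part of the commit):

/* hypothetical: compute one neuron's output from the previous layer's
   outputs; n_inputs must match the weight count set by init_neuron */
float neuron_forward(Neuron *neuron, const float *inputs, int n_inputs)
{
	int i;
	float sum = neuron->bias;
	for(i=0 ; i<n_inputs ; i++)
		sum += neuron->weights[i] * inputs[i];
	neuron->output = neuron->activation(sum);	/* e.g. sigmoid */
	return neuron->output;
}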

BIN neurons.o (normal file)
Binary file not shown.

BIN randomness.o (normal file)
Binary file not shown.

training.c (new file)

@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "randomness.h"
+#include "neurons.h"
+#include "activations.h"

training.h (new file)

@@ -0,0 +1,6 @@
+#ifndef TRAINING_H
+#define TRAINING_H
+#endif

BIN training.o (normal file)
Binary file not shown.