/*
** layer.c — neural-network layer tools (OCR / XOR project):
** layer construction, feed-forward, back-propagation and teardown.
*/
#include "layer.h"

/*
** Allocates a new dense layer of `nbNeuron` neurons and links it after
** `prev` (pass NULL for the input layer).
** Non-input layers get a (prev->nbNeuron x nbNeuron) weight matrix and a
** bias vector, both randomly initialized.
** Returns the new layer, or NULL on allocation failure.
** Ownership: the whole chain is released with FreeNetwork().
*/
t_layer* CreateLayer(int nbNeuron, t_layer *prev)
{
	t_layer* layer = malloc(sizeof *layer);
	if (layer == NULL)
		return NULL;
	layer->nbNeuron = nbNeuron;
	layer->prev = prev;
	layer->next = NULL;
	/* NULL these unconditionally so an input layer never carries garbage
	   pointers (FreeNetwork and free(NULL) are then always safe). */
	layer->weight = NULL;
	layer->bias = NULL;
	if (prev != NULL)
	{
		prev->next = layer;
		layer->weight = CreateMatrix(prev->nbNeuron, layer->nbNeuron);
		SetRandomWeight(layer->weight, prev->nbNeuron, layer->nbNeuron);
		layer->bias = CreateMatrix(layer->nbNeuron, 1);
		SetRandomWeight(layer->bias, layer->nbNeuron, 1);
	}
	layer->neuron = CreateMatrix(layer->nbNeuron, 1);
	layer->activation = NULL;
	layer->dActivation = NULL;

	return layer;
}


/*
** Allocates an activation layer (element-wise `activation` function with
** derivative `dActivation`) and links it after `prev`.
** The layer mirrors prev's neuron count and owns no weights or bias.
** Returns the new layer, or NULL if `prev` is NULL or allocation fails.
*/
t_layer* CreateActivationLayer(t_layer *prev, float (*activation)(float val), float (*dActivation)(float val))
{
	if(prev == NULL)
		return NULL;
	t_layer* layer = malloc(sizeof *layer);
	if (layer == NULL)
		return NULL;
	layer->nbNeuron = prev->nbNeuron;
	layer->prev = prev;
	layer->next = NULL;
	/* BUG FIX: weight/bias were left uninitialized, but FreeNetwork frees
	   them for every layer with a prev — freeing garbage pointers is UB.
	   Activation layers have no parameters, so NULL them (free(NULL) is a
	   no-op). */
	layer->weight = NULL;
	layer->bias = NULL;
	prev->next = layer;
	layer->neuron = CreateMatrix(layer->nbNeuron, 1);
	layer->activation = activation;
	layer->dActivation = dActivation;

	return layer;
}

/*
** Propagates activations forward from `layer` to the end of the chain.
** Activation layers apply their function element-wise to the previous
** layer's outputs; dense layers compute neuron[k] = bias[k] +
** sum_j weight[j * nbNeuron + k] * prev->neuron[j]
** (weight stored as prev->nbNeuron rows by nbNeuron columns — same
** indexing convention as BackPropagation).
** The input layer (prev == NULL) is left untouched; the caller is
** expected to have filled its `neuron` vector.
*/
void FeedForward(t_layer* layer)
{
	if (layer->prev != NULL)
	{
		if (layer->activation != NULL)
		{
			ApplyFunction(layer->prev->neuron, layer->neuron, layer->nbNeuron, 1, layer->activation);
		}
		else
		{
			/* Dense layer: accumulate locally, then store — and no
			   leftover debug printf polluting stdout each pass. */
			for (int k = 0; k < layer->nbNeuron; k++)
			{
				float sum = layer->bias[k];
				for (int j = 0; j < layer->prev->nbNeuron; j++)
				{
					sum += layer->weight[j * layer->nbNeuron + k] * layer->prev->neuron[j];
				}
				layer->neuron[k] = sum;
			}
		}
	}

	if (layer->next != NULL)
	{
		FeedForward(layer->next);
	}
}

/*
** Back-propagates the error gradient from `layer` toward the input layer
** and applies gradient-descent updates (learning rate `trainSpeed`).
**
** DError: heap-allocated gradient of the loss w.r.t. this layer's output
**         (length layer->nbNeuron). Ownership is taken: it is freed here,
**         so the initial caller must pass a malloc'd buffer.
**
** Activation layers only transform the gradient through dActivation;
** dense layers first compute the previous layer's gradient from the
** *pre-update* weights, then update weights and biases.
*/
void BackPropagation(t_layer* layer, float* DError, float trainSpeed)
{
	if (layer->prev != NULL)
	{
		float* prevError;
		if (layer->activation != NULL)
		{
			/* Activation layer: chain rule through f — the derivative is
			   evaluated at prev->neuron, the values f was applied to.
			   (nbNeuron == prev->nbNeuron for activation layers.) */
			prevError = CreateMatrix(layer->nbNeuron, 1);
			for (int i = 0; i < layer->nbNeuron; i++)
			{
				prevError[i] = DError[i] * layer->dActivation(layer->prev->neuron[i]);
			}
		}
		else
		{
			/* Dense layer, step 1: prevError = W * DError, using the
			   weights BEFORE they are updated below (order matters).
			   Indexing weight[i * nbNeuron + j] matches FeedForward. */
			prevError = CreateMatrix(layer->prev->nbNeuron, 1);
			for (int i = 0; i < layer->prev->nbNeuron; i++)
			{
				prevError[i] = 0;
				for (int j = 0; j < layer->nbNeuron; j++)
				{
					prevError[i] += DError[j] * layer->weight[i * layer->nbNeuron + j];
				}
			}

			/* Step 2: gradient-descent update of weights and biases:
			   dL/dW[i][j] = prev->neuron[i] * DError[j], dL/db[j] = DError[j]. */
			for (int j = 0; j < layer->nbNeuron; j++)
			{
				for (int i = 0; i < layer->prev->nbNeuron; i++)
				{
					layer->weight[i * layer->nbNeuron + j] -= layer->prev->neuron[i] * DError[j] * trainSpeed;
				}
				layer->bias[j] -= DError[j] * trainSpeed;
			}
		}
		/* Recurse toward the input; prevError ownership transfers to the
		   recursive call, which frees it. */
		BackPropagation(layer->prev, prevError, trainSpeed);
	}
	/* Always consume the incoming gradient, even at the input layer. */
	free(DError);
}



/*
** Releases every layer from `layer` to the end of the chain, including
** each layer's neuron vector and, for non-input layers, its weight
** matrix and bias vector.
*/
void FreeNetwork(t_layer* layer)
{
	/* Iterative walk instead of recursion: grab `next` before the node
	   is destroyed, then advance. */
	while (layer != NULL)
	{
		t_layer* following = layer->next;
		if (layer->prev != NULL)
		{
			free(layer->weight);
			free(layer->bias);
		}
		free(layer->neuron);
		free(layer);
		layer = following;
	}
}