#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include "nnTypes.h"
/* Seed the C library PRNG with a fixed constant so every run draws the
   same weight sequence (reproducible training).
   NOTE(review): setup() also calls randomSeed(analogRead(0)), but that
   seeds Arduino's random(), not rand() -- this fixed srand() seed is what
   actually governs the initial weights; confirm the determinism is
   intended. */
void InitializeRandoms() {
srand(4711);
}
/* Uniformly distributed pseudo-random integer in [Low, High], inclusive. */
int RandomEqualINT(int Low, int High) {
  int span = High - Low + 1;
  return Low + rand() % span;
}
/* Uniformly distributed pseudo-random double in [Low, High]. */
double RandomEqualDouble(double Low, double High) {
  double unit = (double) rand() / RAND_MAX;   /* in [0, 1] */
  return Low + unit * (High - Low);
}
//just for testing alternative ways of storing the values... (for now it is the same: DOUBLE)
/* Convert a double into the network's storage type VAL.  Currently a
   plain numeric conversion; kept as a hook for experimenting with other
   value representations. */
VAL DoubleToVal(double dValue) {
  return static_cast<VAL>(dValue);
}
/* Convert the network's storage type VAL back to a double (inverse of
   DoubleToVal). */
double ValToDouble(VAL val) {
  return static_cast<double>(val);
}
/* Print a diagnostic message over the serial port.
   Assumes Serial.begin() has already been called (done in setup()). */
void DEBUG(char *s) {
Serial.println(s);
}
/* ---- Network topology -------------------------------------------------- */
#define NUM_LAYERS 3
#define N 2
#define M 1
// Units per layer: N inputs, 3 hidden units, M outputs.
int Units[NUM_LAYERS] = {N, 3, M};
/* ---- Training data ------------------------------------------------------ */
// Target output for each row of XOR_Inputs below.
// NOTE(review): {1,0,0,1} over inputs 00,01,10,11 is the truth table of
// XNOR (equality), not XOR -- confirm whether this is intentional.
double XOR_Outputs[4] = { 1.0, 0.0, 0.0, 1.0 };
double XOR_Inputs [4][2] = {
{0, 0},
{0, 1},
{1, 0},
{1, 1}
};
// Accumulated squared error over the pattern set; written by TestNet().
double TrainError;
double TestError;
/* Set the learning parameters used for training (overrides the defaults
   written by GenerateNetwork()). */
void InitializeApplication(NET* Net) {
  Net->Gain  = 1.0;   /* sigmoid steepness */
  Net->Eta   = 0.05;  /* learning rate */
  Net->Alpha = 0.5;   /* momentum factor */
}
void FinalizeApplication(NET* Net) { }
/******************************************************************************
I N I T I A L I Z A T I O N
******************************************************************************/
/*
 * Allocate the layer array, per-layer output/error buffers and weight
 * matrices for a NUM_LAYERS fully connected network, then set default
 * learning parameters (later overridden by InitializeApplication()).
 * Index 0 of each Output/Error row and weight column is reserved for the
 * constant bias unit.
 * Allocation failures are reported via DEBUG("NULLx") tags.
 * Fix: the original wrote the bias into Output[0] BEFORE the NULL checks,
 * dereferencing a possibly-NULL buffer; the write now happens after the
 * checks and only when the allocation succeeded.
 */
void GenerateNetwork(NET* Net)
{
  int l,i;
  Net->Layer = (LAYER**) calloc(NUM_LAYERS, sizeof(LAYER*));
  for (l=0; l<NUM_LAYERS; l++) {
    Net->Layer[l] = (LAYER*) malloc(sizeof(LAYER));
    if (Net->Layer[l] == NULL) DEBUG("NULL1");
    Net->Layer[l]->Units = Units[l];
    /* +1 everywhere: slot 0 holds the bias unit */
    Net->Layer[l]->Output     = (VAL*)  calloc(Units[l]+1, sizeof(VAL));
    Net->Layer[l]->Error      = (VAL*)  calloc(Units[l]+1, sizeof(VAL));
    Net->Layer[l]->Weight     = (VAL**) calloc(Units[l]+1, sizeof(VAL*));
    Net->Layer[l]->WeightSave = (VAL**) calloc(Units[l]+1, sizeof(VAL*));
    Net->Layer[l]->dWeight    = (VAL**) calloc(Units[l]+1, sizeof(VAL*));
    if (Net->Layer[l]->Output == NULL) DEBUG("NULL2");
    if (Net->Layer[l]->Error == NULL) DEBUG("NULL3");
    if (Net->Layer[l]->Weight == NULL) DEBUG("NULL4");
    if (Net->Layer[l]->WeightSave == NULL) DEBUG("NULL5");
    if (Net->Layer[l]->dWeight == NULL) DEBUG("NULL6");
    /* write the bias only after the allocation has been checked */
    if (Net->Layer[l]->Output != NULL) {
      Net->Layer[l]->Output[0] = DoubleToVal(BIAS);
    }
    if (l != 0) {
      /* rows 1..Units[l]: one weight vector per non-bias unit, each with
         Units[l-1]+1 entries (bias weight at column 0) */
      for (i=1; i<=Units[l]; i++) {
        Net->Layer[l]->Weight[i]     = (VAL*) calloc(Units[l-1]+1, sizeof(VAL));
        Net->Layer[l]->WeightSave[i] = (VAL*) calloc(Units[l-1]+1, sizeof(VAL));
        Net->Layer[l]->dWeight[i]    = (VAL*) calloc(Units[l-1]+1, sizeof(VAL));
        if (Net->Layer[l]->Weight[i] == NULL) DEBUG("NULL7");
        if (Net->Layer[l]->WeightSave[i] == NULL) DEBUG("NULL8");
        if (Net->Layer[l]->dWeight[i] == NULL) DEBUG("NULL9");
      }
    }
  }
  Net->InputLayer  = Net->Layer[0];
  Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];
  /* defaults; InitializeApplication() sets the values actually trained with */
  Net->Alpha = 0.9;
  Net->Eta   = 0.25;
  Net->Gain  = 1.0;
}
/* Initialize every connection weight (including the bias weight at
   column 0) with a uniform random value in [-0.5, 0.5]. */
void RandomWeights(NET* Net)
{
  int layer, unit, src;
  for (layer = 1; layer < NUM_LAYERS; layer++) {
    LAYER* cur  = Net->Layer[layer];
    LAYER* prev = Net->Layer[layer-1];
    for (unit = 1; unit <= cur->Units; unit++) {
      for (src = 0; src <= prev->Units; src++) {
        cur->Weight[unit][src] = DoubleToVal(RandomEqualDouble(-0.5, 0.5));
      }
    }
  }
}
/* Copy an input pattern into slots 1..Units of the input layer; slot 0 is
   the bias unit and is left untouched. */
void SetInput(NET* Net, double* Input)
{
  int unit;
  for (unit = 1; unit <= Net->InputLayer->Units; unit++) {
    Net->InputLayer->Output[unit] = DoubleToVal(Input[unit-1]);
  }
}
/* Copy the output layer's activations (slots 1..Units) into the caller's
   0-based Output array. */
void GetOutput(NET* Net, double* Output)
{
  int unit;
  for (unit = 1; unit <= Net->OutputLayer->Units; unit++) {
    Output[unit-1] = ValToDouble(Net->OutputLayer->Output[unit]);
  }
}
/* Snapshot all connection weights into WeightSave so a later
   RestoreWeights() can roll back to the best network seen so far. */
void SaveWeights(NET* Net)
{
  int layer, unit, src;
  for (layer = 1; layer < NUM_LAYERS; layer++) {
    LAYER* cur = Net->Layer[layer];
    int nSrc = Net->Layer[layer-1]->Units;
    for (unit = 1; unit <= cur->Units; unit++) {
      for (src = 0; src <= nSrc; src++) {
        cur->WeightSave[unit][src] = cur->Weight[unit][src];
      }
    }
  }
}
/* Copy the WeightSave snapshot back into the live weights (inverse of
   SaveWeights). */
void RestoreWeights(NET* Net)
{
  int layer, unit, src;
  for (layer = 1; layer < NUM_LAYERS; layer++) {
    LAYER* cur = Net->Layer[layer];
    int nSrc = Net->Layer[layer-1]->Units;
    for (unit = 1; unit <= cur->Units; unit++) {
      for (src = 0; src <= nSrc; src++) {
        cur->Weight[unit][src] = cur->WeightSave[unit][src];
      }
    }
  }
}
/* Forward-propagate one layer pair: each upper unit gets the weighted sum
   of all lower outputs (including the bias at index 0) squashed through
   the logistic sigmoid 1 / (1 + e^(-Gain * sum)). */
void PropagateLayer(NET* Net, LAYER* Lower, LAYER* Upper)
{
  int u, s;
  for (u = 1; u <= Upper->Units; u++) {
    double acc = 0.0;
    for (s = 0; s <= Lower->Units; s++) {
      acc += ValToDouble(Upper->Weight[u][s]) * ValToDouble(Lower->Output[s]);
    }
    Upper->Output[u] = DoubleToVal(1.0 / (1.0 + exp(-Net->Gain * acc)));
  }
}
/* Run a full forward pass, layer by layer, from input to output. */
void PropagateNet(NET* Net)
{
  int layer;
  for (layer = 1; layer < NUM_LAYERS; layer++) {
    PropagateLayer(Net, Net->Layer[layer-1], Net->Layer[layer]);
  }
}
/* Compare the output layer with Target: store the sigmoid-scaled delta
   for each output unit in Error[] and accumulate the summed squared error
   (0.5 * err^2 per unit) into Net->Error. */
void ComputeOutputError(NET* Net, double* Target)
{
  int u;
  Net->Error = 0;
  for (u = 1; u <= Net->OutputLayer->Units; u++) {
    double out  = ValToDouble(Net->OutputLayer->Output[u]);
    double diff = Target[u-1] - out;
    Net->OutputLayer->Error[u] = DoubleToVal(Net->Gain * out * (1 - out) * diff);
    Net->Error += 0.5 * sqr(diff);
  }
}
/* Propagate the error one layer down: each lower unit's delta is the
   weighted sum of the upper layer's deltas, scaled by the sigmoid
   derivative Gain * out * (1 - out). */
void BackpropagateLayer(NET* Net, LAYER* Upper, LAYER* Lower)
{
  int lo, up;
  for (lo = 1; lo <= Lower->Units; lo++) {
    double out = ValToDouble(Lower->Output[lo]);
    double acc = 0.0;
    for (up = 1; up <= Upper->Units; up++) {
      acc += ValToDouble(Upper->Weight[up][lo]) * ValToDouble(Upper->Error[up]);
    }
    Lower->Error[lo] = DoubleToVal(Net->Gain * out * (1 - out) * acc);
  }
}
/* Walk the error backwards from the output layer down to (but not into)
   the input layer, which needs no deltas. */
void BackpropagateNet(NET* Net)
{
  int layer = NUM_LAYERS - 1;
  while (layer > 1) {
    BackpropagateLayer(Net, Net->Layer[layer], Net->Layer[layer-1]);
    layer--;
  }
}
/* Gradient-descent weight update with momentum:
     Weight += Eta * delta * out + Alpha * previous_step
   then remember Eta * delta * out as dWeight for the next call's
   momentum term. */
void AdjustWeights(NET* Net)
{
  int layer, unit, src;
  for (layer = 1; layer < NUM_LAYERS; layer++) {
    LAYER* cur  = Net->Layer[layer];
    LAYER* prev = Net->Layer[layer-1];
    for (unit = 1; unit <= cur->Units; unit++) {
      /* delta is constant across the inner loop; hoist it */
      double delta = ValToDouble(cur->Error[unit]);
      for (src = 0; src <= prev->Units; src++) {
        double out  = ValToDouble(prev->Output[src]);
        double mom  = ValToDouble(cur->dWeight[unit][src]);
        double step = Net->Eta * delta * out;
        cur->Weight[unit][src] += DoubleToVal(step + Net->Alpha * mom);
        cur->dWeight[unit][src] = DoubleToVal(step);
      }
    }
  }
}
/* One full pattern presentation: forward pass, read the outputs, compute
   the output error, and -- when Training is set -- backpropagate and
   update the weights. */
void SimulateNet(NET* Net, double* Input, double* Output, double* Target, BOOL Training)
{
  /* forward pass */
  SetInput(Net, Input);
  PropagateNet(Net);
  GetOutput(Net, Output);
  ComputeOutputError(Net, Target);
  if (!Training) return;
  /* learning pass */
  BackpropagateNet(Net);
  AdjustWeights(Net);
}
/* Run the given number of training epochs; each epoch presents all four
   patterns once, updating the weights after every pattern. */
void TrainNet(NET* Net, int Epochs)
{
  double Output[M];
  int epoch, pattern;
  for (epoch = 0; epoch < Epochs; epoch++) {
    for (pattern = 0; pattern < 4; pattern++) {
      SimulateNet(Net, XOR_Inputs[pattern], Output, &XOR_Outputs[pattern], TRUE);
    }
  }
}
/*
 * Measure the network error over the four patterns without training.
 * The "training set" and "test set" are the same four patterns in this
 * sketch, so a single evaluation pass suffices: the original ran the
 * identical loop twice (8 forward passes), always producing
 * TrainError == TestError.  One pass with no weight updates leaves all
 * observable state (weights, final Net->Error, both error globals)
 * exactly as before.
 */
void TestNet(NET* Net)
{
  int n;
  double Output[M];
  TrainError = 0;
  for (n = 0; n < 4; n++) {
    SimulateNet(Net, XOR_Inputs[n], Output, &XOR_Outputs[n], FALSE);
    TrainError += Net->Error;
  }
  /* identical pattern set -> identical error sum */
  TestError = TrainError;
}
/*
 * Run every pattern through the trained network (no learning) and print
 * the network's output value for each one over the serial port.
 * Fix: the original called DEBUG(Output[0]), passing a double where
 * DEBUG expects char* -- that does not compile in C++; print the numeric
 * value with Serial.println directly instead.
 */
void EvaluateNet(NET* Net)
{
  int n;
  double Output[M];
  DEBUG("--");
  for (n = 0; n < 4; n++) {
    SimulateNet(Net, XOR_Inputs[n], Output, &XOR_Outputs[n], FALSE);
    Serial.println(Output[0]);
  }
}
/*
 * Arduino entry point: build and train the network, then print results.
 * Training loop: run 10 epochs at a time; save the weights whenever the
 * test error improves, and stop (restoring the best weights) once the
 * error fails to improve while already below 0.2.
 * NOTE(review): if the error plateaus at or above 0.2 without improving,
 * neither branch fires and this do/while never terminates -- confirm
 * that is acceptable for this sketch.
 */
void setup()
{
Serial.begin(9600);
// NOTE(review): randomSeed() seeds Arduino's random(); the weights are
// drawn from rand(), which InitializeRandoms() reseeds with a constant,
// so this call does not actually randomize the run.
randomSeed(analogRead(0));
NET Net;
BOOL Stop;
double MinTestError;
InitializeRandoms();
GenerateNetwork(&Net);
RandomWeights(&Net);
InitializeApplication(&Net);
DEBUG("START");
Stop = FALSE;
MinTestError = 5000000;
do {
TrainNet(&Net, 10);
TestNet(&Net);
if (TestError < MinTestError) {
//DEBUG("Save");
//DEBUG(TrainError);
//DEBUG(TestError);
// New best error: remember it and snapshot the weights.
MinTestError = TestError;
SaveWeights(&Net);
}
else if (TestError < 0.2) {
// No improvement, but already good enough: restore best weights, stop.
DEBUG("END");
Stop = TRUE;
RestoreWeights(&Net);
}
} while (NOT Stop);
// Evaluate with the restored weights (done twice; results should match).
TestNet(&Net);
EvaluateNet(&Net);
TestNet(&Net);
EvaluateNet(&Net);
FinalizeApplication(&Net);
}
/* All work happens once in setup(); park the MCU in an idle spin here.
   (This code was originally a desktop program, not an Arduino sketch.) */
void loop() {
  while (true) { }
}