/* Bobot_nn.c */

#include "../game/g_local.h"
#include "bobot_nn.h"
#include "bobot.h"
int            moveWeightsNotEvolving[BOBOT_MOVE_BRAIN__MAX_NEURONS_PER_LAYER+1][BOBOT_MOVE_BRAIN__MAX_NEURONS_PER_LAYER+1][BOBOT_MOVE_BRAIN__MAX_NEURONS_PER_LAYER+1];
int            lookWeightsNotEvolving[BOBOT_LOOK_BRAIN__MAX_NEURONS_PER_LAYER+1][BOBOT_LOOK_BRAIN__MAX_NEURONS_PER_LAYER+1][BOBOT_LOOK_BRAIN__MAX_NEURONS_PER_LAYER+1];
int            numMoveWeightsEvolving = BOBOT_MOVE_BRAIN__NUM_WEIGHTS;
int            numLookWeightsEvolving = BOBOT_LOOK_BRAIN__NUM_WEIGHTS;

/*---------------------------- methods for Neuron */

void CreateSNeuron(SNeuron* neuron, int NumInputs)
{    int i;
    neuron->m_NumInputs = NumInputs+1;

    /*    we need an additional weight for the bias hence the +1 */
    for (i=0; i<NumInputs+1; ++i)
    {    /*    set up the weights with an initial random value */
        neuron->vecWeight[i] = RandomClamped();
        //    G_Printf("new weight : %f\n",neuron->vecWeight[i]);
    }
}
/************************ methods for NeuronLayer **********************
    ctor creates a layer of neurons of the required size by calling the
    SNeuron ctor the rqd number of times
-----------------------------------------------------------------------*/
void CreateSNeuronLayer(SNeuronLayer* neuronLayer, int NumNeurons,int NumInputsPerNeuron)
{    int i;
    neuronLayer->m_NumNeurons = NumNeurons;
    for (i=0; i<NumNeurons; ++i)
    { CreateSNeuron(&(neuronLayer->vecNeurons[i]), NumInputsPerNeuron); }
}

/************************ methods forCNeuralNet ************************
------------------------------default ctor ----------------------------
    creates a ANN
-----------------------------------------------------------------------*/
void CreateNeuralNet(CNeuralNet* neuralNet,int numInputs,int numOutputs,int numHiddenLayers,int neuronsPerHiddenLyr)
{
    neuralNet->m_NumInputs                =    numInputs;
    neuralNet->m_NumOutputs                =    numOutputs;
    neuralNet->m_NumHiddenLayers        =    numHiddenLayers;
    neuralNet->m_NeuronsPerHiddenLyr    =    neuronsPerHiddenLyr;
    CreateNet(neuralNet);
}

/*------------------------------createNet()------------------------------
    this method builds the ANN. The weights are all initially set to
    random values -1 < w < 1
------------------------------------------------------------------------*/
void CreateNet(CNeuralNet* neuralNet)
{    int i;

    /*    create the layers of the network */
    if (neuralNet->m_NumHiddenLayers > 0)
    {    /*    create first hidden layer */
        CreateSNeuronLayer(&(neuralNet->vecLayers[0]),neuralNet->m_NeuronsPerHiddenLyr,neuralNet->m_NumInputs);
        for (i=0; i<neuralNet->m_NumHiddenLayers-1; ++i)
        { CreateSNeuronLayer(&(neuralNet->vecLayers[i]),neuralNet->m_NeuronsPerHiddenLyr,neuralNet->m_NeuronsPerHiddenLyr); }

        /*    create output layer */
        CreateSNeuronLayer(&(neuralNet->vecLayers[neuralNet->m_NumHiddenLayers]),neuralNet->m_NumOutputs,neuralNet->m_NeuronsPerHiddenLyr);
    }
  else
  {  /*    create output layer */
      CreateSNeuronLayer(&(neuralNet->vecLayers[1]),neuralNet->m_NumOutputs,neuralNet->m_NumInputs);
  }
}

/*---------------------------------GetWeights-----------------------------
    returns a vector containing the weights
------------------------------------------------------------------------*/
void GetWeights(CNeuralNet* neuralNet, double* weights)
{    int i,j,k;
    int indice = 0;

    /*    for each layer */
    for (i=0; i<neuralNet->m_NumHiddenLayers + 1; ++i)
    {    /*    for each neuron */
        for (j=0; j<neuralNet->vecLayers[i].m_NumNeurons; ++j)
        {    /*    for each weight */
            for (k=0; k<neuralNet->vecLayers[i].vecNeurons[j].m_NumInputs; ++k)
            {    /*    si le poids doit évoluer */
                //    if(!weightsNotEvolving[i][j][k])
                //    {
                weights[indice] = neuralNet->vecLayers[i].vecNeurons[j].vecWeight[k];
                indice++;
                //    }
            }
        }
    }
}

/*-----------------------------------PutWeights---------------------------
    given an array of doubles this function replaces the weights in the NN
  with the new values
------------------------------------------------------------------------*/
void PutWeights(CNeuralNet* neuralNet, double* weights)
{    int i,j,k;
    int cWeight = 0;

    /*    for each layer */
    for (i=0; i<neuralNet->m_NumHiddenLayers + 1; ++i)
    {    /*    for each neuron */
        for (j=0; j<neuralNet->vecLayers[i].m_NumNeurons; ++j)
        {    /*    for each weight */
            for (k=0; k<neuralNet->vecLayers[i].vecNeurons[j].m_NumInputs; ++k)
            {    // si le poids doit évoluer
                //    if(!weightsNotEvolving[i][j][k])
                //    {
                neuralNet->vecLayers[i].vecNeurons[j].vecWeight[k] = weights[cWeight];
                cWeight++;
                //}
            }
        }
    }
    return;
}

/*---------------------------------GetNumberOfWeights---------------------
    returns the total number of weights needed for the net
------------------------------------------------------------------------*/
int GetNumberOfWeights(CNeuralNet* neuralNet) /*const*/
{    int i,j,k;
    int weights = 0;

    /*    for each layer */
    for (i=0; i<neuralNet->m_NumHiddenLayers + 1; ++i)
    {    /*    for each neuron */
        for (j=0; j<neuralNet->vecLayers[i].m_NumNeurons; ++j)
        {    /*    for each weight */
            for (k=0; k<neuralNet->vecLayers[i].vecNeurons[j].m_NumInputs; ++k)
            { weights++; }
        }
    }
    return weights;
}
/*-------------------------------UpdateNN---------------------------------
    given an input vector this function calculates the output vector
------------------------------------------------------------------------*/
void UpdateNN(CNeuralNet* neuralNet, double* inputs, double* finalOutputs)
{    /*    stores the resultant outputs from each layer */
    double outputs[BOBOT_MAX_NUM_NEURONS_PER_HIDDEN_LAYER+1];
    double newInputs[BOBOT_MAX_NUM_NEURONS_PER_HIDDEN_LAYER+1];
    int cWeight = 0;
    int i,j,k;

    for(j=0;j<neuralNet->m_NumInputs;j++)
    { newInputs[j] = inputs[j] + (RandomClamped() * BOBOT_MAX_NOISE); } // avec du bruit
   
    /*    For each layer.... */
    for (i=0; i<neuralNet->m_NumHiddenLayers + 1; ++i)
    {    // G_Printf("Layer %d\n",i);
        if ( i > 0 )
        {    for(j=0;j<neuralNet->m_NeuronsPerHiddenLyr+1;j++)
            { newInputs[j] = outputs[j]; }
        }
       
        /*    outputs.clear(); */
        cWeight = 0;

        /*    for each neuron sum the (inputs * corresponding weights).Throw
            the total at our sigmoid function to get the output. */
        for (j=0; j<neuralNet->vecLayers[i].m_NumNeurons; ++j)
        {    double netinput = 0;
            int    NumInputs = neuralNet->vecLayers[i].vecNeurons[j].m_NumInputs;
            //G_Printf("Neuron %d\n",j);
           
            /*    for each weight */
            for (k=0; k<NumInputs - 1; ++k)
            {    /*    sum the weights x inputs */
                //G_Printf("Weight %d = %f\n",k,neuralNet->vecLayers[i].vecNeurons[j].vecWeight[k]);
                netinput += neuralNet->vecLayers[i].vecNeurons[j].vecWeight[k] * newInputs[cWeight];
                cWeight++;
            }

            /*    add in the bias */
            netinput += neuralNet->vecLayers[i].vecNeurons[j].vecWeight[NumInputs-1] * BOBOT_BIAS;

            /*    we can store the outputs from each layer as we generate them.
                The combined activation is first filtered through the sigmoid function */
   
            if(i == neuralNet->m_NumHiddenLayers)    // si on a à faire au layer de sortie du réseau
            { finalOutputs[j] = Sigmoid(netinput, (double) BOBOT_ACTIVATION_RESPONSE); }
            else                                    // c'est un layer caché
            { outputs[j] = Sigmoid(netinput, BOBOT_ACTIVATION_RESPONSE); }
            cWeight = 0;
        }
    }
}
/*    -------------------------------Sigmoid function------------------------- */
double Sigmoid(double netinput, double response)
{    return ( (1 / ( 1 + exp(-netinput / response)))); }

/*    détermine les poids qui n'évolueront pas au fil des générations */
void LoadWeightsNotEvolving(int brainType)
{    FILE        *pIn;
    char        filename[60];
    char        brainName[60];
    //    vmCvar_t    mapname;
    //    double        weight;
    int            i,j,k;
    int            numWeightsNotEvolving = 0;
   
    strcpy(filename,".\\bobot\\brains\\");
    switch(brainType)
    {
        case BOBOT_MOVE_BRAIN :
            strcpy(brainName,BOBOT_MOVE_BRAIN__BRAIN_NAME);
            break;
        case BOBOT_LOOK_BRAIN :
            strcpy(brainName,BOBOT_LOOK_BRAIN__BRAIN_NAME);
            break;
        default :
            G_Printf("BOBOT ERROR : SaveBestBrain : bad brain type\n");
            break;
    }
    strcat(filename,brainName);
    strcat(filename,"_weightsNotEvolving.nn"); // il est très important de ne laisser aucune ligne vide à la fin du fichier
   
    /*    initialisation : a priori tous les poids évoluent */
    for(i=0;i<BOBOT_MAX_NEURONS_PER_LAYER+1;i++)
    {    for(j=0;j<BOBOT_MAX_NEURONS_PER_LAYER+1;j++)
        {    for(k=0;k<BOBOT_MAX_NEURONS_PER_LAYER+1;k++)
            {    if(brainType == BOBOT_MOVE_BRAIN)
                {    moveWeightsNotEvolving[i][j][k] = 0; }
                if(brainType == BOBOT_LOOK_BRAIN)
                {    lookWeightsNotEvolving[i][j][k] = 0; }
            }
        }
    }
    if((pIn = fopen(filename, "rt" )) == NULL)
    {   G_Printf("BOBOT %s: All the weights will evolve.\n",brainName);
        return;
    }
    G_Printf("BOBOT %s : Determining the weights which will not evolve... ",brainName);
   
    /*    ces poids n'évolueront pas au fil des générations */
    while(!feof(pIn))
    {    fscanf(pIn,"%d %d %d",&i,&j,&k);
        if(brainType == BOBOT_MOVE_BRAIN)
        { moveWeightsNotEvolving[i][j][k] = 1; }
        if(brainType == BOBOT_LOOK_BRAIN)
        { lookWeightsNotEvolving[i][j][k] = 1; }
        numWeightsNotEvolving++;
    }
    switch(brainType)
    {
        case BOBOT_MOVE_BRAIN:
            numMoveWeightsEvolving -= numWeightsNotEvolving;
            G_Printf("found %d weights that will not evolve, and %d that will evolve.\n",numWeightsNotEvolving,numMoveWeightsEvolving);
            break;
        case BOBOT_LOOK_BRAIN:
            numLookWeightsEvolving -= numWeightsNotEvolving;
            G_Printf("found %d weights that will not evolve, and %d that will evolve.\n",numWeightsNotEvolving,numLookWeightsEvolving);
            break;
        }
    fclose(pIn);
}
