delicious goat
delicious goat

Reputation: 47

Neural Networks are all acting identically

I am trying to make a neural network trained with the genetic algorithm to play flappy bird.

When I initialize the first generation of random neural networks, everything works as expected: there is a relatively random distribution of actions. However, after they all die and I go to initialize the second generation, they all do the same thing: jump constantly. I have no idea what the problem is.

I am using the matrix library Eigen; here is the code to my neural network class:

#include "pch.h"

#include <cmath>
#include <memory>
#include <random>

#include <Eigen/Core>
#include <Eigen/Dense>

using namespace Eigen;

class NeuralNetwork
{
private:

    static double sigmoid(double x)
    {
        return 1 / (1 + exp(-x));
    }

public:
    MatrixXd weightsIH;
    MatrixXd weightsHO;
    MatrixXd biasesIH;
    MatrixXd biasesHO;

public:

    NeuralNetwork(int in, int hid, int out)
    {
        weightsIH = MatrixXd::Random(hid,in);

        weightsHO = MatrixXd::Random(out,hid);

        biasesIH = MatrixXd::Random(hid,1);
        biasesHO = MatrixXd::Random(out,1);  
    }

    //Copy Constructors
    NeuralNetwork(NeuralNetwork& net)
    {
        weightsIH = net.weightsIH;
        weightsHO = net.weightsHO;

        biasesIH = net.biasesIH;
        biasesHO = net.biasesHO;
    }

    NeuralNetwork(std::shared_ptr<NeuralNetwork> net)
    {
        weightsIH = net->weightsIH;
        weightsHO = net->weightsHO;

        biasesIH = net->biasesIH;
        biasesHO = net->biasesHO;
    }


    int feedForward(MatrixXd input)
    {     
        MatrixXd hiddenOutputs = (weightsIH * input) + biasesIH;
        hiddenOutputs.unaryExpr([&](double x) {return sigmoid(x); });

        MatrixXd finalOutputs = (weightsHO * hiddenOutputs) + biasesHO;
        finalOutputs.unaryExpr([&](double x) {return sigmoid(x); });


        if (finalOutputs(0) > finalOutputs(1))
            return 0;
        return 1; 
    }

    void mutate(double rate)
    {
       srand((unsigned)time(NULL));
       weightsIH.unaryExpr([&](double x) {
           if ((double)rand() / (double)RAND_MAX < rate)
           {

               return ((double)rand() / (double)RAND_MAX);
           }
           else
           {
               return x;
           }
       });
       weightsHO.unaryExpr([&](double x) {
           if ((double)rand() / (double)RAND_MAX < rate)
           {

               return ((double)rand() / (double)RAND_MAX);
           }
           else
           {
               return x;
           }
        });
       biasesIH.unaryExpr([&](double x) {
           if ((double)rand() / (double)RAND_MAX < rate)
           {

               return ((double)rand() / (double)RAND_MAX);
           }
           else
           {
               return x;
           }
       });
       biasesHO.unaryExpr([&](double x) {
           if ((double)rand() / (double)RAND_MAX < rate)
           {

               return ((double)rand() / (double)RAND_MAX);
           }
           else
           {
               return x;
           }
       });
    }

};

Upvotes: 0

Views: 59

Answers (0)

Related Questions