Dr_Freeman

Reputation: 271

Training a feed-forward neural network using backpropagation

I have run into a problem while training my neural network. When I use, let's say, 10 training sets, at the end of the training process the network is only trained on the last two. If I feed in the same values I used for training, I get wrong results for everything except the last two sets. It seems as if the network's newer memory suppresses the older memory. I'm using 64 input neurons, 42 neurons in the hidden layer and one output neuron. The sigmoid function is used to activate the neurons. Training inputs and expected outputs are in the 0 to 1 range. Does anyone have any clue what might be causing the problem?
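For reference, the update the training code below is meant to implement is plain online backpropagation with a momentum term, using the sigmoid $y = 1/(1+e^{-x})$ in both layers, with $\eta$ the learning rate (eta) and $\alpha$ the momentum coefficient (alpha):

$$\Delta w_{ij}(n) = \eta\,\delta_j\,x_i + \alpha\,\Delta w_{ij}(n-1)$$
$$\delta_{\text{out}} = (t - y)\,y\,(1 - y), \qquad \delta_j = h_j\,(1 - h_j)\sum_k \delta_k\,w_{jk}$$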

    Neuron b = new Neuron();
    Fft f = new Fft();
    float e = 2.71828f;            // Euler's number, used in the sigmoid
    float eta = 0.05f;             // learning rate
    float alpha = 0.05f;           // momentum coefficient
    float[] saw = new float[42];   // accumulated back-propagated error for each hidden neuron
    float[] dh = new float[42];    // hidden-layer deltas
    float error = 0;
    float dto = 0;
    Random broj = new Random();
    TextReader br = new StreamReader("d:/trening.txt");
    TextReader ir = new StreamReader("d:\\input.txt");


    float NextFloat(Random rng, float min, float max)
    {
        return (float)(min + (rng.NextDouble() * (max - min)));
    }

    public void load()//Loads the saved weights (memory) from file
    {
        int i, j;
        byte[] floatBytes;
        BinaryReader br = new BinaryReader(File.Open("d:/memorija.txt", FileMode.Open));

        for (j = 0; j <= 41; j++)
        {
            for (i = 0; i <= 64; i++)
            {
                floatBytes = br.ReadBytes(4);
                b.w12[i][j] = BitConverter.ToSingle(floatBytes, 0);
            }
        }
        for (j = 0; j <= 1; j++)
        {
            for (i = 0; i <= 41; i++)
            {
                floatBytes = br.ReadBytes(4);
                b.w23[i][j] = BitConverter.ToSingle(floatBytes, 0);
            }
        }
        br.Close();
    }


    public void trening()//Reads the training examples (65 values each: 64 inputs + 1 expected output)
    {                    //and calls the process method for each one
        int i, n, ct = 0;
        using (TextReader tr = new StreamReader("d:/trening.txt"))
        {
            do
            {
                ct++;
            } while (tr.ReadLine() != null);
            tr.Close();
        }

        for (n = 0; n < (ct-1)/65; n++)
        {
            for (i = 1; i <= 65; i++)
                b.input[i] = Convert.ToSingle(br.ReadLine());

            process(b.input[65]);
            target.Text = ((b.input[65]).ToString());
        } 

    }
    public void process(double t)//Trains the network on a single example using backpropagation
    {
        error = 0;
        do
        {
            int i, j, k;
            BinaryWriter bw = new BinaryWriter(File.Open("d:\\memorija.txt", FileMode.Create));
            i = k = j = 0;
            for (j = 1; j <= 41; j++)
            {
                b.ulaz2[j] = b.w12[0][j];
                for (i = 1; i <= 64; i++)
                {
                    b.ulaz2[j] += b.input[i] * b.w12[i][j];
                }
                b.izlaz2[j] = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz2[j])));   // sigmoid activation of hidden neuron j
                if (b.izlaz2[j] < 0)
                    MessageBox.Show(b.izlaz2[j].ToString());
            }
            for (k = 1; k <= 1; k++)
            {
                b.ulaz3 = b.w23[0][k];
                for (j = 1; j <= 41; j++)
                {
                    b.ulaz3 += b.izlaz2[j] * b.w23[j][k];
                }
                b.izlaz = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz3)));   // sigmoid activation of the output neuron

                error += (float)(0.5 * (t - b.izlaz) * (t - b.izlaz));
                dto = (float)(t - b.izlaz) * b.izlaz * (1 - b.izlaz);
            }

            for (j = 1; j <= 41; j++)
            {
                saw[j] = 0;
                for (k = 1; k <= 1; k++)
                {
                    saw[j] += dto * b.w23[j][k];   // back-propagate the output delta through the hidden-to-output weight
                }
                dh[j] = saw[j] * b.izlaz2[j] * (1 - b.izlaz2[j]);

            }
            for (j = 1; j <= 41; j++)
            {
                b.w12d[0][j] = eta * dh[j] + alpha * b.w12d[0][j];
                b.w12[0][j] += b.w12d[0][j];
                for (i = 1; i <= 64; i++)
                {
                    b.w12d[i][j] = eta * b.input[i] * dh[j] + alpha * b.w12d[i][j];
                    b.w12[i][j] += b.w12d[i][j];
                }
            }
            for (k = 1; k <= 1; k++)
            {
                b.w23d[0][k] = eta * dto + alpha * b.w23d[0][k];
                b.w23[0][k] += b.w23d[0][k];
                for (j = 1; j <= 41; j++)
                {
                    b.w23d[j][k] = eta * b.izlaz2[j] * dto + alpha * b.w23d[j][k];
                    b.w23[j][k] += b.w23d[j][k];
                }
            }
            for (j = 0; j <= 41; j++)
            {
                for (i = 0; i <= 64; i++)
                    bw.Write(b.w12[i][j]);
            }

            for (j = 0; j <= 1; j++)
            {
                for (i = 0; i <= 41; i++)
                    bw.Write(b.w23[i][j]);
            }
            bw.Close();
            izlazb.Text = Convert.ToString(b.izlaz);
            errorl.Text = Convert.ToString(Math.Abs(b.izlaz - t));   // absolute error against the target value
        } while (Math.Abs(b.izlaz - t) > 0.03);   // keeps training on this one example until it converges
    }

    public void test()//This method reads input values and produces an output based on previous training
    {
        int i = 0, j = 0, k = 0;
        for (i = 1; i < 65; i++)
            b.input[i] = (float)Convert.ToDouble(ir.ReadLine());

        for (j = 1; j <= 41; j++)
        {
            b.ulaz2[j] = b.w12[0][j];
            for (i = 1; i <= 64; i++)
            {
                b.ulaz2[j] += b.input[i] * b.w12[i][j];
            }
            b.izlaz2[j] = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz2[j])));
        }
        for (k = 1; k <= 1; k++)
        {
            b.ulaz3 = b.w23[0][k];
            for (j = 1; j <= 41; j++)
            {
                b.ulaz3 += b.izlaz2[j] * b.w23[j][k];
            }
            b.izlaz = (float)(1.0 / (1.0 + Math.Pow(e, -b.ulaz3)));
        }
        izlazb.Text = Convert.ToString(b.izlaz);
        target.Text = "/";
        errorl.Text = "/";
    }

    public void reset()//Resets memory (re-initializes all weights with small random values)
    {
        BinaryWriter fw = new BinaryWriter(File.Open("d:\\memorija.txt", FileMode.Create));
        int i = 0;
        int j = 0;
        Random broj = new Random();
        for (j = 0; j <= 41; j++)
        {
            for (i = 0; i <= 64; i++)
            {
                b.w12[i][j] = 2 * (NextFloat(broj, -0.5f, 0.5f));   // random initial weight in (-1, 1)
                fw.Write(b.w12[i][j]);
            }
        }
        for (j = 0; j <= 1; j++)
        {
            for (i = 0; i <= 41; i++)
            {
                b.w23[i][j] = 2 * (NextFloat(broj, -0.5f, 0.5f));
                fw.Write(b.w23[i][j]);
            }
        }
        fw.Close();
    }


And the Neuron class:

    public class Neuron
    {
        public float[][] w12 = new float[65][];//(65, 42)
        public float[][] w12d = new float[65][];//(65, 42)
        public float[][] w23 = new float[42][];//(42, 2)
        public float[][] w23d = new float[42][];//(42, 2)
        public float[] ulaz2 = new float[42];
        public float[] izlaz2 = new float[42];
        public float ulaz3;
        public float[] input = new float[66];
        public static float[] ioutput;
        public float izlaz;
        public void arrayInit()
        {
            int i, j;
            for (i = 0; i <= 64; i++)
            {
                w12[i] = new float[42];
                w12d[i] = new float[42];
            }

            for (i = 0; i < 42; i++)
            {
                w23[i] = new float[2];
                w23d[i] = new float[2];
            }
            for (j = 0; j < 42; j++)
                for (i = 0; i <= 64; i++)
                {
                    w12[i][j] = 0;
                    w12d[i][j] = 0;
                }
            for (j = 0; j < 2; j++)
                for (i = 0; i < 42; i++)
                {
                    w23[i][j] = 0;
                    w23d[i][j] = 0;
                }
        }
    }

Upvotes: 2

Views: 653

Answers (1)

Dr_Freeman

Reputation: 271

I found out what the problem was. I wasn't interleaving the training examples: I kept presenting a single example to the network until it had learned it, instead of cycling through all of the examples. I hope this will be useful to someone.
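In other words, rather than calling process() on one example until its error drops below the threshold, present every example once per pass and repeat the passes until the whole set has converged. Here is a minimal sketch of that outer loop, assuming a hypothetical trainOne(input, target) delegate that performs a single forward/backward pass and returns the error for that example (the names here are illustrative and not part of the code above):

    // Minimal sketch of interleaved (epoch-based) training. trainOne is a hypothetical
    // delegate that does one forward/backward pass for a single example and returns
    // that example's error; it is not taken from the code in the question.
    using System;
    using System.Collections.Generic;

    class EpochTrainer
    {
        // Runs passes (epochs) over the whole training set instead of training
        // one example to convergence before moving on to the next.
        public static void Train(IList<float[]> inputs, IList<float> targets,
                                 Func<float[], float, float> trainOne,
                                 float tolerance, int maxEpochs)
        {
            Random rng = new Random();
            for (int epoch = 0; epoch < maxEpochs; epoch++)
            {
                // Visit every example once per epoch, in a shuffled order,
                // so no single example dominates the weight updates.
                int[] order = ShuffledIndices(inputs.Count, rng);
                float worstError = 0f;
                foreach (int idx in order)
                {
                    float err = trainOne(inputs[idx], targets[idx]);
                    if (err > worstError) worstError = err;
                }
                // Stop only when the network is within tolerance on ALL examples.
                if (worstError <= tolerance)
                    return;
            }
        }

        static int[] ShuffledIndices(int n, Random rng)
        {
            int[] idx = new int[n];
            for (int i = 0; i < n; i++) idx[i] = i;
            for (int i = n - 1; i > 0; i--)   // Fisher-Yates shuffle
            {
                int j = rng.Next(i + 1);
                int tmp = idx[i]; idx[i] = idx[j]; idx[j] = tmp;
            }
            return idx;
        }
    }

Shuffling each epoch is optional but helps when consecutive examples are correlated; the essential change is that every example contributes to the weight updates in every pass.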

Upvotes: 2
