Ahmad Afkande

Reputation: 137

Converting a Matlab neural network into a C++ neural network

I created a neural network in Matlab with newff for handwritten digit recognition.

I trained it to recognize only the digits 0 and 1 from images.

It has 3 layers: the input layer has 9 neurons, the hidden layer has 5 neurons, and the output layer has 1 neuron; there are 9 inputs.

My target outputs are 0.1 and 0.2, and the transfer function of all layers is "tansig".

I tested it in Matlab and the network works fine. Now I want to recreate this network in C++. I wrote the code and copied all the weights and biases (146 values in total), but when I feed the same input data to the network, the output value is not correct.

Can anyone guide me?

Here's my network's code:

public class Neuron
{
    public Neuron()
    { }
    public Neuron(int SumOfInputs)
    {
        m_SumOfInputs = SumOfInputs;
    }
    public double act(double[] Input, double[] weight, double bias)
    {
        double tmp = bias;
        for (int i = 0; i < m_SumOfInputs; i++)
            tmp += (Input[i] * weight[i]);
        m_output = 1.0 / (1.0 + Math.Exp(-tmp));
        return m_output;
    }
    public double m_output;
    private int m_SumOfInputs;
};

public class Net
{
    public Net()
    {
        int i;
        //net1 , net2
        //initializing inputLayer Neurons
        for (i = 0; i < 9; i++)
            InputLayer[i] = new Neuron(9);
        //initializing HiddenLayer Neurons
        for (i = 0; i < 5; i++)
            HiddenLayer[i] = new Neuron(9);
        //initializing OutputLayer
        OutputLayer = new Neuron(5);
    }
    public double Calculate(double[] inputs)
    {
        double[] ILay_Outputs = new double[9];
        double[] HLay_Outputs = new double[5];
        //inputLayer acting
        ILay_Outputs[0] = InputLayer[0].act(inputs, IW1, Ib[0]);
        ILay_Outputs[1] = InputLayer[1].act(inputs, IW2, Ib[1]);
        ILay_Outputs[2] = InputLayer[2].act(inputs, IW3, Ib[2]);
        ILay_Outputs[3] = InputLayer[3].act(inputs, IW4, Ib[3]);
        ILay_Outputs[4] = InputLayer[4].act(inputs, IW5, Ib[4]);
        ILay_Outputs[5] = InputLayer[5].act(inputs, IW6, Ib[5]);
        ILay_Outputs[6] = InputLayer[6].act(inputs, IW7, Ib[6]);
        ILay_Outputs[7] = InputLayer[7].act(inputs, IW8, Ib[7]);
        ILay_Outputs[8] = InputLayer[8].act(inputs, IW9, Ib[8]);
        //HiddenLayer acting
        HLay_Outputs[0] = HiddenLayer[0].act(ILay_Outputs, HW1, Hb[0]);
        HLay_Outputs[1] = HiddenLayer[1].act(ILay_Outputs, HW2, Hb[1]);
        HLay_Outputs[2] = HiddenLayer[2].act(ILay_Outputs, HW3, Hb[2]);
        HLay_Outputs[3] = HiddenLayer[3].act(ILay_Outputs, HW4, Hb[3]);
        HLay_Outputs[4] = HiddenLayer[4].act(ILay_Outputs, HW5, Hb[4]);
        //OutputLayer acting
        OutputLayer.act(HLay_Outputs, OW, Ob);

        return OutputLayer.m_output;
    }
    //variables
    Neuron[] InputLayer = new Neuron[9];
    Neuron[] HiddenLayer = new Neuron[5];
    Neuron OutputLayer;

    //net2 tansig tansig tansig
    double[] IW1 = { 0.726312035124743, 1.01034015912570, 0.507178716484559, -0.254689455765290, 0.475299816659036, 0.0336358919735363, -0.715890843015230, 0.466632424349648, 0.565406467159982 };
    double[] IW2 = { 0.866482591050076, -0.672473224929341, 0.915599891389326, 0.310163265280920, -0.373812653648686, -0.0859927887021936, 0.0100063635393257, 0.816638798257382, -0.540771172965867 };
    double[] IW3 = { 0.138868216294952, 1.93121321568871, -0.564704445249800, 0.834275586326333, 3.08348295981989, 0.899715248285303, -0.661916798988641, 6.00562393127300, 6.11939776912678 };
    double[] IW4 = { 0.578089791487308, 0.885170493965113, -0.992514702569606, 0.415980526304333, -0.706140252063166, 0.442017877881589, -0.449053823645690, -0.0894051386719344, -0.348622179369911 };
    double[] IW5 = { -0.407756482945129, 0.0786764402198765, 0.972408690276837, -0.959955597431701, -0.977769442966978, 1.52121267506016, 0.503296357838885, -3.31593633455649, -3.47834004737816 };
    double[] IW6 = { -1.17474983226852, 0.870140308892922, 1.50545637070446, 0.369712493398677, -0.569857993006262, -0.732502911495791, -0.668984976457441, -1.48023312055586, -0.893472571240467 };
    double[] IW7 = { -0.860518592120001, -1.48432158859269, 0.957060799463945, -0.680797771869510, -0.270752283410268, -0.218766920514208, 0.168091770241510, -2.50326075864844, -0.800988078966455 };
    double[] IW8 = { 0.436492138260917, 0.280081066366966, 0.484813099857825, -0.310693876078844, 1.60359045377467, 1.57343220231689, -1.21552190886612, 2.03276547165735, 1.27245062411707 };
    double[] IW9 = { 1.66853306274827, -1.59142022586958, 0.862315766588855, 0.676048095028997, -2.22623540036057, -1.48036066273542, -0.0386781503608105, -5.18214728910353, -5.21258509200432 };

    double[] HW1 = { 0.577543862468449, 0.452264642610010, -0.869014797322399, 0.122435296258077, 0.507631314535324, 0.0386430216115630, -0.398222802253669, -0.614601040619812, 1.43324133164016 };
    double[] HW2 = { 0.163344332215885, 0.434728230081814, -3.04877964757120, -0.118300732191499, -2.63220585865390, 0.443163977179405, -2.11883915836372, 2.07955461474729, -3.94441429060856 };
    double[] HW3 = { -0.156103043064606, -0.482049683802527, 1.24788068138172, -1.05731056687422, -0.615321348655331, 0.214815967784408, 0.375762477817552, -0.728649292060764, -0.212151944122515 };
    double[] HW4 = { 1.78276088127139, 1.15086535250306, 1.25967219208841, -0.446026243031773, -3.94742837475153, -1.33311929047378, -2.09356929069216, 0.0736879745054291, 1.51472991137144 };
    double[] HW5 = { 0.744372844550077, 0.400815326319268, -4.94686055701529, 0.444773365537176, 2.65351865321717, 1.87143709824455, 1.74346707204902, -3.28220218001754, 5.78321274609173 };

    double[] OW = { -1.09112204235009, -7.13508015318964, -1.02533926874837, 3.80439015418632, -4.16711367340349 };

    double[] Ib =  {-1.77988445077976,
                -1.37323967952292,
                -0.547465218997906,
                0.331535304175263,
                -0.0167810612906040,
                0.734128501831859,
                -0.543321122358485,
                -1.13525462762255,
                1.82870615182942};
    double[] Hb =  {1.68321697741393,
                -0.862080862212137,
                -0.536310792063381,
                -0.772019935790668,
                1.51470472867250};
    double Ob = -0.156343477742835;
};
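For completeness, a minimal sketch of how this class is exercised (the nine input values below are made up for illustration; real inputs come from the digit images):

Net net = new Net();
// hypothetical feature vector for one image; real values must come
// from the same preprocessing used during training in Matlab
double[] inputs = { 0.1, 0.9, 0.3, 0.7, 0.5, 0.2, 0.8, 0.4, 0.6 };
double output = net.Calculate(inputs);
// expected: close to 0.1 for a "0" and close to 0.2 for a "1"
Console.WriteLine(output);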

Upvotes: 2

Views: 1398

Answers (2)

Ahmad Afkande

Reputation: 137

I found the problem.

In Matlab, before the inputs go to the network, they all pass through a function named applyminmax in a .m file named mapminmax.m, and that function's outputs are the network's inputs.

After the simulation of the network is done, the outputs go through a function named reverse in the same .m file, and that function's output is the final output of the neural network.
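For anyone hitting the same problem, here is a minimal C# sketch of those two steps, assuming mapminmax's default target range of [-1, 1]; the class and method names are mine, and the xmin/xmax (input) and tmin/tmax (target) ranges must be copied from the trained network's stored process settings:

public static class MapMinMax
{
    const double Ymin = -1.0, Ymax = 1.0; // Matlab's default range

    // Scale each raw input into [Ymin, Ymax], as applyminmax does.
    // xmin/xmax are the per-input ranges saved with the trained net.
    public static double[] Apply(double[] x, double[] xmin, double[] xmax)
    {
        double[] y = new double[x.Length];
        for (int i = 0; i < x.Length; i++)
            y[i] = (Ymax - Ymin) * (x[i] - xmin[i]) / (xmax[i] - xmin[i]) + Ymin;
        return y;
    }

    // Undo the scaling on the network output, as reverse does.
    // tmin/tmax are the target range saved with the trained net.
    public static double Reverse(double y, double tmin, double tmax)
    {
        return (y - Ymin) * (tmax - tmin) / (Ymax - Ymin) + tmin;
    }
}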

Upvotes: 0

rainman

Reputation: 121

You mention in your description that you want to use the tansig activation function, but in your code you have an implementation of the logsig activation function. The tansig approximation would be:

2/(1+Math.Exp(-2*tmp))-1
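Applied to the Neuron class from your question, that means changing only the line that computes m_output, along these lines:

public double act(double[] Input, double[] weight, double bias)
{
    double tmp = bias;
    for (int i = 0; i < m_SumOfInputs; i++)
        tmp += Input[i] * weight[i];
    // tansig (equivalent to tanh) instead of logsig
    m_output = 2.0 / (1.0 + Math.Exp(-2.0 * tmp)) - 1.0;
    return m_output;
}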

I am also not sure how you got the weights for the input layer; are these perhaps the weights for the hidden layer? Matlab does not generate weights for the input layer, since the inputs are connected directly to the hidden layer. net.IW holds the weights for the first (hidden) layer, while the weights for the subsequent layers (including the output layer) are given by net.LW.

Besides the above, I don't see obvious bugs/errors in your code. Maybe try a simpler network first and train it on the good old XOR relationship.

Lastly, I would like to mention that if you are writing this code for a microcontroller, it's easier to do it in C and without objects; your code will be smaller and faster. A step-by-step example is given here.

Upvotes: 2
