Reputation: 5220
EDIT: I managed to get together couple of simple examples https://github.com/developer239/neural-network-playground
Could anyone help me with a simple NEAT example that teaches a net how to solve XOR or some other similar problem? I want to use the NEAT technique so that I don't have to specify a training data set.
Using JavaScript: https://github.com/cazala/synaptic or https://github.com/wagenaartje/neataptic
1. Initialize the network
2. Generate a generation
3. Go through each genome in the generation and evaluate its fitness (how good it is)
4. Take the 2 best genomes from the generation
5. Merge the genomes 50/50 at random
6. Mutate the final genome
7. Generate the second generation
This would be extremely helpful. The same technique is being used here:
https://github.com/ivanseidel/IAMDinosaur
https://www.youtube.com/watch?v=P7XHzqZjXQs
I went through the source code but there is WAY too much stuff going on. I understand the general idea; however, I have no idea how to implement the solution.
Thank you :)
Upvotes: 1
Views: 1260
Reputation: 921
I know this is quite an old question, and neataptic isn't very widely used, but I found a much simpler way to implement this, so I thought I'd share it.
const { Neat } = require('neataptic');

// fitness = negative total error over the four XOR cases, so 0 is a perfect score
function fitness(network) {
  let score = 0;
  score += Math.abs(0 - network.activate([0, 0])[0]);
  score += Math.abs(1 - network.activate([0, 1])[0]);
  score += Math.abs(1 - network.activate([1, 0])[0]);
  score += Math.abs(0 - network.activate([1, 1])[0]);
  score = -score;
  return score;
}

(async () => {
  const neat = new Neat(2, 1, fitness, {});

  // evolve for up to 10,000 generations
  for (let i = 0; i < 10000; i++) {
    await neat.evolve();
  }

  const fittest = neat.getFittest();

  console.log(fitness(fittest));
  console.log(fittest.activate([0, 0]));
  console.log(fittest.activate([0, 1]));
  console.log(fittest.activate([1, 0]));
  console.log(fittest.activate([1, 1]));
})();
Upvotes: 1
Reputation: 5220
I managed to write my own solution. You can find it here: https://github.com/developer239/neural-network-playground/tree/master/neatXOR
The main difference from the solution in the documentation is that in genetic.js you can dynamically change the learning process.
This is the entry file:
const genetic = require('./genetic')

genetic.generateRandomPopulation()

for (let iteration = 0; iteration < 1000; iteration += 1) {
  genetic.live()
  genetic.evolve()
}

const genom = genetic.neat.population[0]

console.log(`
Result for the genome with index 0 in the newest population. Note that selection / mutation happened
after we called the last evolve function, so this is not necessarily the best genome in the population.
[0, 0] = ${genom.activate([0, 0])} (should be 0)
[1, 1] = ${genom.activate([1, 1])} (should be 0)
[0, 1] = ${genom.activate([0, 1])} (should be 1)
[1, 0] = ${genom.activate([1, 0])} (should be 1)
`)
This is the genetic.js file:
const { Neat, architect } = require('neataptic')

module.exports = {
  neat: null, // https://wagenaartje.github.io/neataptic/docs/neat/

  possibleInputs: [
    [0, 0], // expected output 0
    [1, 1], // expected output 0
    [0, 1], // expected output 1
    [1, 0], // expected output 1
  ],

  generateRandomPopulation: function () {
    this.neat = new Neat(
      2, // number of inputs
      1, // number of outputs
      null, // fitnessFunction - in this example we calculate fitness inside the live method
      {
        elitism: 5, // how many genomes in the population are passed into the next generation without mutation https://www.researchgate.net/post/What_is_meant_by_the_term_Elitism_in_the_Genetic_Algorithm
        mutationRate: 0.3, // the mutation rate: if set to 0.3, 30% of the new population will be mutated (0.3 is the default)
        network: // https://wagenaartje.github.io/neataptic/docs/architecture/network/
          new architect.Random(
            2,
            3,
            1,
          ),
      },
    )
  },

  // the closer the output gets to expectedOutput, the better
  // note that the optimal fitness in this example is 0; the neural network seems to work fine though
  calculateFitness: function (expectedOutput, output) {
    let closeCount = Math.abs(expectedOutput - output)
    let fitness = closeCount * -1

    return fitness
  },

  live: function () {
    // increment the generation index
    this.neat.generation += 1

    // loop through each genome
    for (let genomeIndex in this.neat.population) {
      const possibleInputs = this.possibleInputs
      const genome = this.neat.population[genomeIndex]

      genome.score = 0

      // loop through each input
      for (let i = 0; i < possibleInputs.length; i += 1) {
        let input = possibleInputs[i]

        // test each input
        let output = genome.activate(input)[0]

        // calculate fitness for each output
        // we have 4 different inputs, so the total score is the sum of 4 different fitness values
        if (i <= 1) {
          genome.score += this.calculateFitness(0, output)
        } else {
          genome.score += this.calculateFitness(1, output)
        }
      }
    }
  },

  evolve: function () {
    const neat = this.neat

    console.log(`[generation ${neat.generation}] Average score: ${neat.getAverage()} (the closer to zero the better)`)

    // sort by genome.score in descending order
    neat.sort()

    // our new population will be collected here
    let newPopulation = []

    // push the neat.elitism best genomes into the new population automatically
    for (let i = 0; i < neat.elitism; i++) {
      newPopulation.push(neat.population[i])
    }

    // breed offspring from the current population and push them into the new population
    for (let i = 0; i < neat.popsize - neat.elitism; i++) {
      newPopulation.push(neat.getOffspring())
    }

    // set the new population
    neat.population = newPopulation

    // mutate the population
    neat.mutate()
  },
}
Upvotes: 0
Reputation: 6759
There is an example on Neataptic's README.md.
const { Network } = require('neataptic');

(async () => {
  // this network learns the XOR gate (through neuro-evolution)
  var network = new Network(2, 1);

  var trainingSet = [
    { input: [0, 0], output: [0] },
    { input: [0, 1], output: [1] },
    { input: [1, 0], output: [1] },
    { input: [1, 1], output: [0] }
  ];

  await network.evolve(trainingSet, {
    equal: true,
    error: 0.03
  });
})();
Neataptic has it all built-in so all you have to provide is a data set. If you need more info on how this has been set up, read this article.
For problems with dynamic solutions, a custom loop and fitness function have to be implemented.
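A minimal sketch of what such a custom loop can look like (this assumes neataptic's Neat API as used in the other answers; the fitness line is just a placeholder for whatever simulation actually scores each genome):
const { Neat } = require('neataptic');

const neat = new Neat(2, 1, null, { popsize: 50, elitism: 5 });

for (let generation = 0; generation < 100; generation++) {
  // score every genome yourself, e.g. by running it through a game or simulation
  for (const genome of neat.population) {
    genome.score = -Math.abs(1 - genome.activate([0, 1])[0]); // placeholder fitness
  }

  // keep the elite, breed the rest, then mutate
  neat.sort();
  const nextPopulation = neat.population.slice(0, neat.elitism);
  while (nextPopulation.length < neat.popsize) {
    nextPopulation.push(neat.getOffspring());
  }
  neat.population = nextPopulation;
  neat.mutate();
}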
Upvotes: 1