// Cloned by test2 on 10 Nov 2022 from World "XOR multi-layer network" by "Coding Train" project
// Please leave this clone trail here.
// XOR multi-layer network
// Port from:
// https://github.com/CodingTrain/Toy-Neural-Network-JS/tree/master/examples/xor
// with modifications
// libraries from:
// https://github.com/CodingTrain/Toy-Neural-Network-JS/tree/master/lib
// ported to here:
// https://ancientbrain.com/uploads.php?userid=codingtrain
// Theory:
// A sum of 10 small random numbers (positive or negative, so they can cancel out) is likely to be small.
// A sum of 50000 small random numbers is likely to be much larger in magnitude.
// Why: the variance of a sum of independent terms is the number of terms times the variance
// of one term, so the typical size of the sum grows like the square root of the number of terms.
// nohidden = 10 gives a weighted sum x close to 0, so y = sigmoid(x) is close to 1/2.
// nohidden = 50000 gives all sorts of x, but most likely large negative or large positive (y near 0 or 1).
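// A minimal illustration of that scaling (my addition, not part of the original port):
// it just adds up n draws from the same -0.5 to 0.5 range used by randomWeight() below.
// It is never called by the sketch; try it from the console.
function sumOfRandomNumbers ( n )
{
// add up n independent random numbers between -0.5 and 0.5
let sum = 0;
for ( let i = 0; i < n; i++ )
sum = sum + AB.randomFloatAtoB ( -0.5, 0.5 );
return sum;
}
// sumOfRandomNumbers ( 10 );    // usually close to 0, so sigmoid would give y near 1/2
// sumOfRandomNumbers ( 50000 ); // usually far from 0, so sigmoid would give y near 0 or 1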
//=== Tweaker's box ============================================
// number of nodes in each layer:
const noinput = 2;
const nohidden = 10000;
const nooutput = 1;
// define the exemplars to learn from:
let training_data = [
{ inputs: [0, 0], outputs: [0] },
{ inputs: [0, 1], outputs: [1] },
{ inputs: [1, 0], outputs: [1] },
{ inputs: [1, 1], outputs: [0] }
];
var nn; // global var, created in setup() once the libraries have loaded
const learningrate = 0.2;
// train this number of times per draw()
const notrain = 10;
// Take screenshot on this step:
AB.screenshotStep = 200;
// divide the 0 to 1 input space into a grid of squares
// show all squares or just the corner squares:
var showall = true;
const canvassize = 400;
const squaresize = 40;
const cols = 10;
const rows = 10;
// In the ported matrix.js, Matrix.randomize() is changed to call this function. It must be defined by the user of Matrix.
function randomWeight()
{
return ( AB.randomFloatAtoB ( -0.5, 0.5 ) );
// Coding Train default is -1 to 1
}
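// The actual hook lives in /uploads/codingtrain/matrix.js; as a sketch of the assumed change
// (not the real library source), randomize() would fill every entry from randomWeight()
// instead of the Coding Train default of Math.random() * 2 - 1:
//
// randomize()
// {
// return this.map ( e => randomWeight() );
// }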
//=== End of tweaker's box ============================================
function setup()
{
createCanvas (canvassize, canvassize);
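// nn.js defines NeuralNetwork and depends on matrix.js, so load matrix.js first
// and only construct the network after both scripts have arrived.
// Until then nn stays undefined and draw() below just returns.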
$.getScript ( "/uploads/codingtrain/matrix.js", function()
{
$.getScript ( "/uploads/codingtrain/nn.js", function()
{
nn = new NeuralNetwork ( noinput, nohidden, nooutput );
});
});
}
function draw()
{
// check if libraries loaded yet:
if ( typeof nn == 'undefined' ) return;
nn.setLearningRate ( learningrate );
background ('#ffffcc');
logthem();
// train n times
for (let i = 0; i < notrain ; i++)
{
let data = random ( training_data );
nn.train ( data.inputs, data.outputs );
}
// draw either some squares or all squares:
if ( showall )
{
// redraw all squares each time round
for (let i = 0; i < cols; i++)
for (let j = 0; j < rows; j++)
drawsquare ( i, j );
}
else
{
// redraw just the 4 corner squares (i and j jump straight from 0 to cols-1 and rows-1)
for ( let i = 0; i < cols; i = i + cols-1 )
for ( let j = 0; j < rows; j = j + rows-1 )
drawsquare ( i, j );
}
}
// logthem() asks the current network for its output on each of the 4 exemplars
// and reports the results on screen with AB.msg (rather than console.log).
function logthem()
{
let inputs = [0, 0]; let y = nn.predict(inputs);
let inputs2 = [0, 1]; let y2 = nn.predict(inputs2);
let inputs3 = [1, 0]; let y3 = nn.predict(inputs3);
let inputs4 = [1, 1]; let y4 = nn.predict(inputs4);
AB.msg ( "<span style='color:red'><b> " +
"[0,0] leads to " + y[0].toFixed(2) + "<br>" +
"[0,1] leads to " + y2[0].toFixed(2) + "<br>" +
"[1,0] leads to " + y3[0].toFixed(2) + "<br>" +
"[1,1] leads to " + y4[0].toFixed(2) + "<br>" +
"</b></span>" );
}
function drawsquare ( i, j )
{
// i = 8;
// j = 8;
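// map the column/row index of this square to a network input between 0 and 1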
let x1 = i / cols;
let x2 = j / rows;
let inputs = [x1, x2];
let y = nn.predict(inputs);
// console.log ( "input (" +x1 + "," + x2 + ") output " + y );
strokeWeight(2);
stroke('black');
// stroke ('red');
fill ( y[0] * 255 ); // predict returns an array; output 0 is black, 1 is white
rect ( i * squaresize, j * squaresize, squaresize, squaresize );
}