Using parameters: train file = train1.txt test file = test1.txt nLevels = 2 D = [2, 3, 1] alpha = 0.01 # iterations = 200 Initial weights: Level 0 W[ 0 ][ 0 ][ 0 ] = 0.6888 W[ 0 ][ 0 ][ 1 ] = 0.5159 W[ 0 ][ 0 ][ 2 ] = -0.1589 W[ 0 ][ 1 ][ 0 ] = -0.4822 W[ 0 ][ 1 ][ 1 ] = 0.0225 W[ 0 ][ 1 ][ 2 ] = -0.1901 W[ 0 ][ 2 ][ 0 ] = 0.5676 W[ 0 ][ 2 ][ 1 ] = -0.3934 W[ 0 ][ 2 ][ 2 ] = -0.0468 Level 1 W[ 1 ][ 0 ][ 0 ] = 0.1668 W[ 1 ][ 1 ][ 0 ] = 0.8162 W[ 1 ][ 2 ][ 0 ] = 0.0094 W[ 1 ][ 3 ][ 0 ] = -0.4363 Iteration 10 Train accuracy = 0.500 Test accuracy = 0.478 Iteration 20 Train accuracy = 0.500 Test accuracy = 0.514 Iteration 30 Train accuracy = 0.542 Test accuracy = 0.558 Iteration 40 Train accuracy = 0.583 Test accuracy = 0.586 Iteration 50 Train accuracy = 0.583 Test accuracy = 0.586 Iteration 60 Train accuracy = 0.625 Test accuracy = 0.602 Iteration 70 Train accuracy = 0.667 Test accuracy = 0.606 Iteration 80 Train accuracy = 0.750 Test accuracy = 0.627 Iteration 90 Train accuracy = 0.792 Test accuracy = 0.703 Iteration 100 Train accuracy = 0.917 Test accuracy = 0.839 Iteration 110 Train accuracy = 0.917 Test accuracy = 0.859 Iteration 120 Train accuracy = 0.917 Test accuracy = 0.859 Iteration 130 Train accuracy = 0.917 Test accuracy = 0.871 Iteration 140 Train accuracy = 0.917 Test accuracy = 0.892 Iteration 150 Train accuracy = 0.958 Test accuracy = 0.916 Iteration 160 Train accuracy = 0.958 Test accuracy = 0.920 Iteration 170 Train accuracy = 0.958 Test accuracy = 0.928 Iteration 180 Train accuracy = 1.000 Test accuracy = 0.932 Iteration 190 Train accuracy = 1.000 Test accuracy = 0.932 Iteration 200 Train accuracy = 1.000 Test accuracy = 0.932 Final learned weights: Level 0 W[ 0 ][ 0 ][ 0 ] = 1.9120 W[ 0 ][ 0 ][ 1 ] = -1.1676 W[ 0 ][ 0 ][ 2 ] = -0.4393 W[ 0 ][ 1 ][ 0 ] = -2.0314 W[ 0 ][ 1 ][ 1 ] = 1.4596 W[ 0 ][ 1 ][ 2 ] = 0.0564 W[ 0 ][ 2 ][ 0 ] = 1.1323 W[ 0 ][ 2 ][ 1 ] = 0.6395 W[ 0 ][ 2 ][ 2 ] = -0.4231 Level 1 W[ 1 ][ 0 ][ 0 ] = 1.8166 W[ 1 ][ 1 ][ 0 ] = 1.8283 
W[ 1 ][ 2 ][ 0 ] = 0.5742 W[ 1 ][ 3 ][ 0 ] = -1.2882