3
|
1 module backprop_test;
|
|
2
|
|
3 import aid.nn.multilayer.backprop;
|
|
4 import aid.nn.outputFunctions;
|
|
5 import std.stdio;
|
|
6
|
|
7 /+float[][] trainingInputs = [
|
|
8 [0,0,0],
|
|
9 [0,0,1],
|
|
10 [0,1,0],
|
|
11 [0,1,1],
|
|
12 [1,0,0],
|
|
13 [1,0,1],
|
|
14 [1,1,0],
|
|
15 [1,1,1]];
|
|
16
|
|
17 float[][] trainingOutputs = [
|
|
18 [0.1],
|
|
19 [0.9],
|
|
20 [0.9],
|
|
21 [0.1],
|
|
22 [0.9],
|
|
23 [0.1],
|
|
24 [0.1],
|
|
25 [0.9]];+/
|
|
26
|
|
// XOR truth table over two inputs. Targets are 0.1/0.9 rather than 0/1
// because the output units are sigmoids (see &sigmoid in main), which
// asymptotically approach but never reach 0 or 1.
float[][] trainingInputs = [
	[0,0],
	[0,1],
	[1,0],
	[1,1]];

// Desired network output for each row of trainingInputs, in the same order.
float[][] trainingOutputs = [
	[0.1],
	[0.9],
	[0.9],
	[0.1]];
|
|
38
|
|
/// Trains a 2-input backprop network on XOR until the total error drops
/// below 0.5, printing a progress report every 100 training iterations
/// and a final summary of the learned outputs.
void main(){
	// 2 inputs -> hidden layer of 4 sigmoid units -> 1 sigmoid output.
	Backprop nn = new Backprop(2,[4,1],[&sigmoid,&sigmoid]);

	float error = 10.0; // seeded above the threshold so the loop runs at least once
	float[] output;
	int iter = 0;
	while(error >= 0.5){
		error = nn.calculateError(trainingInputs,trainingOutputs);
		// Progress report every 100 iterations.
		if(iter % 100 == 0){
			writefln("Iter: %d",iter);
			for(int i=0; i<trainingInputs.length; i++){
				output = nn.evaluate(trainingInputs[i]);
				writef(" %d:", i); printArray(output);
			}
			writefln(" Error: %f", error);
		}
		nn.train(trainingInputs,trainingOutputs);
		// BUG FIX: iter was never incremented, so the report fired on every
		// pass and "Total Iters" always printed 0.
		iter++;
	}
	writefln("Total Iters: %d",iter);
	// Final evaluation of every training pattern with the trained weights.
	for(int i=0; i<trainingInputs.length; i++){
		output = nn.evaluate(trainingInputs[i]);
		writef(" %d:", i); printArray(output);
	}
	writefln(" Error: %f", error);
}
|
|
64
|
|
/// Prints a float slice as "[a, b, c]" followed by a newline.
void printArray(float[] array){
	writef("[");
	// Guard the empty slice: array.length - 1 underflows size_t when
	// length == 0, which would make the loop iterate over a huge range.
	if(array.length > 0){
		for(int i=0; i<array.length-1; i++){
			writef("%f, ",array[i]);
		}
		// BUG FIX: array[$] indexes one past the end (RangeError at
		// runtime); the last element is array[$-1].
		writefln("%f]",array[$-1]);
	} else {
		writefln("]");
	}
}