diff trunk/backprop_test.d @ 3:314d68bafeff

Backprop and backprop_test added (no testing).
author revcompgeek
date Fri, 11 Apr 2008 18:12:55 -0600
parents
children 73beed484455
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/trunk/backprop_test.d	Fri Apr 11 18:12:55 2008 -0600
@@ -0,0 +1,71 @@
+module backprop_test;
+
+import aid.nn.multilayer.backprop;
+import aid.nn.outputFunctions;
+import std.stdio;
+
+/+float[][] trainingInputs = [
+	[0,0,0],
+	[0,0,1],
+	[0,1,0],
+	[0,1,1],
+	[1,0,0],
+	[1,0,1],
+	[1,1,0],
+	[1,1,1]];
+
+float[][] trainingOutputs = [
+	[0.1],
+	[0.9],
+	[0.9],
+	[0.1],
+	[0.9],
+	[0.1],
+	[0.1],
+	[0.9]];+/
+
+float[][] trainingInputs = [	// XOR truth table: every 2-bit input combination
+	[0,0],
+	[0,1],
+	[1,0],
+	[1,1]];
+
+float[][] trainingOutputs = [	// XOR targets per input row: 0.1 encodes false, 0.9 encodes true
+	[0.1],
+	[0.9],
+	[0.9],
+	[0.1]];
+
+void main(){
+	Backprop nn = new Backprop(2,[4,1],[&sigmoid,&sigmoid]); // 2 inputs, hidden layer of 4 sigmoid units, 1 sigmoid output
+	
+	float error = 10.0; // sentinel above the 0.5 threshold so the loop runs at least once
+	float[] output;
+	int iter = 0;
+	while(error >= 0.5){
+		error = nn.calculateError(trainingInputs,trainingOutputs);
+		if(iter % 100 == 0){ // progress report every 100 training passes
+			writefln("Iter: %d",iter);
+			for(int i=0; i<trainingInputs.length; i++){
+				output = nn.evaluate(trainingInputs[i]);
+				writef("  %d:", i); printArray(output);
+			}
+			writefln("  Error: %f", error);
+		}
+		nn.train(trainingInputs,trainingOutputs); iter++; // FIX: iter was never incremented, so the report fired on every pass and "Total Iters" always printed 0
+	}
+	writefln("Total Iters: %d",iter);
+	for(int i=0; i<trainingInputs.length; i++){
+		output = nn.evaluate(trainingInputs[i]);
+		writef("  %d:", i); printArray(output);
+	}
+	writefln("  Error: %f", error);
+}
+
+void printArray(float[] array){ // print array as "[a, b, c]" with a trailing newline
+	writef("[");
+	for(int i=0; i+1<array.length; i++) // i+1<length: length-1 on an empty array would underflow (length is unsigned)
+		writef("%f, ",array[i]);
+	if(array.length) writef("%f",array[$-1]); // FIX: array[$] reads one past the end ($ is the length); also guards the empty case
+	writefln("]");
+}
\ No newline at end of file