@@ -28,6 +28,7 @@ func main() {
// Load MNIST somehow: 28x28 images = 784 inputs
nSample := 1

+ learningRate := float64(1.0)
// Load MNIST into a float array

// Make a matrix from the weights variable
@@ -42,23 +43,57 @@ func main() {
inp := f.Op("matNew", 2, 1, f.In(0))

// Layer1 weights
- w1 := f.Op("matNew", 5, 2, f.Var("w1", f.Op("normFloat", 5*2)))
+ w1 := f.Op("matNew", 3, 2, f.Var("w1", f.Op("normFloat", 3*2)))
+ // Layer 1 result
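+ // (Assuming matNew(rows, cols): w1 is 3x2 and inp is 2x1, so w1·inp yields the 3x1 hidden activations.)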
+ l1res := f.Op("matSigmoid", f.Op("matMul", w1, inp))

- l1op := f.Op("matSigmoid", f.Op("matMul", w1, inp))
+ // Layer 2 weights
+ w2 := f.Op("matNew", 2, 3, f.Var("w2", f.Op("normFloat", 2*3)))

- // 5 inputs 2 output
- w2 := f.Op("matNew", 2, 5, f.Var("w2", f.Op("normFloat", 2*5)))
+ // Layer 2 result
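+ // Under the same shape assumption, w2 (2x3) times l1res (3x1) gives the 2x1 output.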
+ l2res := f.Op("matSigmoid", f.Op("matMul", w2, l1res))

- // Previous layer result
- l2op := f.Op("matSigmoid", f.Op("matMul", w2, l1op))
+ // Network output
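+ // The net is 2 → 3 → 2; the trainer ops below drive it to reproduce its own 2-value input.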
+ netRes := l2res
+
+ //////////////////////
+ // Create trainer ops
+ // Backpropagation
+ //////////////////////
+
+ log.Println("Grab error from output layer")
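+ // The input doubles as the target, so the output error is e = inp - netRes (2x1).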
+ errOp := f.Op("matSub", inp, netRes)
+ log.Println(errOp.Process(sample))
+
+ log.Println("Calculate deltas")
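+ // Output delta: d2 = sigmoid'(l2res) ⊙ e, element-wise; matSigmoidD is taken to be the sigmoid derivative here.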
+ deltaOp := f.Op("matMulElem", f.Op("matSigmoidD", l2res), errOp)
+ log.Println(deltaOp.Process(sample))
+
+ log.Println("Multiplying the following matrices")
+ log.Println(l2res.Process(sample))
+ log.Println(deltaOp.Process(sample))
+ log.Println("Calculate changes to apply to the output layer weights")
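+ // NOTE: mathematically the w2 change is the outer product d2 · l1res^T
+ // ((2x1)·(1x3) → 2x3, the shape of w2). As written, matMul of l2res (2x1)
+ // and deltaOp (2x1) does not conform; this wants the hidden activations,
+ // transposed, rather than the output activations.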
+ outChangesOp := f.Op("matMul", l2res, deltaOp)
+ log.Println(outChangesOp.Process(sample))
+
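+ // Scale the raw changes by the learning rate before applying them.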
+ outChangesOp = f.Op("matScale", outChangesOp, learningRate)
+
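+ // Delta-rule update: w2 = w2 + learningRate·Δw2. The changes are added, not
+ // subtracted, because the error above is computed as target - output.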
+ train1 := f.SetVar("w2", f.Op("matAdd", outChangesOp, w2))
+ // TODO: backpropagate the delta through w2 to get the hidden-layer error and update w1 the same way.
+ log.Println("Training 1")
+ log.Println(train1.Process(sample))
+
+ log.Println("deltaOutputLayer")
+ log.Println(deltaOp.Process(sample))

// Do we need this?
- netOp := f.Op("toFloatArr", l2op)
+ // netOp := f.Op("toFloatArr", l2res)

- netResI, _ := netOp.Process(sample)
+ /*netResI, _ := netOp.Process(sample)
res := netResI.([]float64)
log.Println("Network result:", res)
- log.Println("Loss", res[0]-sample[0], res[1]-sample[1])
+ log.Println("Error", sample[0]-res[0], sample[1]-res[1])*/
// Back propagation

//log.Println(layerRes.Process(samples[0]))