|
@@ -26,9 +26,10 @@ func main() {
|
|
// Try binary operations first
|
|
// Try binary operations first
|
|
// 2 inputs 1 output
|
|
// 2 inputs 1 output
|
|
// Load mnist somehow 28x28 784
|
|
// Load mnist somehow 28x28 784
|
|
- nSample := 1
|
|
|
|
|
|
+ nSample := 2
|
|
|
|
|
|
learningRate := float64(1.0)
|
|
learningRate := float64(1.0)
|
|
|
|
+ _ = learningRate
|
|
// Load mnist into floats array
|
|
// Load mnist into floats array
|
|
|
|
|
|
// Make a matrix from the weights variable
|
|
// Make a matrix from the weights variable
|
|
@@ -37,24 +38,39 @@ func main() {
|
|
// Layer 1
|
|
// Layer 1
|
|
//
|
|
//
|
|
sample := samples[nSample]
|
|
sample := samples[nSample]
|
|
|
|
+ label := labels[nSample] // Label output
|
|
|
|
+
|
|
|
|
+ process := func(op flow.Operation) flow.Data { // just a helper
|
|
|
|
+ res, err := op.Process(sample, []float64{label})
|
|
|
|
+ if err != nil {
|
|
|
|
+ panic(err)
|
|
|
|
+ }
|
|
|
|
+ return res
|
|
|
|
+ }
|
|
|
|
|
|
// Define input
|
|
// Define input
|
|
// Make a matrix out of the input
|
|
// Make a matrix out of the input
|
|
- inp := f.Op("matNew", 2, 1, f.In(0))
|
|
|
|
|
|
+ input := f.Op("matNew", 2, 1, f.In(0))
|
|
|
|
+ log.Println("inMat:", process(input))
|
|
|
|
+
|
|
|
|
+ output := f.Op("matNew", 1, 1, f.In(1))
|
|
|
|
+ log.Println("inRes:", process(output))
|
|
|
|
|
|
- // Layer1 weights
|
|
|
|
|
|
+ // Layer1 weights 2 in 3 out
|
|
w1 := f.Op("matNew", 3, 2, f.Var("w1", f.Op("normFloat", 3*2)))
|
|
w1 := f.Op("matNew", 3, 2, f.Var("w1", f.Op("normFloat", 3*2)))
|
|
|
|
+ log.Println("w1 weights:", process(w1))
|
|
// Layer 1 result
|
|
// Layer 1 result
|
|
- l1res := f.Op("matSigmoid", f.Op("matMul", w1, inp))
|
|
|
|
|
|
+ l1res := f.Op("matSigmoid", f.Op("matMul", w1, input))
|
|
|
|
|
|
- // weights 2
|
|
|
|
- w2 := f.Op("matNew", 2, 3, f.Var("w2", f.Op("normFloat", 2*3)))
|
|
|
|
|
|
+ // weights 2 // 3 in 1 out
|
|
|
|
+ w2 := f.Op("matNew", 1, 3, f.Var("w2", f.Op("normFloat", 1*3)))
|
|
|
|
+ log.Println("w2 weights:", process(w2))
|
|
|
|
|
|
// Layer 2 result
|
|
// Layer 2 result
|
|
l2res := f.Op("matSigmoid", f.Op("matMul", w2, l1res))
|
|
l2res := f.Op("matSigmoid", f.Op("matMul", w2, l1res))
|
|
|
|
|
|
|
|
+ log.Println("Layer2 result:", process(l2res))
|
|
// Network output
|
|
// Network output
|
|
- netRes := l2res
|
|
|
|
|
|
|
|
//////////////////////
|
|
//////////////////////
|
|
// Create trainer ops
|
|
// Create trainer ops
|
|
@@ -62,33 +78,30 @@ func main() {
|
|
// Backpropagation
|
|
// Backpropagation
|
|
|
|
|
|
log.Println("Grab error from output layer")
|
|
log.Println("Grab error from output layer")
|
|
- errOp := f.Op("matSub", inp, netRes)
|
|
|
|
- log.Println(errOp.Process(sample))
|
|
|
|
|
|
+ errOp := f.Op("matSub", output, l2res)
|
|
|
|
+ log.Println("ErrorOp:", process(errOp))
|
|
|
|
|
|
log.Println("Calculate deltas")
|
|
log.Println("Calculate deltas")
|
|
deltaOp := f.Op("matMulElem", f.Op("matSigmoidD", l2res), errOp)
|
|
deltaOp := f.Op("matMulElem", f.Op("matSigmoidD", l2res), errOp)
|
|
- log.Println(deltaOp.Process(sample))
|
|
|
|
|
|
+ log.Println("Delta:", process(deltaOp))
|
|
|
|
|
|
- log.Println("Multiplying the following matrixes")
|
|
|
|
- log.Println(l2res.Process(sample))
|
|
|
|
- log.Println(deltaOp.Process(sample))
|
|
|
|
- log.Println("Calculate changes to apply to hidden layer")
|
|
|
|
outChangesOp := f.Op("matMul", l2res, deltaOp)
|
|
outChangesOp := f.Op("matMul", l2res, deltaOp)
|
|
- log.Println(outChangesOp.Process(sample))
|
|
|
|
|
|
+ log.Println("Changes to be made", process(outChangesOp))
|
|
|
|
|
|
- outChangesOp = f.Op("matScale", outChangesOp, learningRate)
|
|
|
|
|
|
+ /*
|
|
|
|
+ outChangesOp = f.Op("matScale", outChangesOp, learningRate)
|
|
|
|
|
|
- train1 := f.SetVar("w2", f.Op("matAdd", outChangesOp, w1))
|
|
|
|
- // Set Var w1
|
|
|
|
- //
|
|
|
|
- log.Println("Training 1")
|
|
|
|
- log.Println(train1.Process(sample))
|
|
|
|
|
|
+ train1 := f.SetVar("w2", f.Op("matAdd", outChangesOp, w1))
|
|
|
|
+ // Set Var w1
|
|
|
|
+ //
|
|
|
|
+ log.Println("Training 1")
|
|
|
|
+ log.Println(train1.Process(sample))
|
|
|
|
|
|
- log.Println("deltaOutputLayer")
|
|
|
|
- log.Println(deltaOp.Process(sample))
|
|
|
|
|
|
+ log.Println("deltaOutputLayer")
|
|
|
|
+ log.Println(deltaOp.Process(sample))
|
|
|
|
|
|
- // Do we need this?
|
|
|
|
- //netOp := f.Op("toFloatArr", l2op)
|
|
|
|
|
|
+ // Do we need this?
|
|
|
|
+ //netOp := f.Op("toFloatArr", l2op)*/
|
|
|
|
|
|
/*netResI, _ := netOp.Process(sample)
|
|
/*netResI, _ := netOp.Process(sample)
|
|
res := netResI.([]float64)
|
|
res := netResI.([]float64)
|