diff --git a/axon/basic_test.go b/axon/basic_test.go
index 32d60c1e..ec7f21ff 100644
--- a/axon/basic_test.go
+++ b/axon/basic_test.go
@@ -90,7 +90,7 @@ var ParamSets = params.Sets{
 	},
 }
 
-func newTestNet(ctx *Context, nData int) *Network {
+func newTestNet(nData int) *Network {
 	testNet := NewNetwork("testNet")
 	testNet.SetRandSeed(42) // critical for ActAvg values
 	testNet.SetMaxData(ctx, nData)
@@ -107,8 +107,8 @@ func newTestNet(ctx *Context, nData int) *Network {
 	testNet.Rubicon.SetNUSs(ctx, 4, 3)
 	testNet.Rubicon.Defaults()
 
-	testNet.Build(ctx)
-	ctx.NData = uint32(nData)
+	testNet.Build()
+	testNet.Ctx.NData = uint32(nData)
 	testNet.Defaults()
 	testNet.ApplyParams(ParamSets["Base"], false) // false) // true) // no msg
 	testNet.InitWeights(ctx) // get GScale here
@@ -117,7 +117,7 @@ func newTestNet(ctx *Context, nData int) *Network {
 }
 
 // full connectivity
-func newTestNetFull(ctx *Context, nData int) *Network {
+func newTestNetFull(nData int) *Network {
 	testNet := NewNetwork("testNetFull")
 	testNet.SetRandSeed(42) // critical for ActAvg values
 	testNet.SetMaxData(ctx, nData)
@@ -143,7 +143,6 @@ func newTestNetFull(ctx *Context, nData int) *Network {
 
 func TestSynValues(t *testing.T) {
 	tol := Tol8
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	hidLay := testNet.LayerByName("Hidden")
 	p, err := hidLay.RecvPathBySendName("Input")
@@ -206,8 +205,6 @@ func TestSpikeProp(t *testing.T) {
 
 	pt := net.ConnectLayers(inLay, hidLay, paths.NewOneToOne(), ForwardPath)
 
-	ctx := NewContext()
-
 	net.Build(ctx)
 	net.Defaults()
 	net.ApplyParams(ParamSets["Base"], false)
@@ -269,8 +266,7 @@ func StructValues(obj any, vals map[string]float32, key string) {
 
 // TestInitWeights tests that initializing the weights results in same state
 func TestInitWeights(t *testing.T) {
-	nData := 4
-	ctx := NewContext()
+	nData := 1
 	testNet := newTestNet(ctx, nData)
 	inPats := newInPats()
 
@@ -361,7 +357,6 @@ func TestGPUAct(t *testing.T) {
 // Note: use NetDebugAct for printf debugging of all values --
 // "this is only a test"
 func NetActTest(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	testNet.InitExt(ctx)
 	inPats := newInPats()
@@ -557,7 +552,6 @@ func ReportValDiffs(t *testing.T, tolerance float32, va, vb map[string]float32,
 // and also returns a map of all values and variables that can be used for a more
 // fine-grained diff test, e.g., see the GPU version.
 func NetDebugAct(t *testing.T, printValues bool, gpu bool, nData int, initWts bool) map[string]float32 {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, nData)
 	testNet.ApplyParams(ParamSets["FullDecay"], false)
 	return RunDebugAct(t, ctx, testNet, printValues, gpu, initWts)
@@ -684,7 +678,6 @@ func TestGPULearn(t *testing.T) {
 }
 
 func NetTestLearn(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 
 	// fmt.Printf("synbanks: %d\n", ctx.NetIndexes.NSynCaBanks)
@@ -854,7 +847,6 @@ func TestGPURLRate(t *testing.T) {
 }
 
 func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	inPats := newInPats()
 	inLay := testNet.LayerByName("Input")
@@ -1016,7 +1008,6 @@ func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
 // and also returns a map of all values and variables that can be used for a more
 // fine-grained diff test, e.g., see the GPU version.
 func NetDebugLearn(t *testing.T, printValues bool, gpu bool, maxData, nData int, initWts, submean, slowAdapt bool) map[string]float32 {
-	ctx := NewContext()
 	var testNet *Network
 	rand.Seed(1337)
 
@@ -1251,7 +1242,6 @@ func TestGPUSynCa(t *testing.T) {
 	if os.Getenv("TEST_GPU") != "true" {
 		t.Skip("Set TEST_GPU env var to run GPU tests")
 	}
-	ctx := NewContext()
 	testNet := newTestNetFull(ctx, 16)
 	_ = testNet
 	// testNet.ConfigGPUnoGUI(ctx)
@@ -1280,8 +1270,6 @@ func TestInhibAct(t *testing.T) {
 	inhibNet.ConnectLayers(hidLay, outLay, one2one, ForwardPath)
 	inhibNet.ConnectLayers(outLay, hidLay, one2one, BackPath)
 
-	ctx := NewContext()
-
 	inhibNet.Build(ctx)
 	inhibNet.Defaults()
 	inhibNet.ApplyParams(ParamSets["Base"], false)
@@ -1408,7 +1396,6 @@ func saveToFile(net *Network, t *testing.T) {
 }
 
 func TestSendGatherIndexes(t *testing.T) {
-	ctx := NewContext()
 	nData := uint32(3)
 	net := newTestNet(ctx, int(nData))
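Review note: the hunks above drop the `ctx *Context` parameter from `newTestNet` / `newTestNetFull`, but the unchanged context lines still reference `ctx` (e.g. `testNet.SetMaxData(ctx, nData)` inside the constructors, and `newTestNet(ctx, 1)` at the call sites), so the diff is not compile-clean as shown. A minimal sketch of the ctx-free pattern the signature change implies, assuming the network-owned `testNet.Ctx` (which the `+testNet.Ctx.NData` hunk introduces) stands in for the old `NewContext()` value:

```go
// Sketch only: assumes Network.Ctx replaces the former free-standing Context.
func newTestNet(nData int) *Network {
	testNet := NewNetwork("testNet")
	testNet.SetRandSeed(42) // critical for ActAvg values
	testNet.SetMaxData(testNet.Ctx, nData)
	// ... layer construction and Rubicon setup as in the hunks above ...
	testNet.Build()
	testNet.Ctx.NData = uint32(nData)
	testNet.Defaults()
	testNet.ApplyParams(ParamSets["Base"], false)
	testNet.InitWeights(testNet.Ctx) // get GScale here
	return testNet
}
```

Call sites would then drop their local Context entirely, e.g. `testNet := newTestNet(1)` in TestSynValues, and `net.Build(ctx)` in TestSpikeProp / TestInhibAct would presumably become `net.Build()`.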
diff --git a/axon/basic_test.goal b/axon/basic_test.goal
index a55d9347..55e3da44 100644
--- a/axon/basic_test.goal
+++ b/axon/basic_test.goal
@@ -88,7 +88,7 @@ var ParamSets = params.Sets{
 	},
 }
 
-func newTestNet(ctx *Context, nData int) *Network {
+func newTestNet(nData int) *Network {
 	testNet := NewNetwork("testNet")
 	testNet.SetRandSeed(42) // critical for ActAvg values
 	testNet.SetMaxData(ctx, nData)
@@ -105,8 +105,8 @@ func newTestNet(ctx *Context, nData int) *Network {
 	testNet.Rubicon.SetNUSs(ctx, 4, 3)
 	testNet.Rubicon.Defaults()
 
-	testNet.Build(ctx)
-	ctx.NData = uint32(nData)
+	testNet.Build()
+	testNet.Ctx.NData = uint32(nData)
 	testNet.Defaults()
 	testNet.ApplyParams(ParamSets["Base"], false) // false) // true) // no msg
 	testNet.InitWeights(ctx) // get GScale here
@@ -115,7 +115,7 @@ func newTestNet(ctx *Context, nData int) *Network {
 }
 
 // full connectivity
-func newTestNetFull(ctx *Context, nData int) *Network {
+func newTestNetFull(nData int) *Network {
 	testNet := NewNetwork("testNetFull")
 	testNet.SetRandSeed(42) // critical for ActAvg values
 	testNet.SetMaxData(ctx, nData)
@@ -141,7 +141,6 @@ func newTestNetFull(ctx *Context, nData int) *Network {
 
 func TestSynValues(t *testing.T) {
 	tol := Tol8
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	hidLay := testNet.LayerByName("Hidden")
 	p, err := hidLay.RecvPathBySendName("Input")
@@ -204,8 +203,6 @@ func TestSpikeProp(t *testing.T) {
 
 	pt := net.ConnectLayers(inLay, hidLay, paths.NewOneToOne(), ForwardPath)
 
-	ctx := NewContext()
-
 	net.Build(ctx)
 	net.Defaults()
 	net.ApplyParams(ParamSets["Base"], false)
@@ -267,8 +264,7 @@ func StructValues(obj any, vals map[string]float32, key string) {
 
 // TestInitWeights tests that initializing the weights results in same state
 func TestInitWeights(t *testing.T) {
-	nData := 4
-	ctx := NewContext()
+	nData := 1
 	testNet := newTestNet(ctx, nData)
 	inPats := newInPats()
 
@@ -359,7 +355,6 @@ func TestGPUAct(t *testing.T) {
 // Note: use NetDebugAct for printf debugging of all values --
 // "this is only a test"
 func NetActTest(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	testNet.InitExt(ctx)
 	inPats := newInPats()
@@ -555,7 +550,6 @@ func ReportValDiffs(t *testing.T, tolerance float32, va, vb map[string]float32,
 // and also returns a map of all values and variables that can be used for a more
 // fine-grained diff test, e.g., see the GPU version.
 func NetDebugAct(t *testing.T, printValues bool, gpu bool, nData int, initWts bool) map[string]float32 {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, nData)
 	testNet.ApplyParams(ParamSets["FullDecay"], false)
 	return RunDebugAct(t, ctx, testNet, printValues, gpu, initWts)
@@ -682,7 +676,6 @@ func TestGPULearn(t *testing.T) {
 }
 
 func NetTestLearn(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 
 	// fmt.Printf("synbanks: %d\n", ctx.NetIndexes.NSynCaBanks)
@@ -852,7 +845,6 @@ func TestGPURLRate(t *testing.T) {
 }
 
 func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
-	ctx := NewContext()
 	testNet := newTestNet(ctx, 1)
 	inPats := newInPats()
 	inLay := testNet.LayerByName("Input")
@@ -1014,7 +1006,6 @@ func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
 // and also returns a map of all values and variables that can be used for a more
 // fine-grained diff test, e.g., see the GPU version.
 func NetDebugLearn(t *testing.T, printValues bool, gpu bool, maxData, nData int, initWts, submean, slowAdapt bool) map[string]float32 {
-	ctx := NewContext()
 	var testNet *Network
 	rand.Seed(1337)
 
@@ -1249,7 +1240,6 @@ func TestGPUSynCa(t *testing.T) {
 	if os.Getenv("TEST_GPU") != "true" {
 		t.Skip("Set TEST_GPU env var to run GPU tests")
 	}
-	ctx := NewContext()
 	testNet := newTestNetFull(ctx, 16)
 	_ = testNet
 	// testNet.ConfigGPUnoGUI(ctx)
@@ -1277,8 +1267,6 @@ func TestInhibAct(t *testing.T) {
 	inhibNet.ConnectLayers(hidLay, outLay, one2one, ForwardPath)
 	inhibNet.ConnectLayers(outLay, hidLay, one2one, BackPath)
 
-	ctx := NewContext()
-
 	inhibNet.Build(ctx)
 	inhibNet.Defaults()
 	inhibNet.ApplyParams(ParamSets["Base"], false)
@@ -1405,7 +1393,6 @@ func saveToFile(net *Network, t *testing.T) {
 }
 
 func TestSendGatherIndexes(t *testing.T) {
-	ctx := NewContext()
 	nData := uint32(3)
 	net := newTestNet(ctx, int(nData))
diff --git a/axon/layerparams.go b/axon/layerparams.go
index 643521f6..98fd9a35 100644
--- a/axon/layerparams.go
+++ b/axon/layerparams.go
@@ -8,6 +8,7 @@ package axon
 
 import (
 	"encoding/json"
+	"fmt"
 
 	"cogentcore.org/core/math32"
 	"github.com/emer/axon/v2/fsfffb"
@@ -803,6 +804,7 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, lpi, ni, di uint32) {
 	spksper := ctx.ThetaCycles / 8
 	bin := ctx.Cycle / spksper
 	spk := Neurons.Value(int(Spike), int(ni), int(di))
+	fmt.Println(ctx.Cycle, bin)
 	switch bin {
 	case 0:
 		Neurons.SetAdd(spk, int(SpkBin0), int(ni), int(di))
diff --git a/axon/layerparams.goal b/axon/layerparams.goal
index 599318c8..1eb0e44a 100644
--- a/axon/layerparams.goal
+++ b/axon/layerparams.goal
@@ -5,6 +5,7 @@
 package axon
 
 import (
+	"fmt"
 	"encoding/json"
 
 	"cogentcore.org/core/math32"
@@ -801,6 +802,7 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, lpi, ni, di uint32) {
 	spksper := ctx.ThetaCycles / 8
 	bin := ctx.Cycle / spksper
 	spk := Neurons[Spike, ni, di]
+	fmt.Println(ctx.Cycle, bin)
 	switch bin {
 	case 0:
 		Neurons[SpkBin0, ni, di] += spk
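The SpikeFromG hunks probe which of the 8 spike bins the current cycle falls into. Both operands are integers, so the mapping is plain integer division; a standalone check of the arithmetic (assuming the usual 200-cycle theta window, which is configurable in axon) behaves like:

```go
package main

import "fmt"

// Standalone check of the bin arithmetic in SpikeFromG: with ThetaCycles=200,
// spksper = 200/8 = 25, so cycles 0-24 land in bin 0, ..., 175-199 in bin 7.
func main() {
	thetaCycles := int32(200)
	spksper := thetaCycles / 8
	for _, cycle := range []int32{0, 24, 25, 100, 175, 199} {
		bin := cycle / spksper
		fmt.Println(cycle, bin) // mirrors the fmt.Println(ctx.Cycle, bin) probe
	}
}
```

Note that if ThetaCycles were not a multiple of 8, the trailing remainder cycles would map to bin index 8, which the switch in SpikeFromG (cases 0-7, of which only case 0 appears in the hunk) would presumably drop silently.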
diff --git a/axon/networkbase.go b/axon/networkbase.go
index 6bcd0cb7..a25edcdc 100644
--- a/axon/networkbase.go
+++ b/axon/networkbase.go
@@ -698,6 +698,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 	nix.RubiconNPosUSs = nt.Rubicon.NPosUSs
 	nix.RubiconNNegUSs = nt.Rubicon.NNegUSs
 
+	fmt.Println("totPools", totPools)
 	nt.LayParams = make([]LayerParams, nLayers)
 	sltensor.SetShapeSizes(&nt.LayerStates, int(LayerVarsN), nLayers, maxData)
 	sltensor.SetShapeSizes(&nt.Pools, int(PoolVarsN), totPools, maxData)
@@ -734,7 +735,6 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		ly.NeurStIndex = uint32(neurIndex)
 		ly.MaxData = uint32(maxData)
 		np := ly.NumPools() + 1
-		npd := np * maxData
 		ly.NPools = uint32(np)
 		ly.Params.Index = uint32(li)
 		ly.Params.MaxData = uint32(maxData)
@@ -755,7 +755,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		}
 		for pi := 0; pi < np; pi++ {
 			for di := 0; di < maxData; di++ {
-				nt.PoolsInt.Set(int32(li), int(PoolLayerIdx), int(pi), int(di))
+				nt.PoolsInt.Set(int32(li), int(PoolLayerIdx), int(poolIndex+pi), int(di))
 			}
 		}
 		if ly.Type.IsExt() {
@@ -788,7 +788,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		rpathIndex += len(rpaths)
 		neurIndex += nn
 		pathIndex += len(spaths)
-		poolIndex += npd
+		poolIndex += np
 	}
 	if totSynapses > math.MaxUint32 {
 		log.Fatalf("ERROR: total number of synapses is greater than uint32 capacity\n")
diff --git a/axon/networkbase.goal b/axon/networkbase.goal
index 35d35bff..8e9a2afa 100644
--- a/axon/networkbase.goal
+++ b/axon/networkbase.goal
@@ -695,6 +695,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 	nix.RubiconNPosUSs = nt.Rubicon.NPosUSs
 	nix.RubiconNNegUSs = nt.Rubicon.NNegUSs
 
+	fmt.Println("totPools", totPools)
 	nt.LayParams = make([]LayerParams, nLayers)
 	sltensor.SetShapeSizes(&nt.LayerStates, int(LayerVarsN), nLayers, maxData)
 	sltensor.SetShapeSizes(&nt.Pools, int(PoolVarsN), totPools, maxData)
@@ -731,7 +732,6 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		ly.NeurStIndex = uint32(neurIndex)
 		ly.MaxData = uint32(maxData)
 		np := ly.NumPools() + 1
-		npd := np * maxData
 		ly.NPools = uint32(np)
 		ly.Params.Index = uint32(li)
 		ly.Params.MaxData = uint32(maxData)
@@ -752,7 +752,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		}
 		for pi := 0; pi < np; pi++ {
 			for di := 0; di < maxData; di++ {
-				nt.PoolsInt[PoolLayerIdx, pi, di] = int32(li)
+				nt.PoolsInt[PoolLayerIdx, poolIndex + pi, di] = int32(li)
 			}
 		}
 		if ly.Type.IsExt() {
@@ -785,7 +785,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
 		rpathIndex += len(rpaths)
 		neurIndex += nn
 		pathIndex += len(spaths)
-		poolIndex += npd
+		poolIndex += np
 	}
 	if totSynapses > math.MaxUint32 {
 		log.Fatalf("ERROR: total number of synapses is greater than uint32 capacity\n")
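The substantive fix in Build is the pool-row bookkeeping: rows of PoolsInt are global across layers, and the data-parallel index di is a separate trailing dimension of the tensor (shaped [PoolVarsN, totPools, maxData]), so the per-layer offset must be added to pi when writing PoolLayerIdx, and the running offset must advance by the pool count np rather than np*maxData. Previously every layer wrote rows 0..np-1 (overwriting the previous layer's entries) while poolIndex overran totPools. A minimal self-contained sketch of the corrected layout, using hypothetical stand-in slices rather than the repo's tensor types:

```go
package main

import "fmt"

// Sketch of the global pool-row bookkeeping that the networkbase.go hunks fix.
// poolLayer stands in for PoolsInt[PoolLayerIdx]; rows are global across layers,
// di is a separate trailing dimension and is not folded into the row offset.
func main() {
	npPerLayer := []int{3, 5, 2} // pools per layer, incl. the whole-layer pool
	maxData := 4
	totPools := 0
	for _, np := range npPerLayer {
		totPools += np // matches how totPools sizes the Pools/PoolsInt tensors
	}
	poolLayer := make([][]int32, totPools)
	for i := range poolLayer {
		poolLayer[i] = make([]int32, maxData)
	}
	poolIndex := 0
	for li, np := range npPerLayer {
		for pi := 0; pi < np; pi++ {
			for di := 0; di < maxData; di++ {
				poolLayer[poolIndex+pi][di] = int32(li) // global row, as in the fix
			}
		}
		poolIndex += np // old code advanced by np*maxData, overrunning totPools
	}
	fmt.Println(poolIndex == totPools) // true: rows exactly cover the tensor
}
```

With the old `poolIndex += npd` the final offset would have been maxData times too large, which is presumably what the `fmt.Println("totPools", totPools)` probe was added to confirm.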
diff --git a/axon/pool.go b/axon/pool.go
index f8c0a9da..01b0b619 100644
--- a/axon/pool.go
+++ b/axon/pool.go
@@ -9,6 +9,7 @@ import (
 	"cogentcore.org/core/base/atomicx"
 	"cogentcore.org/core/math32"
+	"fmt"
 	"github.com/emer/axon/v2/fsfffb"
 	"log"
 	"sync/atomic"
 )
@@ -280,6 +281,7 @@ func PoolPoolGi(ctx *Context, pi, di uint32) {
 		return
 	}
 	li := PoolsInt.Value(int(PoolLayerIdx), int(pi), int(di))
+	fmt.Println(li, pi, di)
 	PoolAvgMaxCalc(pi, di)
 	PoolInhibIntToRaw(pi, di)
 	ly := GetLayers(uint32(li))
diff --git a/axon/pool.goal b/axon/pool.goal
index 115a54a4..c3833ea2 100644
--- a/axon/pool.goal
+++ b/axon/pool.goal
@@ -5,6 +5,7 @@
 package axon
 
 import (
+	"fmt"
 	"log"
 	"sync/atomic"
 	"cogentcore.org/core/base/atomicx"
@@ -278,6 +279,7 @@ func PoolPoolGi(ctx *Context, pi, di uint32) {
 		return
 	}
 	li := PoolsInt[PoolLayerIdx, pi, di]
+	fmt.Println(li, pi, di)
 	PoolAvgMaxCalc(pi, di)
 	PoolInhibIntToRaw(pi, di)
 	ly := GetLayers(uint32(li))
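The PoolPoolGi probe prints the layer index recovered from PoolsInt for each global pool row — exactly the mapping the Build fix repairs, since a stale or overwritten PoolLayerIdx makes `GetLayers(uint32(li))` resolve the wrong layer. A hypothetical regression test along these lines could lock the invariant in; it would live in axon/basic_test.go, and the names beyond those shown in the diff (`net.Layers`, the test itself) are assumptions, not code from the repo:

```go
// Hypothetical test sketch: after Build, every global pool row for every
// data-parallel index should map back to the layer that owns it.
func TestPoolLayerIndexes(t *testing.T) {
	net := newTestNet(2) // ctx-free constructor per this diff
	pi := uint32(0)
	for li, ly := range net.Layers {
		for p := uint32(0); p < ly.NPools; p++ {
			for di := uint32(0); di < ly.MaxData; di++ {
				got := PoolsInt.Value(int(PoolLayerIdx), int(pi+p), int(di))
				if got != int32(li) {
					t.Errorf("pool %d data %d: got layer %d, want %d", pi+p, di, got, li)
				}
			}
		}
		pi += ly.NPools // advance by pool count, mirroring poolIndex += np
	}
}
```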