Skip to content

Commit

Permalink
TestInitWeights is working except context needs to be the same; need to revisit.
Browse files Browse the repository at this point in the history
  • Loading branch information
rcoreilly committed Nov 3, 2024
1 parent bdf15d7 commit 1b59abb
Show file tree
Hide file tree
Showing 8 changed files with 24 additions and 42 deletions.
23 changes: 5 additions & 18 deletions axon/basic_test.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

23 changes: 5 additions & 18 deletions axon/basic_test.goal
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ var ParamSets = params.Sets{
},
}

func newTestNet(ctx *Context, nData int) *Network {
func newTestNet(nData int) *Network {
testNet := NewNetwork("testNet")
testNet.SetRandSeed(42) // critical for ActAvg values
testNet.SetMaxData(ctx, nData)
Expand All @@ -105,8 +105,8 @@ func newTestNet(ctx *Context, nData int) *Network {
testNet.Rubicon.SetNUSs(ctx, 4, 3)
testNet.Rubicon.Defaults()

testNet.Build(ctx)
ctx.NData = uint32(nData)
testNet.Build()
testNet.Ctx.NData = uint32(nData)
testNet.Defaults()
testNet.ApplyParams(ParamSets["Base"], false) // false) // true) // no msg
testNet.InitWeights(ctx) // get GScale here
Expand All @@ -115,7 +115,7 @@ func newTestNet(ctx *Context, nData int) *Network {
}

// full connectivity
func newTestNetFull(ctx *Context, nData int) *Network {
func newTestNetFull(nData int) *Network {
testNet := NewNetwork("testNetFull")
testNet.SetRandSeed(42) // critical for ActAvg values
testNet.SetMaxData(ctx, nData)
Expand All @@ -141,7 +141,6 @@ func newTestNetFull(ctx *Context, nData int) *Network {

func TestSynValues(t *testing.T) {
tol := Tol8
ctx := NewContext()
testNet := newTestNet(ctx, 1)
hidLay := testNet.LayerByName("Hidden")
p, err := hidLay.RecvPathBySendName("Input")
Expand Down Expand Up @@ -204,8 +203,6 @@ func TestSpikeProp(t *testing.T) {

pt := net.ConnectLayers(inLay, hidLay, paths.NewOneToOne(), ForwardPath)

ctx := NewContext()

net.Build(ctx)
net.Defaults()
net.ApplyParams(ParamSets["Base"], false)
Expand Down Expand Up @@ -267,8 +264,7 @@ func StructValues(obj any, vals map[string]float32, key string) {

// TestInitWeights tests that initializing the weights results in same state
func TestInitWeights(t *testing.T) {
nData := 4
ctx := NewContext()
nData := 1
testNet := newTestNet(ctx, nData)
inPats := newInPats()

Expand Down Expand Up @@ -359,7 +355,6 @@ func TestGPUAct(t *testing.T) {
// Note: use NetDebugAct for printf debugging of all values --
// "this is only a test"
func NetActTest(t *testing.T, tol float32, gpu bool) {
ctx := NewContext()
testNet := newTestNet(ctx, 1)
testNet.InitExt(ctx)
inPats := newInPats()
Expand Down Expand Up @@ -555,7 +550,6 @@ func ReportValDiffs(t *testing.T, tolerance float32, va, vb map[string]float32,
// and also returns a map of all values and variables that can be used for a more
// fine-grained diff test, e.g., see the GPU version.
func NetDebugAct(t *testing.T, printValues bool, gpu bool, nData int, initWts bool) map[string]float32 {
ctx := NewContext()
testNet := newTestNet(ctx, nData)
testNet.ApplyParams(ParamSets["FullDecay"], false)
return RunDebugAct(t, ctx, testNet, printValues, gpu, initWts)
Expand Down Expand Up @@ -682,7 +676,6 @@ func TestGPULearn(t *testing.T) {
}

func NetTestLearn(t *testing.T, tol float32, gpu bool) {
ctx := NewContext()
testNet := newTestNet(ctx, 1)

// fmt.Printf("synbanks: %d\n", ctx.NetIndexes.NSynCaBanks)
Expand Down Expand Up @@ -852,7 +845,6 @@ func TestGPURLRate(t *testing.T) {
}

func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
ctx := NewContext()
testNet := newTestNet(ctx, 1)
inPats := newInPats()
inLay := testNet.LayerByName("Input")
Expand Down Expand Up @@ -1014,7 +1006,6 @@ func NetTestRLRate(t *testing.T, tol float32, gpu bool) {
// and also returns a map of all values and variables that can be used for a more
// fine-grained diff test, e.g., see the GPU version.
func NetDebugLearn(t *testing.T, printValues bool, gpu bool, maxData, nData int, initWts, submean, slowAdapt bool) map[string]float32 {
ctx := NewContext()
var testNet *Network
rand.Seed(1337)

Expand Down Expand Up @@ -1249,7 +1240,6 @@ func TestGPUSynCa(t *testing.T) {
if os.Getenv("TEST_GPU") != "true" {
t.Skip("Set TEST_GPU env var to run GPU tests")
}
ctx := NewContext()
testNet := newTestNetFull(ctx, 16)
_ = testNet
// testNet.ConfigGPUnoGUI(ctx)
Expand Down Expand Up @@ -1277,8 +1267,6 @@ func TestInhibAct(t *testing.T) {
inhibNet.ConnectLayers(hidLay, outLay, one2one, ForwardPath)
inhibNet.ConnectLayers(outLay, hidLay, one2one, BackPath)

ctx := NewContext()

inhibNet.Build(ctx)
inhibNet.Defaults()
inhibNet.ApplyParams(ParamSets["Base"], false)
Expand Down Expand Up @@ -1405,7 +1393,6 @@ func saveToFile(net *Network, t *testing.T) {
}

func TestSendGatherIndexes(t *testing.T) {
ctx := NewContext()
nData := uint32(3)
net := newTestNet(ctx, int(nData))

Expand Down
2 changes: 2 additions & 0 deletions axon/layerparams.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions axon/layerparams.goal
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
package axon

import (
"fmt"
"encoding/json"

"cogentcore.org/core/math32"
Expand Down Expand Up @@ -801,6 +802,7 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, lpi, ni, di uint32) {
spksper := ctx.ThetaCycles / 8
bin := ctx.Cycle / spksper
spk := Neurons[Spike, ni, di]
fmt.Println(ctx.Cycle, bin)
switch bin {
case 0:
Neurons[SpkBin0, ni, di] += spk
Expand Down
6 changes: 3 additions & 3 deletions axon/networkbase.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions axon/networkbase.goal
Original file line number Diff line number Diff line change
Expand Up @@ -695,6 +695,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
nix.RubiconNPosUSs = nt.Rubicon.NPosUSs
nix.RubiconNNegUSs = nt.Rubicon.NNegUSs

fmt.Println("totPools", totPools)
nt.LayParams = make([]LayerParams, nLayers)
sltensor.SetShapeSizes(&nt.LayerStates, int(LayerVarsN), nLayers, maxData)
sltensor.SetShapeSizes(&nt.Pools, int(PoolVarsN), totPools, maxData)
Expand Down Expand Up @@ -731,7 +732,6 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
ly.NeurStIndex = uint32(neurIndex)
ly.MaxData = uint32(maxData)
np := ly.NumPools() + 1
npd := np * maxData
ly.NPools = uint32(np)
ly.Params.Index = uint32(li)
ly.Params.MaxData = uint32(maxData)
Expand All @@ -752,7 +752,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
}
for pi := 0; pi < np; pi++ {
for di := 0; di < maxData; di++ {
nt.PoolsInt[PoolLayerIdx, pi, di] = int32(li)
nt.PoolsInt[PoolLayerIdx, poolIndex + pi, di] = int32(li)
}
}
if ly.Type.IsExt() {
Expand Down Expand Up @@ -785,7 +785,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
rpathIndex += len(rpaths)
neurIndex += nn
pathIndex += len(spaths)
poolIndex += npd
poolIndex += np
}
if totSynapses > math.MaxUint32 {
log.Fatalf("ERROR: total number of synapses is greater than uint32 capacity\n")
Expand Down
2 changes: 2 additions & 0 deletions axon/pool.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions axon/pool.goal
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
package axon

import (
"fmt"
"log"
"sync/atomic"
"cogentcore.org/core/base/atomicx"
Expand Down Expand Up @@ -278,6 +279,7 @@ func PoolPoolGi(ctx *Context, pi, di uint32) {
return
}
li := PoolsInt[PoolLayerIdx, pi, di]
fmt.Println(li, pi, di)
PoolAvgMaxCalc(pi, di)
PoolInhibIntToRaw(pi, di)
ly := GetLayers(uint32(li))
Expand Down

0 comments on commit 1b59abb

Please sign in to comment.