trying 8 spike bins instead of 4
rcoreilly committed Jun 22, 2024
1 parent 7b63b37 commit c18299f
Showing 20 changed files with 98 additions and 26 deletions.
10 changes: 5 additions & 5 deletions axon/enumgen.go

Large diffs are not rendered by default.

16 changes: 14 additions & 2 deletions axon/layerparams.go
@@ -715,7 +715,7 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, ni, di uint32, lpl *Pool) {
SetNrnV(ctx, ni, di, SpkMax, spkmax)
}
}
spksper := ctx.ThetaCycles / 4
spksper := ctx.ThetaCycles / 8
bin := ctx.Cycle / spksper
spk := NrnV(ctx, ni, di, Spike)
switch bin {
@@ -725,8 +725,16 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, ni, di uint32, lpl *Pool) {
AddNrnV(ctx, ni, di, SpkBin1, spk)
case 2:
AddNrnV(ctx, ni, di, SpkBin2, spk)
default:
case 3:
AddNrnV(ctx, ni, di, SpkBin3, spk)
case 4:
AddNrnV(ctx, ni, di, SpkBin4, spk)
case 5:
AddNrnV(ctx, ni, di, SpkBin5, spk)
case 6:
AddNrnV(ctx, ni, di, SpkBin6, spk)
default:
AddNrnV(ctx, ni, di, SpkBin7, spk)
}
}
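For intuition, the binning above divides each theta-cycle trial into eight equal windows and accumulates spikes per window. A minimal standalone sketch of the bin indexing, assuming a 200-cycle trial (the helper name and the explicit clamp are illustrative only, not part of the commit):

// spikeBinIndex mirrors the logic above: spksper = thetaCycles / 8, so with
// thetaCycles = 200 each bin spans 25 cycles; cycles 0-24 map to bin 0,
// 25-49 to bin 1, ..., and anything from cycle 175 on lands in bin 7
// (the switch's default case plays the role of this clamp).
func spikeBinIndex(cycle, thetaCycles int32) int32 {
	spksper := thetaCycles / 8
	bin := cycle / spksper
	if bin > 7 {
		bin = 7
	}
	return bin
}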

@@ -990,6 +998,10 @@ func (ly *LayerParams) NewStateNeuron(ctx *Context, ni, di uint32, vals *LayerVa
SetNrnV(ctx, ni, di, SpkBin1, 0)
SetNrnV(ctx, ni, di, SpkBin2, 0)
SetNrnV(ctx, ni, di, SpkBin3, 0)
SetNrnV(ctx, ni, di, SpkBin4, 0)
SetNrnV(ctx, ni, di, SpkBin5, 0)
SetNrnV(ctx, ni, di, SpkBin6, 0)
SetNrnV(ctx, ni, di, SpkBin7, 0)

ly.Acts.DecayState(ctx, ni, di, ly.Acts.Decay.Act, ly.Acts.Decay.Glong, ly.Acts.Decay.AHP)
// Note: synapse-level Ca decay happens in DWt
4 changes: 4 additions & 0 deletions axon/neuron.go
@@ -138,6 +138,10 @@ const (
SpkBin1
SpkBin2
SpkBin3
SpkBin4
SpkBin5
SpkBin6
SpkBin7

// SpkPrv is final CaSpkD activation state at end of previous theta cycle. used for specialized learning mechanisms that operate on delayed sending activations.
SpkPrv
14 changes: 13 additions & 1 deletion axon/pathparams.go
@@ -313,13 +313,25 @@ func (pj *PathParams) SynCa(ctx *Context, si, ri, di uint32, syCaP, syCaD *float
sb2 := NrnV(ctx, si, di, SpkBin2)
rb3 := NrnV(ctx, ri, di, SpkBin3)
sb3 := NrnV(ctx, si, di, SpkBin3)
rb4 := NrnV(ctx, ri, di, SpkBin4)
sb4 := NrnV(ctx, si, di, SpkBin4)
rb5 := NrnV(ctx, ri, di, SpkBin5)
sb5 := NrnV(ctx, si, di, SpkBin5)
rb6 := NrnV(ctx, ri, di, SpkBin6)
sb6 := NrnV(ctx, si, di, SpkBin6)
rb7 := NrnV(ctx, ri, di, SpkBin7)
sb7 := NrnV(ctx, si, di, SpkBin7)

b0 := 0.1 * (rb0 * sb0)
b1 := 0.1 * (rb1 * sb1)
b2 := 0.1 * (rb2 * sb2)
b3 := 0.1 * (rb3 * sb3)
b4 := 0.1 * (rb4 * sb4)
b5 := 0.1 * (rb5 * sb5)
b6 := 0.1 * (rb6 * sb6)
b7 := 0.1 * (rb7 * sb7)

pj.Learn.KinaseCa.FinalCa(b0, b1, b2, b3, syCaP, syCaD)
pj.Learn.KinaseCa.FinalCa(b0, b1, b2, b3, b4, b5, b6, b7, syCaP, syCaD)
}

// DWtSynCortex computes the weight change (learning) at given synapse for cortex.
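The per-bin terms b0..b7 in SynCa above are coincidence products: the receiver's and sender's spike counts within the same bin, scaled by 0.1, which FinalCa then reduces to the CaP and CaD learning factors with fixed linear-regression weights. A self-contained sketch of that reduction, with the weights passed in explicitly (this function is illustrative; in the commit the weights live in SynCaLinear and CaGain is applied inside FinalCa):

// binnedSynCa computes synaptic CaP and CaD from 8 receiver (rb) and sender
// (sb) spike-bin counts and the corresponding 8-element weight vectors.
func binnedSynCa(rb, sb, wP, wD [8]float32, caGain float32) (caP, caD float32) {
	for i := range rb {
		b := 0.1 * rb[i] * sb[i] // pre/post coincidence within bin i
		caP += wP[i] * b
		caD += wD[i] * b
	}
	return caGain * caP, caGain * caD
}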
Binary file modified axon/shaders/gpu_applyext.spv
Binary file modified axon/shaders/gpu_cycle.spv
Binary file modified axon/shaders/gpu_dwt.spv
Binary file modified axon/shaders/gpu_dwtfmdi.spv
Binary file modified axon/shaders/gpu_dwtsubmean.spv
Binary file modified axon/shaders/gpu_gather.spv
Binary file modified axon/shaders/gpu_newstate_neuron.spv
Binary file modified axon/shaders/gpu_newstate_pool.spv
Binary file modified axon/shaders/gpu_plusneuron.spv
Binary file modified axon/shaders/gpu_plusstart.spv
Binary file modified axon/shaders/gpu_postspike.spv
Binary file modified axon/shaders/gpu_sendspike.spv
Binary file modified axon/shaders/gpu_wtfmdwt.spv
4 changes: 2 additions & 2 deletions examples/deep_fsa/params.go
@@ -62,12 +62,12 @@ var ParamSets = netparams.Sets{
"Layer.Inhib.Layer.FB": "1",
"Layer.Acts.Dend.SSGi": "0", // 0 > higher -- kills nmda maint!
"Layer.CT.GeGain": "2.0", // 2.0 > 1.5 for sure
"Layer.CT.DecayTau": "70", // 50 > 30 -- 30 ok but takes a bit to get going
"Layer.CT.DecayTau": "80", // 50 > 30 -- 30 ok but takes a bit to get going
"Layer.Acts.Decay.Act": "0.0",
"Layer.Acts.Decay.Glong": "0.0",
"Layer.Acts.GabaB.Gbar": "0.015", // 0.015 def > 0.01
"Layer.Acts.MaintNMDA.Gbar": "0.007", // 0.007 best, but 0.01 > lower if reg nmda weak
"Layer.Acts.MaintNMDA.Tau": "250", // 200 > 100 > 300
"Layer.Acts.MaintNMDA.Tau": "200", // 200 > 100 > 300
"Layer.Acts.NMDA.Gbar": "0.007", // 0.007 matching maint best
"Layer.Acts.NMDA.Tau": "200", // 200 > 100
"Layer.Learn.TrgAvgAct.SynScaleRate": "0.005", // 0.005 > 0.0002 (much worse)
19 changes: 14 additions & 5 deletions kinase/linear.go
@@ -30,10 +30,10 @@ type Linear struct {
PlusCycles int `default:"50"`

// CyclesPerBin specifies the bin size for accumulating spikes
CyclesPerBin int `default:"25"`
CyclesPerBin int `edit:"-"`

// NumBins = NCycles / CyclesPerBin
NumBins int `edit:"-"`
NumBins int `default:"8"`

// MaxHz is the maximum firing rate to sample in minus, plus phases
MaxHz int `default:"120"`
@@ -79,13 +79,14 @@ func (ls *Linear) Defaults() {
ls.MaxHz = 100
ls.StepHz = 10 // note: 5 gives same results
ls.NTrials = 2 // 20 "
ls.NumBins = 8
ls.Update()
}

func (ls *Linear) Update() {
ls.CyclesPerBin = ls.NCycles / ls.NumBins
ls.Neuron.Update()
ls.Synapse.Update()
ls.NumBins = ls.NCycles / ls.CyclesPerBin
nhz := ls.MaxHz / ls.StepHz
ls.TotalTrials = nhz * nhz * nhz * nhz * ls.NTrials
ls.SpikeBins = make([]float32, ls.NumBins)
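Note that the derivation is inverted relative to the old defaults: NumBins is now the fixed quantity (8) and CyclesPerBin is computed from it in Update(), rather than NumBins being derived from a fixed 25-cycle bin. As a worked example, assuming NCycles = 200 purely for illustration: CyclesPerBin = NCycles / NumBins = 200 / 8 = 25.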
@@ -307,9 +308,17 @@ func (ls *Linear) Regress() {
r.StopTolerance = 0.00001
r.ZeroOffset = true

// NBins = 4
// r.Coeff.Values = []float64{
// 0.05, 0.25, 0.5, 0.6, 0, // linear progression
// 0.25, 0.5, 0.5, 0.25, 0} // hump in the middle

// NBins = 8
r.Coeff.Values = []float64{
0.05, 0.25, 0.5, 0.6, 0, // linear progression
0.25, 0.5, 0.5, 0.25, 0} // hump in the middle
0.3, 0.4, 0.55, 0.65, 0.75, 0.85, 1.0, 1.0, 0, // linear progression
0.5, 0.65, 0.75, 0.9, 0.9, 0.9, 0.65, 0.55, .0} // hump in the middle

fmt.Println(r.Coeffs())

r.Run()

57 changes: 46 additions & 11 deletions kinase/params.go
@@ -157,44 +157,79 @@ func (kp *SynCaParams) FromCa(ca float32, caM, caP, caD *float32) {
kp.Dt.FromCa(kp.CaScale*ca, caM, caP, caD)
}

// BinWeights are coefficients for computing Ca based on binned
// BinWeights4 are 4 coefficients for computing Ca based on binned
// spike counts, for linear regression computation.
type BinWeights struct { //types:add
type BinWeights4 struct { //types:add
Bin0, Bin1, Bin2, Bin3 float32
}

func (bw *BinWeights) Init(b0, b1, b2, b3 float32) {
func (bw *BinWeights4) Init(b0, b1, b2, b3 float32) {
bw.Bin0 = b0
bw.Bin1 = b1
bw.Bin2 = b2
bw.Bin3 = b3
}

// Product returns product of weights times bin values
func (bw *BinWeights) Product(b0, b1, b2, b3 float32) float32 {
func (bw *BinWeights4) Product(b0, b1, b2, b3 float32) float32 {
return bw.Bin0*b0 + bw.Bin1*b1 + bw.Bin2*b2 + bw.Bin3*b3
}

// BinWeights8 are 8 coefficients for computing Ca based on binned
// spike counts, for linear regression computation.
type BinWeights8 struct { //types:add
Bin0, Bin1, Bin2, Bin3, Bin4, Bin5, Bin6, Bin7 float32
}

func (bw *BinWeights8) Init(b0, b1, b2, b3, b4, b5, b6, b7 float32) {
bw.Bin0 = b0
bw.Bin1 = b1
bw.Bin2 = b2
bw.Bin3 = b3
bw.Bin4 = b4
bw.Bin5 = b5
bw.Bin6 = b6
bw.Bin7 = b7
}

// Product returns product of weights times bin values
func (bw *BinWeights8) Product(b0, b1, b2, b3, b4, b5, b6, b7 float32) float32 {
return bw.Bin0*b0 + bw.Bin1*b1 + bw.Bin2*b2 + bw.Bin3*b3 + bw.Bin4*b4 + bw.Bin5*b5 + bw.Bin6*b6 + bw.Bin7*b7
}

// SynCaLinear computes synaptic calcium using linear equations from
// cascading Ca integration, including final CaP = CaMKII and CaD = DAPK1
// timescales for LTP potentiation vs. LTD depression factors.
type SynCaLinear struct { //types:add
CaP BinWeights `display:"inline"`
CaD BinWeights `display:"inline"`
CaP BinWeights8 `display:"inline"`
CaD BinWeights8 `display:"inline"`

// CaGain is extra multiplier for Synaptic Ca
CaGain float32 `default:"1"`
pad, pad1, pad2 float32
}

func (kp *SynCaLinear) Defaults() {
kp.CaP.Init(0.07, 0.3, 0.5, 0.6) // linear progression
kp.CaD.Init(0.25, 0.5, 0.5, 0.3) // up and down
// kp.CaP.Init(0.07, 0.3, 0.5, 0.6) // linear progression
// kp.CaD.Init(0.25, 0.5, 0.5, 0.3) // up and down
kp.CaP.Init(0.3, 0.4, 0.55, 0.65, 0.75, 0.85, 1.0, 1.0) // linear progression
kp.CaD.Init(0.5, 0.65, 0.75, 0.9, 0.9, 0.9, 0.65, 0.55) // up and down
kp.CaGain = 1
}

func (kp *SynCaLinear) Update() {
}

// // FinalCa4 uses a linear regression to compute the final Ca values
// func (kp *SynCaLinear) FinalCa4(b0, b1, b2, b3 float32, caP, caD *float32) {
// *caP = kp.CaP.Product(b0, b1, b2, b3)
// *caD = kp.CaD.Product(b0, b1, b2, b3)
// }

// FinalCa uses a linear regression to compute the final Ca values
func (kp *SynCaLinear) FinalCa(b0, b1, b2, b3 float32, caP, caD *float32) {
*caP = kp.CaP.Product(b0, b1, b2, b3)
*caD = kp.CaD.Product(b0, b1, b2, b3)
func (kp *SynCaLinear) FinalCa(b0, b1, b2, b3, b4, b5, b6, b7 float32, caP, caD *float32) {
*caP = kp.CaGain * kp.CaP.Product(b0, b1, b2, b3, b4, b5, b6, b7)
*caD = kp.CaGain * kp.CaD.Product(b0, b1, b2, b3, b4, b5, b6, b7)
}

//gosl:end kinase
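Putting the new defaults together, FinalCa is just a CaGain-scaled weighted sum of the eight per-bin products, with the weights taken from the Init calls above:

CaP = CaGain * (0.3*b0 + 0.4*b1 + 0.55*b2 + 0.65*b3 + 0.75*b4 + 0.85*b5 + 1.0*b6 + 1.0*b7)
CaD = CaGain * (0.5*b0 + 0.65*b1 + 0.75*b2 + 0.9*b3 + 0.9*b4 + 0.9*b5 + 0.65*b6 + 0.55*b7)

With CaGain at its default of 1, CaP ramps up toward the end of the trial while CaD peaks mid-trial and falls off, matching the qualitative shapes of the old 4-bin coefficients at twice the temporal resolution.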
