-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathsynapse.go
238 lines (186 loc) · 9.64 KB
/
synapse.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
// Copyright (c) 2019, The Emergent Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package axon
import (
"fmt"
)
//gosl:start synapse

// SynapseVars are the neuron variables representing current synaptic state,
// specifically weights.
type SynapseVars int32 //enums:enum

const (
	// Wt is effective synaptic weight value, determining how much conductance
	// one spike drives on the receiving neuron, representing the actual number
	// of effective AMPA receptors in the synapse.
	// Wt = SWt * WtSig(LWt), where WtSig produces values between 0-2 based on
	// LWt, centered on 1.
	Wt SynapseVars = iota

	// LWt is rapidly learning, linear weight value -- learns according to the
	// lrate specified in the connection spec. Biologically, this represents
	// the internal biochemical processes that drive the trafficking of AMPA
	// receptors in the synaptic density. Initially all LWt are .5, which gives
	// 1 from WtSig function.
	LWt

	// SWt is slowly adapting structural weight value, which acts as a
	// multiplicative scaling factor on synaptic efficacy: biologically
	// represents the physical size and efficacy of the dendritic spine.
	// SWt values adapt in an outer loop along with synaptic scaling, with
	// constraints to prevent runaway positive feedback loops and maintain
	// variance and further capacity to learn. Initial variance is all in SWt,
	// with LWt set to .5, and scaling absorbs some of LWt into SWt.
	SWt

	// DWt is delta (change in) synaptic weight, from learning -- updates LWt
	// which then updates Wt.
	DWt

	// DSWt is change in SWt slow synaptic weight -- accumulates DWt
	DSWt

	// IMPORTANT: if DSWt is not the last, need to update gosl defn below
)
// SynapseCaVars are synapse variables for calcium involved in learning,
// which are data parallel input specific.
type SynapseCaVars int32 //enums:enum

const (
	// Tr is trace of synaptic activity over time, which is used for
	// credit assignment in learning.
	// In MatrixPath this is a tag that is then updated later when US occurs.
	Tr SynapseCaVars = iota

	// DTr is delta (change in) Tr trace of synaptic activity over time
	DTr

	// DiDWt is delta weight for each data parallel index (Di).
	// This is directly computed from the Ca values (in cortical version)
	// and then aggregated into the overall DWt (which may be further
	// integrated across MPI nodes), which then drives changes in Wt values.
	DiDWt

	// IMPORTANT: if DiDWt is not the last, need to update gosl defn below
)
// SynapseIndexes are the neuron indexes and other uint32 values (flags, etc).
// There is only one of these per neuron -- not data parallel.
type SynapseIndexes int32 //enums:enum

const (
	// SynRecvIndex is receiving neuron index in network's global list of neurons
	SynRecvIndex SynapseIndexes = iota

	// SynSendIndex is sending neuron index in network's global list of neurons
	SynSendIndex

	// SynPathIndex is pathway index in global list of pathways organized as
	// [Layers][RecvPaths]
	SynPathIndex

	// IMPORTANT: if SynPathIndex is not the last, need to update gosl defn below
)
//gosl:end synapse
//gosl:hlsl synapse
/*
static const SynapseVars SynapseVarsN = DSWt + 1;
static const SynapseCaVars SynapseCaVarsN = DiDWt + 1;
static const SynapseIndexes SynapseIndexesN = SynPathIndex + 1;
*/
//gosl:end synapse
//gosl:start synapse
////////////////////////////////////////////////
// Strides
// SynapseVarStrides encodes the stride offsets for synapse variable access
// into network float32 array.
// NOTE(review): this struct is within a gosl block — field order and the
// pad fields (16-byte total size) presumably match the GPU-side layout;
// do not reorder or remove fields.
type SynapseVarStrides struct {
	// Synapse is the stride multiplier for the synapse index (see Index).
	Synapse uint32

	// Var is the stride multiplier for the variable index (see Index).
	Var uint32

	pad, pad1 uint32
}
// note: when increasing synapse var capacity beyond 2^31, change back to uint64

// Index returns the offset into the network float32 array for the given
// synapse index and variable.
func (ns *SynapseVarStrides) Index(synIndex uint32, nvar SynapseVars) uint32 {
	// uint64 variant, for capacity beyond 2^31:
	// return uint64(synIndex)*uint64(ns.Synapse) + uint64(nvar)*uint64(ns.Var)
	synOff := synIndex * ns.Synapse
	varOff := uint32(nvar) * ns.Var
	return synOff + varOff
}
// SetSynapseOuter sets strides so the synapse index is the outer dimension:
// [Synapses][Vars], which is optimal for CPU-based computation.
func (ns *SynapseVarStrides) SetSynapseOuter() {
	ns.Var = 1
	ns.Synapse = uint32(SynapseVarsN)
}
// SetVarOuter sets strides so the variable index is the outer dimension:
// [Vars][Synapses], which is optimal for GPU-based computation.
func (ns *SynapseVarStrides) SetVarOuter(nsyn int) {
	ns.Synapse = 1
	ns.Var = uint32(nsyn)
}
////////////////////////////////////////////////
// SynapseCaVars
// SynapseCaStrides encodes the stride offsets for synapse variable access
// into network float32 array. Data is always the inner-most variable.
// Strides are uint64 because synapse * data-parallel capacity can exceed 2^31.
type SynapseCaStrides struct {
	// Synapse is the stride multiplier for the synapse index (see Index).
	Synapse uint64

	// Var is the stride multiplier for the variable index (see Index).
	Var uint64
}
// Index returns the offset into the network float32 array for the given
// synapse, data parallel index (di), and variable.
func (ns *SynapseCaStrides) Index(synIndex, di uint32, nvar SynapseCaVars) uint64 {
	off := uint64(synIndex) * ns.Synapse
	off += uint64(nvar) * ns.Var
	return off + uint64(di)
}
// SetSynapseOuter sets strides so the synapse index is the outer dimension:
// [Synapses][Vars][Data], which is optimal for CPU-based computation.
func (ns *SynapseCaStrides) SetSynapseOuter(ndata int) {
	nd := uint64(ndata)
	ns.Var = nd
	ns.Synapse = nd * uint64(SynapseCaVarsN)
}
// SetVarOuter sets strides so the variable index is the outer dimension:
// [Vars][Synapses][Data], which is optimal for GPU-based computation.
func (ns *SynapseCaStrides) SetVarOuter(nsyn, ndata int) {
	nd := uint64(ndata)
	ns.Synapse = nd
	ns.Var = nd * uint64(nsyn)
}
////////////////////////////////////////////////
// Indexes
// SynapseIndexStrides encodes the stride offsets for synapse index access
// into network uint32 array.
// NOTE(review): this struct is within a gosl block — field order and the
// pad fields (16-byte total size) presumably match the GPU-side layout;
// do not reorder or remove fields.
type SynapseIndexStrides struct {
	// Synapse is the stride multiplier for the synapse index (see Index).
	Synapse uint32

	// Idx is the stride multiplier for the index-value index (see Index).
	Idx uint32

	pad, pad1 uint32
}
// Index returns the offset into the network uint32 array for the given
// synapse and index value.
func (ns *SynapseIndexStrides) Index(synIdx uint32, idx SynapseIndexes) uint32 {
	return uint32(idx)*ns.Idx + synIdx*ns.Synapse
}
// SetSynapseOuter sets strides so the synapse index is the outer dimension:
// [Synapses][Indexes] (outer to inner), which is optimal for CPU-based
// computation.
func (ns *SynapseIndexStrides) SetSynapseOuter() {
	ns.Idx = 1
	ns.Synapse = uint32(SynapseIndexesN)
}
// SetIndexOuter sets strides so the index value is the outer dimension:
// [Indexes][Synapses] (outer to inner), which is optimal for GPU-based
// computation.
func (ns *SynapseIndexStrides) SetIndexOuter(nsyn int) {
	ns.Synapse = 1
	ns.Idx = uint32(nsyn)
}
//gosl:end synapse
// SynapseVarProps has all of the display properties for synapse variables, including desc tooltips
var SynapseVarProps = map[string]string{
"Wt ": `desc:"effective synaptic weight value, determining how much conductance one spike drives on the receiving neuron, representing the actual number of effective AMPA receptors in the synapse. Wt = SWt * WtSig(LWt), where WtSig produces values between 0-2 based on LWt, centered on 1."`,
"LWt": `desc:"rapidly learning, linear weight value -- learns according to the lrate specified in the connection spec. Biologically, this represents the internal biochemical processes that drive the trafficking of AMPA receptors in the synaptic density. Initially all LWt are .5, which gives 1 from WtSig function."`,
"SWt": `desc:"slowly adapting structural weight value, which acts as a multiplicative scaling factor on synaptic efficacy: biologically represents the physical size and efficacy of the dendritic spine. SWt values adapt in an outer loop along with synaptic scaling, with constraints to prevent runaway positive feedback loops and maintain variance and further capacity to learn. Initial variance is all in SWt, with LWt set to .5, and scaling absorbs some of LWt into SWt."`,
"DWt": `auto-scale:"+" desc:"delta (change in) synaptic weight, from learning -- updates LWt which then updates Wt."`,
"DSWt": `auto-scale:"+" desc:"change in SWt slow synaptic weight -- accumulates DWt"`,
"CaM": `auto-scale:"+" desc:"first stage running average (mean) Ca calcium level (like CaM = calmodulin), feeds into CaP"`,
"CaP": `auto-scale:"+"desc:"shorter timescale integrated CaM value, representing the plus, LTP direction of weight change and capturing the function of CaMKII in the Kinase learning rule"`,
"CaD": `auto-scale:"+" desc:"longer timescale integrated CaP value, representing the minus, LTD direction of weight change and capturing the function of DAPK1 in the Kinase learning rule"`,
"Tr": `auto-scale:"+" desc:"trace of synaptic activity over time -- used for credit assignment in learning. In MatrixPath this is a tag that is then updated later when US occurs."`,
"DTr": `auto-scale:"+" desc:"delta (change in) Tr trace of synaptic activity over time"`,
"DiDWt": `auto-scale:"+" desc:"delta weight for each data parallel index (Di) -- this is directly computed from the Ca values (in cortical version) and then aggregated into the overall DWt (which may be further integrated across MPI nodes), which then drives changes in Wt values"`,
}
var (
	// SynapseVarNames is the combined list of all synapse variable names:
	// SynapseVars first, then SynapseCaVars (populated at package init).
	SynapseVarNames []string
	// SynapseVarsMap maps a variable name to its overall index in the
	// combined SynapseVars + SynapseCaVars space (populated at package init).
	SynapseVarsMap map[string]int
)
// init populates SynapseVarNames and SynapseVarsMap with all SynapseVars
// followed by all SynapseCaVars. Ca variable indexes in the map are offset
// by SynapseVarsN so the two enum spaces share one contiguous index range.
func init() {
	nv := int(SynapseVarsN)
	ncv := int(SynapseCaVarsN)
	// pre-size both containers: total count is known up front
	SynapseVarsMap = make(map[string]int, nv+ncv)
	SynapseVarNames = make([]string, 0, nv+ncv)
	for i := Wt; i < SynapseVarsN; i++ {
		vnm := i.String()
		SynapseVarNames = append(SynapseVarNames, vnm)
		SynapseVarsMap[vnm] = int(i)
	}
	for i := Tr; i < SynapseCaVarsN; i++ {
		vnm := i.String()
		SynapseVarNames = append(SynapseVarNames, vnm)
		SynapseVarsMap[vnm] = nv + int(i)
	}
}
// SynapseVarByName returns the index of the named variable in the combined
// Synapse variable space (SynapseVars then SynapseCaVars), or -1 and an
// error if the name is not a valid synapse variable.
func SynapseVarByName(varNm string) (int, error) {
	i, ok := SynapseVarsMap[varNm]
	if !ok {
		// lowercase error string per Go convention (staticcheck ST1005);
		// %q makes empty or whitespace-laden names visible in the message
		return -1, fmt.Errorf("synapse VarByName: variable name %q not valid", varNm)
	}
	return i, nil
}