From c16f15d043fa331e21a86e90fde1d1b7ebd90d08 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 9 Aug 2024 13:34:14 -0700 Subject: [PATCH 01/10] params, emer.Layer rewrite progress --- emer/README.md | 12 +- emer/enumgen.go | 43 --- emer/layer.go | 650 +++++++++++++++++++++------------ emer/laynames.go | 7 +- emer/netparams.go | 9 +- emer/params.go | 354 ------------------ emer/typegen.go | 10 +- go.mod | 4 +- go.sum | 8 +- params/README.md | 45 +-- params/apply.go | 26 +- params/diff.go | 60 +-- params/flex.go | 14 +- params/io.go | 588 +---------------------------- params/params.go | 159 +------- params/params_test.go | 228 +++++------- params/styler.go | 32 +- params/tweak_test.go | 71 ++-- params/typegen.go | 8 +- relpos/rel.go | 108 +++--- weights/json.go | 5 +- weights/{wts.go => weights.go} | 0 22 files changed, 707 insertions(+), 1734 deletions(-) delete mode 100644 emer/params.go rename weights/{wts.go => weights.go} (100%) diff --git a/emer/README.md b/emer/README.md index 23509b00..98dc2324 100644 --- a/emer/README.md +++ b/emer/README.md @@ -1,15 +1,11 @@ Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/v2/emer) -Package emer provides minimal interfaces for the basic structural elements of neural networks -including: +Package emer provides minimal interfaces for the basic structural elements of neural networks including: * emer.Network, emer.Layer, emer.Unit, emer.Path (pathway that interconnects layers) -These interfaces are intended to be just sufficient to support visualization and generic -analysis kinds of functions, but explicitly avoid exposing ANY of the algorithmic aspects, -so that those can be purely encoded in the implementation structs. +These interfaces are intended to be just sufficient to support visualization and generic analysis kinds of functions, but explicitly avoid exposing ANY of the algorithmic aspects, so that those can be purely encoded in the implementation structs. -At this point, given the extra complexity it would require, these interfaces do not support -the ability to build or modify networks. +At this point, given the extra complexity it would require, these interfaces do not support the ability to build or modify networks. -Also added support for managing parameters in the `emer.Params` object, which handles standard parameter set logic and support for applying to networks, and the new `NetSize` map for configuring network size. +Also added support for managing parameters in the `emer.Params` object, which handles standard parameter set logic and support for applying to networks, and the `NetSize` map for configuring network size. diff --git a/emer/enumgen.go b/emer/enumgen.go index 4230ba2e..791f2cb4 100644 --- a/emer/enumgen.go +++ b/emer/enumgen.go @@ -6,49 +6,6 @@ import ( "cogentcore.org/core/enums" ) -var _LayerTypeValues = []LayerType{0, 1, 2, 3} - -// LayerTypeN is the highest valid value for type LayerType, plus one. 
-const LayerTypeN LayerType = 4 - -var _LayerTypeValueMap = map[string]LayerType{`Hidden`: 0, `Input`: 1, `Target`: 2, `Compare`: 3} - -var _LayerTypeDescMap = map[LayerType]string{0: `Hidden is an internal representational layer that does not receive direct input / targets`, 1: `Input is a layer that receives direct external input in its Ext inputs`, 2: `Target is a layer that receives direct external target inputs used for driving plus-phase learning`, 3: `Compare is a layer that receives external comparison inputs, which drive statistics but do NOT drive activation or learning directly`} - -var _LayerTypeMap = map[LayerType]string{0: `Hidden`, 1: `Input`, 2: `Target`, 3: `Compare`} - -// String returns the string representation of this LayerType value. -func (i LayerType) String() string { return enums.String(i, _LayerTypeMap) } - -// SetString sets the LayerType value from its string representation, -// and returns an error if the string is invalid. -func (i *LayerType) SetString(s string) error { - return enums.SetString(i, s, _LayerTypeValueMap, "LayerType") -} - -// Int64 returns the LayerType value as an int64. -func (i LayerType) Int64() int64 { return int64(i) } - -// SetInt64 sets the LayerType value from an int64. -func (i *LayerType) SetInt64(in int64) { *i = LayerType(in) } - -// Desc returns the description of the LayerType value. -func (i LayerType) Desc() string { return enums.Desc(i, _LayerTypeDescMap) } - -// LayerTypeValues returns all possible values for the type LayerType. -func LayerTypeValues() []LayerType { return _LayerTypeValues } - -// Values returns all possible values for the type LayerType. -func (i LayerType) Values() []enums.Enum { return enums.Values(_LayerTypeValues) } - -// MarshalText implements the [encoding.TextMarshaler] interface. -func (i LayerType) MarshalText() ([]byte, error) { return []byte(i.String()), nil } - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (i *LayerType) UnmarshalText(text []byte) error { - return enums.UnmarshalText(i, text, "LayerType") -} - var _PathTypeValues = []PathType{0, 1, 2, 3} // PathTypeN is the highest valid value for type PathType, plus one. diff --git a/emer/layer.go b/emer/layer.go index 1423fcac..a15c13e1 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -9,6 +9,8 @@ package emer import ( "fmt" "io" + "log" + "math" "cogentcore.org/core/math32" "cogentcore.org/core/tensor" @@ -17,216 +19,102 @@ import ( "github.com/emer/emergent/v2/weights" ) -// Layer defines the basic interface for neural network layers, used for managing the structural -// elements of a network, and for visualization, I/O, etc. -// Interfaces are automatically pointers -- think of this as a pointer to your specific layer -// type, with a very basic interface for accessing general structural properties. Nothing -// algorithm-specific is implemented here -- all of that goes in your specific layer struct. -type Layer interface { - params.Styler // TypeName, Name, and Class methods for parameter styling - - // InitName MUST be called to initialize the layer's pointer to itself as an emer.Layer - // which enables the proper interface methods to be called. Also sets the name, and - // the parent network that this layer belongs to (which layers may want to retain). 
-	InitName(lay Layer, name string, net Network)
-
-	// Label satisfies the core.Labeler interface for getting the name of objects generically
-	Label() string
-
-	// SetName sets name of layer
-	SetName(nm string)
-
-	// AddClass adds a CSS-style class name(s) for this layer,
-	// ensuring that it is not a duplicate, and properly space separated.
-	// Returns Layer so it can be chained to set other properties too
-	AddClass(cls ...string) Layer
-
-	// IsOff returns true if layer has been turned Off (lesioned) -- for experimentation
-	IsOff() bool
-
-	// SetOff sets the "off" (lesioned) status of layer. Also sets the Off state of all
-	// pathways from this layer to other layers.
-	SetOff(off bool)
-
-	// Shape returns the organization of units in the layer, in terms of an array of dimensions.
-	// Row-major ordering is default (Y then X), outer-most to inner-most.
-	// if 2D, then it is a simple Y,X layer with no sub-structure (pools).
-	// If 4D, then it number of pools Y, X and then number of units per pool Y, X
-	Shape() *tensor.Shape
-
-	// Is2D() returns true if this is a 2D layer (no Pools)
-	Is2D() bool
-
-	// Is4D() returns true if this is a 4D layer (has Pools as inner 2 dimensions)
-	Is4D() bool
-
-	// Index4DFrom2D returns the 4D index from 2D coordinates
-	// within which inner dims are interleaved. Returns false if 2D coords are invalid.
-	Index4DFrom2D(x, y int) ([]int, bool)
-
-	// Type returns the functional type of layer according to LayerType (extensible in
-	// more specialized algorithms)
-	Type() LayerType
-
-	// SetType sets the functional type of layer
-	SetType(typ LayerType)
-
-	// Config configures the basic parameters of the layer
-	Config(shape []int, typ LayerType)
-
-	// RelPos returns the relative 3D position specification for this layer
-	// for display in the 3D NetView -- see Pos() for display conventions.
-	RelPos() relpos.Rel
+var (
+	// LayerDimNames2D provides the standard Shape dimension names for 2D layers
+	LayerDimNames2D = []string{"Y", "X"}
 
-	// SetRelPos sets the the relative 3D position specification for this layer
-	SetRelPos(r relpos.Rel)
-
-	// Pos returns the 3D position of the lower-left-hand corner of the layer.
-	// The 3D view has layers arranged in X-Y planes stacked vertically along the Z axis.
-	// Somewhat confusingly, this differs from the standard 3D graphics convention,
-	// where the vertical dimension is Y and Z is the depth dimension. However, in the
-	// more "layer-centric" way of thinking about it, it is natural for the width & height
-	// to map onto X and Y, and then Z is left over for stacking vertically.
-	Pos() math32.Vector3
+	// LayerDimNames4D provides the standard Shape dimension names for 4D layers
+	// which have Pools and then neurons within pools.
+	LayerDimNames4D = []string{"PoolY", "PoolX", "NeurY", "NeurX"}
+)
 
-	// SetPos sets the 3D position of this layer -- will generally be overwritten by
-	// automatic RelPos setting, unless that doesn't specify a valid relative position.
-	SetPos(pos math32.Vector3)
+// Layer defines the basic interface for neural network layers,
+// necessary to support the visualization (NetView), I/O,
+// and parameter setting functionality provided by emergent.
+// Interfaces are automatically pointers, so think of this
+// as a pointer to your specific layer type.
+type Layer interface {
+	// StyleType, StyleClass, and StyleName methods for parameter styling.
+	params.Styler
 
-	// Size returns the display size of this layer for the 3D view -- see Pos() for general info.
- // This is multiplied by the RelPos.Scale factor to rescale layer sizes, and takes - // into account 2D and 4D layer structures. - Size() math32.Vector2 + // AsEmer returns the layer as an *emer.LayerBase, + // to access base functionality. + AsEmer() *LayerBase - // Index returns a 0..n-1 index of the position of the layer within list of layers - // in the network. For backprop networks, index position has computational significance. - // For Leabra networks, it only has significance in determining who gets which weights for - // enforcing initial weight symmetry -- higher layers get weights from lower layers. - Index() int + // Label satisfies the core.Labeler interface for getting + // the name of objects generically. + Label() string - // SetIndex sets the layer index - SetIndex(idx int) + // TypeName is the type or category of layer, defined + // by the algorithm (and usually set by an enum). + TypeName() string - // UnitVarNames returns a list of variable names available on the units in this layer. + // UnitVarNames returns a list of variable names available + // on the units in this layer. // This is typically a global list so do not modify! UnitVarNames() []string - // UnitVarProps returns a map of unit variable properties, with the key being the - // name of the variable, and the value gives a space-separated list of + // UnitVarProps returns a map of unit variable properties, + // with the key being the name of the variable, and the + // value gives a space-separated list of // go-tag-style properties for that variable. // The NetView recognizes the following properties: // range:"##" = +- range around 0 for default display scaling // min:"##" max:"##" = min, max display range - // auto-scale:"+" or "-" = use automatic scaling instead of fixed range or not. + // auto-scale:"+" or "-" = use automatic scaling instead of fixed + // range or not. // zeroctr:"+" or "-" = control whether zero-centering is used // desc:"txt" tooltip description of the variable // Note: this is a global list so do not modify! UnitVarProps() map[string]string - // UnitVarIndex returns the index of given variable within the Neuron, - // according to *this layer's* UnitVarNames() list (using a map to lookup index), - // or -1 and error message if not found. + // UnitVarIndex returns the index of given variable within + // the Neuron, according to *this layer's* UnitVarNames() list + // (using a map to lookup index), or -1 and error message if + // not found. UnitVarIndex(varNm string) (int, error) // UnitVarNum returns the number of Neuron-level variables - // for this layer. This is needed for extending indexes in derived types. + // for this layer. This is needed for extending indexes in + // derived types. UnitVarNum() int // UnitVal1D returns value of given variable index on given unit, // using 1-dimensional index, and a data parallel index di, - // for networks capable of processing multiple input patterns in parallel. - // returns NaN on invalid index. + // for networks capable of processing multiple input patterns + // in parallel. Returns NaN on invalid index. // This is the core unit var access method used by other methods, // so it is the only one that needs to be updated for derived layer types. UnitVal1D(varIndex int, idx, di int) float32 - // UnitValues fills in values of given variable name on unit, - // for each unit in the layer, into given float32 slice (only resized if not big enough). - // di is a data parallel index di, for networks capable of processing input patterns in parallel. 
- // Returns error on invalid var name. - UnitValues(vals *[]float32, varNm string, di int) error - - // UnitValuesTensor fills in values of given variable name on unit - // for each unit in the layer, into given tensor. - // di is a data parallel index di, for networks capable of processing input patterns in parallel. - // If tensor is not already big enough to hold the values, it is - // set to the same shape as the layer. - // Returns error on invalid var name. - UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) error - - // UnitValuesRepTensor fills in values of given variable name on unit - // for a smaller subset of representative units in the layer, into given tensor. - // di is a data parallel index di, for networks capable of processing input patterns in parallel. - // This is used for computationally intensive stats or displays that work - // much better with a smaller number of units. - // The set of representative units are defined by SetRepIndexes -- all units - // are used if no such subset has been defined. - // If tensor is not already big enough to hold the values, it is - // set to RepShape to hold all the values if subset is defined, - // otherwise it calls UnitValuesTensor and is identical to that. - // Returns error on invalid var name. - UnitValuesRepTensor(tsr tensor.Tensor, varNm string, di int) error - - // RepIndexes returns the current set of representative unit indexes. - // which are a smaller subset of units that represent the behavior - // of the layer, for computationally intensive statistics and displays - // (e.g., PCA, ActRF, NetView rasters). - // Returns nil if none has been set (in which case all units should be used). - // See utility function CenterPoolIndexes that returns indexes of - // units in the central pools of a 4D layer. - RepIndexes() []int - - // RepShape returns the shape to use for the subset of representative - // unit indexes, in terms of an array of dimensions. See Shape() for more info. - // Layers that set RepIndexes should also set this, otherwise a 1D array - // of len RepIndexes will be used. - // See utility function CenterPoolShape that returns shape of - // units in the central pools of a 4D layer. - RepShape() *tensor.Shape - - // SetRepIndexesShape sets the RepIndexes, and RepShape and as list of dimension sizes - SetRepIndexesShape(idxs, shape []int) - - // UnitVal returns value of given variable name on given unit, - // using shape-based dimensional index. - // Returns NaN on invalid var name or index. - // di is a data parallel index di, for networks capable of processing input patterns in parallel. - UnitValue(varNm string, idx []int, di int) float32 + // VarRange returns the min / max values for given variable + // over the layer + VarRange(varNm string) (min, max float32, err error) - // NRecvPaths returns the number of receiving pathways + // NRecvPaths returns the number of receiving pathways. NRecvPaths() int - // RecvPath returns a specific receiving pathway + // RecvPath returns a specific receiving pathway. RecvPath(idx int) Path - // NSendPaths returns the number of sending pathways + // NSendPaths returns the number of sending pathways. NSendPaths() int - // SendPath returns a specific sending pathway + // SendPath returns a specific sending pathway. 
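+	// For example, a minimal sketch of scanning all sending pathways,
+	// using only the NSendPaths and SendPath methods of this interface:
+	//
+	//	for pi := 0; pi < ly.NSendPaths(); pi++ {
+	//		pt := ly.SendPath(pi)
+	//		_ = pt // inspect or collect the pathway
+	//	}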
 	SendPath(idx int) Path
 
-	// SendNameTry looks for a pathway connected to this layer whose sender layer has a given name
-	SendNameTry(sender string) (Path, error)
-
-	// SendNameTypeTry looks for a pathway connected to this layer whose sender layer has a given name and type
-	SendNameTypeTry(sender, typ string) (Path, error)
-
-	// RecvNameTry looks for a pathway connected to this layer whose receiver layer has a given name
-	RecvNameTry(recv string) (Path, error)
-
-	// RecvNameTypeTry looks for a pathway connected to this layer whose receiver layer has a given name and type
-	RecvNameTypeTry(recv, typ string) (Path, error)
-
 	// RecvPathValues fills in values of given synapse variable name,
 	// for pathway from given sending layer and neuron 1D index,
 	// for all receiving neurons in this layer,
 	// into given float32 slice (only resized if not big enough).
-	// pathType is the string representation of the path type -- used if non-empty,
-	// useful when there are multiple pathways between two layers.
+	// pathType is the string representation of the path type;
+	// used if non-empty, useful when there are multiple pathways
+	// between two layers.
 	// Returns error on invalid var name.
-	// If the receiving neuron is not connected to the given sending layer or neuron
-	// then the value is set to math32.NaN().
-	// Returns error on invalid var name or lack of recv path (vals always set to nan on path err).
+	// If the receiving neuron is not connected to the given sending
+	// layer or neuron then the value is set to math32.NaN().
+	// Returns error on invalid var name or lack of recv path
+	// (vals always set to nan on path err).
 	RecvPathValues(vals *[]float32, varNm string, sendLay Layer, sendIndex1D int, pathType string) error
 
 	// SendPathValues fills in values of given synapse variable name,
@@ -241,18 +129,22 @@ type Layer interface {
 	// Returns error on invalid var name or lack of recv path (vals always set to nan on path err).
 	SendPathValues(vals *[]float32, varNm string, recvLay Layer, recvIndex1D int, pathType string) error
 
-	// Defaults sets default parameter values for all Layer and recv pathway parameters
-	Defaults()
+	// TODO: do we need all of these?
 
-	// UpdateParams() updates parameter values for all Layer and recv pathway parameters,
+	// UpdateParams updates parameter values for all Layer
+	// and recv pathway parameters,
 	// based on any other params that might have changed.
 	UpdateParams()
 
-	// ApplyParams applies given parameter style Sheet to this layer and its recv pathways.
-	// Calls UpdateParams on anything set to ensure derived parameters are all updated.
-	// If setMsg is true, then a message is printed to confirm each parameter that is set.
+	// ApplyParams applies given parameter style Sheet to this
+	// layer and its recv pathways.
+	// Calls UpdateParams on anything set to ensure derived
+	// parameters are all updated.
+	// If setMsg is true, then a message is printed to confirm
+	// each parameter that is set.
 	// it always prints a message if a parameter fails to be set.
-	// returns true if any params were set, and error if there were any errors.
+	// Returns true if any params were set, and error if
+	// there were any errors.
 	ApplyParams(pars *params.Sheet, setMsg bool) (bool, error)
 
 	// SetParam sets parameter at given path to given value.
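+	// A usage sketch (the parameter path shown here is hypothetical
+	// and algorithm-specific):
+	//
+	//	err := ly.SetParam("Inhib.Layer.Gi", "1.2")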
@@ -266,45 +158,358 @@ type Layer interface {
 	// AllParams returns a listing of all parameters in the Layer
 	AllParams() string
 
-	// WriteWtsJSON writes the weights from this layer from the receiver-side perspective
-	// in a JSON text format. We build in the indentation logic to make it much faster and
+	// WriteWtsJSON writes the weights from this layer from the
+	// receiver-side perspective in a JSON text format.
+	// We build in the indentation logic to make it much faster and
 	// more efficient.
 	WriteWtsJSON(w io.Writer, depth int)
 
-	// ReadWtsJSON reads the weights from this layer from the receiver-side perspective
-	// in a JSON text format. This is for a set of weights that were saved *for one layer only*
-	// and is not used for the network-level ReadWtsJSON, which reads into a separate
+	// ReadWtsJSON reads the weights from this layer from the
+	// receiver-side perspective in a JSON text format.
+	// This is for a set of weights that were saved
+	// *for one layer only* and is not used for the
+	// network-level ReadWtsJSON, which reads into a separate
 	// structure -- see SetWts method.
 	ReadWtsJSON(r io.Reader) error
 
-	// SetWts sets the weights for this layer from weights.Layer decoded values
+	// SetWts sets the weights for this layer from weights.Layer
+	// decoded values.
 	SetWts(lw *weights.Layer) error
+}
 
-	// Build constructs the layer and pathway state based on the layer shapes
-	// and patterns of interconnectivity
-	Build() error
+// LayerBase defines the basic shared data for neural network layers,
+// used for managing the structural elements of a network,
+// and for visualization, I/O, etc.
+// Nothing algorithm-specific is implemented here;
+// all of that goes in your specific layer struct.
+type LayerBase struct {
+	// EmerLayer provides access to the emer.Layer interface
+	// methods for functions defined in the LayerBase type.
+	// Must set this with a pointer to the actual instance
+	// when created, using InitLayer function.
+	EmerLayer Layer
+
+	// Name of the layer, which must be unique within the network.
+	// Layers are typically accessed directly by name, via a map.
+	Name string
+
+	// Class is for applying parameter styles across multiple layers
+	// that all get the same parameters. This can be space separated
+	// with multiple classes.
+	Class string
+
+	// Shape of the layer, either 2D or 4D. Although spatial topology
+	// is not relevant to all algorithms, the 2D shape is important for
+	// efficiently visualizing large numbers of units / neurons.
+	// 4D layers have 2D Pools of units embedded within a larger 2D
+	// organization of such pools. This is used for max-pooling or
+	// pooled inhibition at a finer-grained level, and biologically
+	// corresponds to hypercolumns in the cortex, for example.
+	// Order is outer-to-inner (row major), so Y then X for 2D;
+	// 4D: Y-X unit pools then Y-X neurons within pools.
+	Shape tensor.Shape
+
+	// Pos specifies the relative spatial relationship to another
+	// layer, which determines positioning. Every layer except one
+	// "anchor" layer should be positioned relative to another,
+	// e.g., RightOf, Above, etc. This provides robust positioning
+	// in the face of layer size changes etc.
+	// Layers are arranged in X-Y planes, stacked vertically along the Z axis.
+	Pos relpos.Pos `table:"-" display:"inline"`
+
+	// Index is a 0..n-1 index of the position of the layer within
+	// the list of layers in the network.
+	Index int `display:"-" inactive:"-"`
+
+	// SampleIndexes are the current set of "sample" unit indexes,
+	// which are a smaller subset of units that represent the behavior
+	// of the layer, for computationally intensive statistics and displays
+	// (e.g., PCA, ActRF, NetView rasters), when the layer is large.
+	// If nil (the default), all units are used.
+	// See utility function CenterPoolIndexes that returns indexes of
+	// units in the central pools of a 4D layer.
+	SampleIndexes []int
 
-	// VarRange returns the min / max values for given variable
-	// over the layer
-	VarRange(varNm string) (min, max float32, err error)
+	// SampleShape is the shape to use for the subset of sample
+	// unit indexes, in terms of an array of dimensions.
+	// See Shape for more info.
+	// Layers that set SampleIndexes should also set this,
+	// otherwise a 1D array of len SampleIndexes will be used.
+	// See utility function CenterPoolShape that returns shape of
+	// units in the central pools of a 4D layer.
+	SampleShape tensor.Shape
+}
+
+// InitLayer initializes the layer, setting the EmerLayer interface
+// to provide access to it for LayerBase methods, along with the name.
+func InitLayer(l Layer, name string) {
+	lb := l.AsEmer()
+	lb.EmerLayer = l
+	lb.Name = name
 }
 
-// LayerDimNames2D provides the standard Shape dimension names for 2D layers
-var LayerDimNames2D = []string{"Y", "X"}
+func (ly *LayerBase) AsEmer() *LayerBase { return ly }
 
-// LayerDimNames4D provides the standard Shape dimension names for 4D layers
-// which have Pools and then neurons within pools.
-var LayerDimNames4D = []string{"PoolY", "PoolX", "NeurY", "NeurX"}
+// params.Styler:
+func (ly *LayerBase) StyleType() string  { return "Layer" }
+func (ly *LayerBase) StyleClass() string { return ly.EmerLayer.TypeName() + " " + ly.Class }
+func (ly *LayerBase) StyleName() string  { return ly.Name }
+
+// AddClass adds a CSS-style class name(s) for this layer,
+// ensuring that it is not a duplicate, and properly space separated.
+// Returns the LayerBase so it can be chained to set other properties too.
+func (ly *LayerBase) AddClass(cls ...string) *LayerBase {
+	ly.Class = params.AddClass(ly.Class, cls...)
+	return ly
+}
+
+func (ly *LayerBase) Label() string { return ly.Name }
+
+// Is2D returns true if this is a 2D layer (no Pools).
+func (ly *LayerBase) Is2D() bool { return ly.Shape.NumDims() == 2 }
+
+// Is4D returns true if this is a 4D layer (has Pools as inner 2 dimensions).
+func (ly *LayerBase) Is4D() bool { return ly.Shape.NumDims() == 4 }
+
+// NumUnits returns the total number of units (neurons) in the layer.
+func (ly *LayerBase) NumUnits() int { return ly.Shape.Len() }
+
+// Index4DFrom2D returns the 4D index from 2D coordinates
+// within which inner dims are interleaved. Returns false if 2D coords are invalid.
+func (ly *LayerBase) Index4DFrom2D(x, y int) ([]int, bool) {
+	lshp := ly.Shape
+	nux := lshp.DimSize(3)
+	nuy := lshp.DimSize(2)
+	ux := x % nux
+	uy := y % nuy
+	px := x / nux
+	py := y / nuy
+	idx := []int{py, px, uy, ux}
+	if !lshp.IndexIsValid(idx) {
+		return nil, false
+	}
+	return idx, true
+}
+
+// PlaceRightOf positions the layer to the right of the other layer,
+// with given spacing, using default YAlign = Front alignment.
+func (ly *LayerBase) PlaceRightOf(other Layer, space float32) {
+	ly.Pos.SetRightOf(other.AsEmer().Name, space)
+}
+
+// PlaceBehind positions the layer behind the other layer,
+// with given spacing, using default XAlign = Left alignment.
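+// For example, a sketch of a simple three-layer arrangement
+// (the in, hid, and out layer variables are hypothetical):
+//
+//	hid.PlaceRightOf(in, 2)
+//	out.PlaceBehind(hid, 2)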
+func (ly *LayerBase) PlaceBehind(other Layer, space float32) {
+	ly.Pos.SetBehind(other.AsEmer().Name, space)
+}
+
+// PlaceAbove positions the layer above the other layer,
+// using default XAlign = Left, YAlign = Front alignment.
+func (ly *LayerBase) PlaceAbove(other Layer) {
+	ly.Pos.SetAbove(other.AsEmer().Name)
+}
+
+// Size returns the display size of this layer for the 3D view.
+// See the Pos field for general info.
+// This is multiplied by the Pos.Scale factor to rescale
+// layer sizes, and takes into account 2D and 4D layer structures.
+func (ly *LayerBase) Size() math32.Vector2 {
+	if ly.Pos.Scale == 0 {
+		ly.Pos.Defaults()
+	}
+	var sz math32.Vector2
+	switch {
+	case ly.Is2D():
+		sz = math32.Vec2(float32(ly.Shape.DimSize(1)), float32(ly.Shape.DimSize(0))) // Y, X
+	case ly.Is4D():
+		// note: pool spacing is handled internally in display and does not affect overall size
+		sz = math32.Vec2(float32(ly.Shape.DimSize(1)*ly.Shape.DimSize(3)), float32(ly.Shape.DimSize(0)*ly.Shape.DimSize(2))) // Y, X
+	default:
+		sz = math32.Vec2(float32(ly.Shape.Len()), 1)
+	}
+	return sz.MulScalar(ly.Pos.Scale)
+}
+
+// SetShape sets the layer shape and also uses default dim names.
+func (ly *LayerBase) SetShape(shape []int) {
+	var dnms []string
+	if len(shape) == 2 {
+		dnms = LayerDimNames2D
+	} else if len(shape) == 4 {
+		dnms = LayerDimNames4D
+	}
+	ly.Shape.SetShape(shape, dnms...)
+}
+
+// SetSampleIndexesShape sets the SampleIndexes and the SampleShape,
+// given as a list of unit indexes and a list of dimension sizes,
+// for a subset sample of units to represent the entire layer.
+// This is critical for large layers that are otherwise unwieldy
+// to visualize and for computationally-intensive statistics.
+func (ly *LayerBase) SetSampleIndexesShape(idxs, shape []int) {
+	ly.SampleIndexes = idxs
+	var dnms []string
+	if len(shape) == 2 {
+		dnms = LayerDimNames2D
+	} else if len(shape) == 4 {
+		dnms = LayerDimNames4D
+	}
+	ly.SampleShape.SetShape(shape, dnms...)
+}
+
+// GetSampleShape returns the shape to use for the sample subset of units,
+// which is the full layer Shape if no SampleIndexes have been set.
+func (ly *LayerBase) GetSampleShape() *tensor.Shape {
+	sz := len(ly.SampleIndexes)
+	if sz == 0 {
+		return &ly.Shape
+	}
+	if ly.SampleShape.Len() != sz {
+		ly.SampleShape.SetShape([]int{sz})
+	}
+	return &ly.SampleShape
+}
+
+// NumPools returns the number of unit pools according to the shape
+// parameters. 2D shapes have 0 pools.
+// For a 4D shape, the pools are the first set of 2 Y,X dims
+// and then the neurons within the pools are the 2nd set of 2 Y,X dims.
+func (ly *LayerBase) NumPools() int {
+	if ly.Shape.NumDims() != 4 {
+		return 0
+	}
+	return ly.Shape.DimSize(0) * ly.Shape.DimSize(1)
+}
+
+// UnitValues fills in values of given variable name on unit,
+// for each unit in the layer, into given float32 slice
+// (only resized if not big enough).
+// di is a data parallel index, for networks capable of
+// processing input patterns in parallel.
+// Returns error on invalid var name.
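+// A usage sketch (the "Act" variable name is an assumption;
+// real names come from UnitVarNames):
+//
+//	var acts []float32
+//	err := ly.AsEmer().UnitValues(&acts, "Act", 0)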
+func (ly *LayerBase) UnitValues(vals *[]float32, varNm string, di int) error {
+	nn := ly.NumUnits()
+	if *vals == nil || cap(*vals) < nn {
+		*vals = make([]float32, nn)
+	} else if len(*vals) < nn {
+		*vals = (*vals)[0:nn]
+	}
+	vidx, err := ly.EmerLayer.UnitVarIndex(varNm)
+	if err != nil {
+		nan := math32.NaN()
+		for lni := range nn {
+			(*vals)[lni] = nan
+		}
+		return err
+	}
+	for lni := range nn {
+		(*vals)[lni] = ly.EmerLayer.UnitVal1D(vidx, lni, di)
+	}
+	return nil
+}
+
+// UnitValuesTensor fills in values of given variable name
+// on unit for each unit in the layer, into given tensor.
+// di is a data parallel index, for networks capable of
+// processing input patterns in parallel.
+// If tensor is not already big enough to hold the values, it is
+// set to the same shape as the layer.
+// Returns error on invalid var name.
+func (ly *LayerBase) UnitValuesTensor(tsr tensor.Tensor, varNm string, di int) error {
+	if tsr == nil {
+		err := fmt.Errorf("emer.UnitValuesTensor: Tensor is nil")
+		log.Println(err)
+		return err
+	}
+	nn := ly.NumUnits()
+	tsr.SetShape(ly.Shape.Sizes, ly.Shape.Names...)
+	vidx, err := ly.EmerLayer.UnitVarIndex(varNm)
+	if err != nil {
+		nan := math.NaN()
+		for lni := 0; lni < nn; lni++ {
+			tsr.SetFloat1D(lni, nan)
+		}
+		return err
+	}
+	for lni := 0; lni < nn; lni++ {
+		v := ly.EmerLayer.UnitVal1D(vidx, lni, di)
+		if math32.IsNaN(v) {
+			tsr.SetFloat1D(lni, math.NaN())
+		} else {
+			tsr.SetFloat1D(lni, float64(v))
+		}
+	}
+	return nil
+}
+
+// UnitValuesSampleTensor fills in values of given variable name
+// on unit for a smaller subset of sample units
+// in the layer, into given tensor.
+// di is a data parallel index, for networks capable of
+// processing input patterns in parallel.
+// This is used for computationally intensive stats or displays that work
+// much better with a smaller number of units.
+// The set of sample units is defined by SetSampleIndexesShape; all units
+// are used if no such subset has been defined.
+// If tensor is not already big enough to hold the values, it is
+// set to SampleShape to hold all the values if subset is defined,
+// otherwise it calls UnitValuesTensor and is identical to that.
+// Returns error on invalid var name.
+func (ly *LayerBase) UnitValuesSampleTensor(tsr tensor.Tensor, varNm string, di int) error {
+	nu := len(ly.SampleIndexes)
+	if nu == 0 {
+		return ly.UnitValuesTensor(tsr, varNm, di)
+	}
+	if tsr == nil {
+		err := fmt.Errorf("emer.UnitValuesSampleTensor: Tensor is nil")
+		log.Println(err)
+		return err
+	}
+	if tsr.Len() != nu {
+		rs := ly.GetSampleShape()
+		tsr.SetShape(rs.Sizes, rs.Names...)
+	}
+	vidx, err := ly.EmerLayer.UnitVarIndex(varNm)
+	if err != nil {
+		nan := math.NaN()
+		for i := range ly.SampleIndexes {
+			tsr.SetFloat1D(i, nan)
+		}
+		return err
+	}
+	for i, ui := range ly.SampleIndexes {
+		v := ly.EmerLayer.UnitVal1D(vidx, ui, di)
+		if math32.IsNaN(v) {
+			tsr.SetFloat1D(i, math.NaN())
+		} else {
+			tsr.SetFloat1D(i, float64(v))
+		}
+	}
+	return nil
+}
+
+// UnitValue returns value of given variable name on given unit,
+// using shape-based dimensional index.
+// Returns NaN on invalid var name or index.
+// di is a data parallel index, for networks capable of
+// processing input patterns in parallel.
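+// For example, a sketch reading a hypothetical "Act" variable from
+// the unit at pool (0,0), neuron (1,2) of a 4D layer:
+//
+//	v := ly.UnitValue("Act", []int{0, 0, 1, 2}, 0)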
+func (ly *LayerBase) UnitValue(varNm string, idx []int, di int) float32 { + vidx, err := ly.EmerLayer.UnitVarIndex(varNm) + if err != nil { + return math32.NaN() + } + fidx := ly.Shape.Offset(idx) + return ly.EmerLayer.UnitVal1D(vidx, fidx, di) +} // CenterPoolIndexes returns the indexes for n x n center pools of given 4D layer. -// Useful for setting RepIndexes on Layer. +// Useful for setting SampleIndexes on Layer. // Will crash if called on non-4D layers. func CenterPoolIndexes(ly Layer, n int) []int { - nPy := ly.Shape().DimSize(0) - nPx := ly.Shape().DimSize(1) + lb := ly.AsEmer() + nPy := lb.Shape.DimSize(0) + nPx := lb.Shape.DimSize(1) sPy := (nPy - n) / 2 sPx := (nPx - n) / 2 - nu := ly.Shape().DimSize(2) * ly.Shape().DimSize(3) + nu := lb.Shape.DimSize(2) * lb.Shape.DimSize(3) nt := n * n * nu idxs := make([]int, nt) ix := 0 @@ -321,19 +526,21 @@ func CenterPoolIndexes(ly Layer, n int) []int { } // CenterPoolShape returns shape for n x n center pools of given 4D layer. -// Useful for setting RepShape on Layer. +// Useful for setting SampleShape on Layer. func CenterPoolShape(ly Layer, n int) []int { - return []int{n, n, ly.Shape().DimSize(2), ly.Shape().DimSize(3)} + lb := ly.AsEmer() + return []int{n, n, lb.Shape.DimSize(2), lb.Shape.DimSize(3)} } -// Layer2DRepIndexes returns neuron indexes and corresponding 2D shape +// Layer2DSampleIndexes returns neuron indexes and corresponding 2D shape // for the representative neurons within a large 2D layer, for passing to -// [SetRepIndexesShape]. These neurons are used for the raster plot +// [SetSampleIndexesShape]. These neurons are used for the raster plot // in the GUI and for computing PCA, among other cases where the full set // of neurons is problematic. The lower-left corner of neurons up to // given maxSize is selected. -func Layer2DRepIndexes(ly Layer, maxSize int) (idxs, shape []int) { - sh := ly.Shape() +func Layer2DSampleIndexes(ly Layer, maxSize int) (idxs, shape []int) { + lb := ly.AsEmer() + sh := lb.Shape my := min(maxSize, sh.DimSize(0)) mx := min(maxSize, sh.DimSize(1)) shape = []int{my, mx} @@ -351,45 +558,12 @@ func Layer2DRepIndexes(ly Layer, maxSize int) (idxs, shape []int) { ////////////////////////////////////////////////////////////////////////////////////// // Layers -// Layers is a slice of layers -type Layers []Layer - -// ElemLabel satisfies the core.SliceLabeler interface to provide labels for slice elements -func (ls *Layers) ElemLabel(idx int) string { - return (*ls)[idx].Name() -} - -////////////////////////////////////////////////////////////////////////////////////// -// LayerType - -// LayerType is the type of the layer: Input, Hidden, Target, Compare. -// Class parameter styles automatically key off of these types. -// Specialized algorithms can extend this to other types, but these types encompass -// most standard neural network models. 
-type LayerType int32 //enums:enum - -// The layer types -const ( - // Hidden is an internal representational layer that does not receive direct input / targets - Hidden LayerType = iota - - // Input is a layer that receives direct external input in its Ext inputs - Input - - // Target is a layer that receives direct external target inputs used for driving plus-phase learning - Target - - // Compare is a layer that receives external comparison inputs, which drive statistics but - // do NOT drive activation or learning directly - Compare -) - // we keep these here to make it easier for other packages to implement the emer.Layer interface // by just calling these methods func SendNameTry(l Layer, sender string) (Path, error) { for pi := 0; pi < l.NRecvPaths(); pi++ { pj := l.RecvPath(pi) - if pj.SendLay().Name() == sender { + if pj.SendLay().AsEmer().Name == sender { return pj, nil } } @@ -399,7 +573,7 @@ func SendNameTry(l Layer, sender string) (Path, error) { func RecvNameTry(l Layer, recv string) (Path, error) { for pi := 0; pi < l.NSendPaths(); pi++ { pj := l.SendPath(pi) - if pj.RecvLay().Name() == recv { + if pj.RecvLay().AsEmer().Name == recv { return pj, nil } } @@ -409,7 +583,7 @@ func RecvNameTry(l Layer, recv string) (Path, error) { func SendNameTypeTry(l Layer, sender, typ string) (Path, error) { for pi := 0; pi < l.NRecvPaths(); pi++ { pj := l.RecvPath(pi) - if pj.SendLay().Name() == sender && pj.PathTypeName() == typ { + if pj.SendLay().AsEmer().Name == sender && pj.PathTypeName() == typ { return pj, nil } } @@ -419,7 +593,7 @@ func SendNameTypeTry(l Layer, sender, typ string) (Path, error) { func RecvNameTypeTry(l Layer, recv, typ string) (Path, error) { for pi := 0; pi < l.NSendPaths(); pi++ { pj := l.SendPath(pi) - if pj.RecvLay().Name() == recv && pj.PathTypeName() == typ { + if pj.RecvLay().AsEmer().Name == recv && pj.PathTypeName() == typ { return pj, nil } } diff --git a/emer/laynames.go b/emer/laynames.go index 0b92fc2f..c443f0c6 100644 --- a/emer/laynames.go +++ b/emer/laynames.go @@ -31,11 +31,6 @@ func (ln *LayNames) Add(laynm ...string) { *ln = append(*ln, laynm...) } -// AddOne adds one layer name to list -- python version -- doesn't support varargs -func (ln *LayNames) AddOne(laynm string) { - *ln = append(*ln, laynm) -} - // AddAllBut adds all layers in network except those in exlude list func (ln *LayNames) AddAllBut(net Network, excl ...string) { exmap := make(map[string]struct{}) @@ -46,7 +41,7 @@ func (ln *LayNames) AddAllBut(net Network, excl ...string) { nl := net.NLayers() for li := 0; li < nl; li++ { aly := net.Layer(li) - nm := aly.Name() + nm := aly.AsEmer().Name if _, on := exmap[nm]; on { continue } diff --git a/emer/netparams.go b/emer/netparams.go index 0f31fcfd..53394a36 100644 --- a/emer/netparams.go +++ b/emer/netparams.go @@ -10,19 +10,18 @@ import ( "strings" "cogentcore.org/core/base/mpi" - "github.com/emer/emergent/v2/netparams" "github.com/emer/emergent/v2/params" ) -// NetParams handles standard parameters for a Network only -// (use econfig and a Config struct for other configuration params) +// NetParams handles standard parameters for a Network +// (use econfig and a Config struct for other configuration params). // Assumes a Set named "Base" has the base-level parameters, which are // always applied first, followed optionally by additional Set(s) // that can have different parameters to try. 
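+// For example, a minimal sketch of a params.Sets definition that
+// NetParams can apply (the Sel and parameter values are illustrative,
+// not from a real model):
+//
+//	var ParamSets = params.Sets{
+//		"Base": {
+//			{Sel: "Layer", Desc: "layer defaults",
+//				Params: params.Params{
+//					"Layer.Inhib.Layer.Gi": "1.1",
+//				}},
+//		},
+//	}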
type NetParams struct { // full collection of param sets to use - Params netparams.Sets `display:"no-inline"` + Params params.Sets `display:"no-inline"` // optional additional sheets of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Sheet names!) ExtraSheets string @@ -41,7 +40,7 @@ type NetParams struct { } // Config configures the ExtraSheets, Tag, and Network fields -func (pr *NetParams) Config(pars netparams.Sets, extraSheets, tag string, net Network) { +func (pr *NetParams) Config(pars params.Sets, extraSheets, tag string, net Network) { pr.Params = pars report := "" if extraSheets != "" { diff --git a/emer/params.go b/emer/params.go deleted file mode 100644 index 4929b4b2..00000000 --- a/emer/params.go +++ /dev/null @@ -1,354 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package emer - -import ( - "fmt" - "log" - "log/slog" - "strings" - - "github.com/emer/emergent/v2/params" -) - -// Params handles standard parameters for a Network and other objects. -// Assumes a Set named "Base" has the base-level parameters, which are -// always applied first, followed optionally by additional Set(s) -// that can have different parameters to try. -type Params struct { - - // full collection of param sets to use - Params params.Sets `display:"no-inline"` - - // optional additional set(s) of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Set names!) - ExtraSets string - - // optional additional tag to add to file names, logs to identify params / run config - Tag string - - // map of objects to apply parameters to -- the key is the name of the Sheet for each object, e.g., - Objects map[string]any `display:"-" Network", "Sim" are typically used"` - - // list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used - NetHypers params.Flex `display:"-"` - - // print out messages for each parameter that is set - SetMsg bool -} - -// AddNetwork adds network to those configured by params -- replaces any existing -// network that was set previously. -func (pr *Params) AddNetwork(net Network) { - pr.AddObject("Network", net) -} - -// AddSim adds Sim object to those configured by params -- replaces any existing. -func (pr *Params) AddSim(sim any) { - pr.AddObject("Sim", sim) -} - -// AddNetSize adds a new Network Schema object to those configured by params. -// The network schema can be retrieved using NetSize() method, and also the -// direct LayX, ..Y, PoolX, ..Y methods can be used to directly access values. -func (pr *Params) AddNetSize() *NetSize { - ns := &NetSize{} - pr.AddObject("NetSize", ns) - return ns -} - -// NetSize returns the NetSize network size configuration object -// nil if it was not added -func (pr *Params) NetSize() *NetSize { - ns, has := pr.Objects["NetSize"] - if !has { - return nil - } - return ns.(*NetSize) -} - -// AddLayers adds layer(s) of given class to the NetSize for sizing params. -// Most efficient to add each class separately en-mass. 
-func (pr *Params) AddLayers(names []string, class string) { - nsi, has := pr.Objects["NetSize"] - var ns *NetSize - if !has { - ns = pr.AddNetSize() - } else { - ns = nsi.(*NetSize) - } - ns.AddLayers(names, class) -} - -// LayX returns the X value = horizontal size of 2D layer or number of pools -// (outer dimension) for 4D layer, for given layer from NetSize, if it set there. -// Otherwise returns the provided default value -func (pr *Params) LayX(name string, def int) int { - ns := pr.NetSize() - if ns == nil { - return def - } - return ns.LayX(name, def) -} - -// LayY returns the Y value = vertical size of 2D layer or number of pools -// (outer dimension) for 4D layer, for given layer from NetSize, if it set there. -// Otherwise returns the provided default value -func (pr *Params) LayY(name string, def int) int { - ns := pr.NetSize() - if ns == nil { - return def - } - return ns.LayY(name, def) -} - -// PoolX returns the Pool X value (4D inner dim) = size of pool in units -// for given layer from NetSize if it set there. -// Otherwise returns the provided default value -func (pr *Params) PoolX(name string, def int) int { - ns := pr.NetSize() - if ns == nil { - return def - } - return ns.PoolX(name, def) -} - -// PoolY returns the Pool X value (4D inner dim) = size of pool in units -// for given layer from NetSize if it set there. -// Otherwise returns the provided default value -func (pr *Params) PoolY(name string, def int) int { - ns := pr.NetSize() - if ns == nil { - return def - } - return ns.PoolY(name, def) -} - -// AddObject adds given object with given sheet name that applies to this object. -// It is based on a map keyed on the name, so any existing object is replaced -// (safe to call repeatedly). -func (pr *Params) AddObject(name string, object any) { - if pr.Objects == nil { - pr.Objects = make(map[string]any) - } - pr.Objects[name] = object -} - -// Name returns name of current set of parameters, including Tag. -// if ExtraSets is empty then it returns "Base", otherwise returns ExtraSets -func (pr *Params) Name() string { - rn := "" - if pr.Tag != "" { - rn += pr.Tag + "_" - } - if pr.ExtraSets == "" { - rn += "Base" - } else { - rn += pr.ExtraSets - } - return rn -} - -// RunName returns standard name simulation run based on params Name() -// and starting run number if > 0 (large models are often run separately) -func (pr *Params) RunName(startRun int) string { - rn := pr.Name() - if startRun > 0 { - rn += fmt.Sprintf("_%03d", startRun) - } - return rn -} - -// Validate checks that there are sheets with the names for the -// Objects that have been added. -func (pr *Params) Validate() error { - names := []string{} - for nm := range pr.Objects { - names = append(names, nm) - } - return pr.Params.ValidateSheets(names) -} - -// SetAll sets all parameters, using "Base" Set then any ExtraSets, -// for all the Objects that have been added. Does a Validate call first. 
-func (pr *Params) SetAll() error { - err := pr.Validate() - if err != nil { - return err - } - for _, obj := range pr.Objects { - if hist, ok := obj.(params.History); ok { - hist.ParamsHistoryReset() - } - } - err = pr.SetAllSet("Base") - if pr.ExtraSets != "" && pr.ExtraSets != "Base" { - sps := strings.Fields(pr.ExtraSets) - for _, ps := range sps { - err = pr.SetAllSet(ps) - } - } - return err -} - -// SetAllSet sets parameters for given Set name to all Objects -func (pr *Params) SetAllSet(setName string) error { - pset, err := pr.Params.SetByNameTry(setName) - if err != nil { - return err - } - for nm, obj := range pr.Objects { - sh, ok := pset.Sheets[nm] - if !ok { - continue - } - sh.SelMatchReset(setName) - if nm == "Network" { - net := obj.(Network) - pr.SetNetworkSheet(net, sh, setName) - } else if nm == "NetSize" { - ns := obj.(*NetSize) - ns.ApplySheet(sh, pr.SetMsg) - } else { - sh.Apply(obj, pr.SetMsg) - } - err = sh.SelNoMatchWarn(setName, nm) - } - return err -} - -// SetObject sets parameters, using "Base" Set then any ExtraSets, -// for the given object name (e.g., "Network" or "Sim" etc). -// Does not do Validate or collect hyper parameters. -func (pr *Params) SetObject(objName string) error { - err := pr.SetObjectSet(objName, "Base") - if pr.ExtraSets != "" && pr.ExtraSets != "Base" { - sps := strings.Fields(pr.ExtraSets) - for _, ps := range sps { - err = pr.SetObjectSet(objName, ps) - } - } - return err -} - -// SetNetworkMap applies params from given map of values -// The map keys are Selector:Path and the value is the value to apply, as a string. -func (pr *Params) SetNetworkMap(net Network, vals map[string]any) error { - sh, err := params.MapToSheet(vals) - if err != nil { - log.Println(err) - return err - } - pr.SetNetworkSheet(net, sh, "ApplyMap") - return nil -} - -// SetNetworkSheet applies params from given sheet -func (pr *Params) SetNetworkSheet(net Network, sh *params.Sheet, setName string) { - net.ApplyParams(sh, pr.SetMsg) - hypers := NetworkHyperParams(net, sh) - if setName == "Base" { - pr.NetHypers = hypers - } else { - pr.NetHypers.CopyFrom(hypers) - } -} - -// SetObjectSet sets parameters for given Set name to given object -func (pr *Params) SetObjectSet(objName, setName string) error { - pset, err := pr.Params.SetByNameTry(setName) - if err != nil { - return err - } - sh, ok := pset.Sheets[objName] - if !ok { - err = fmt.Errorf("Params.SetObjectSet: sheet named: %s not found", objName) - return err - } - obj, ok := pr.Objects[objName] - if !ok { - err = fmt.Errorf("Params.SetObjectSet: Object named: %s not found", objName) - return err - } - sh.SelMatchReset(setName) - if objName == "Network" { - net := obj.(Network) - pr.SetNetworkSheet(net, sh, setName) - } else if objName == "NetSize" { - ns := obj.(*NetSize) - ns.ApplySheet(sh, pr.SetMsg) - } else { - sh.Apply(obj, pr.SetMsg) - } - err = sh.SelNoMatchWarn(setName, objName) - return err -} - -// NetworkHyperParams returns the compiled hyper parameters from given Sheet -// for each layer and pathway in the network -- applies the standard css -// styling logic for the hyper parameters. 
-func NetworkHyperParams(net Network, sheet *params.Sheet) params.Flex { - hypers := params.Flex{} - nl := net.NLayers() - for li := 0; li < nl; li++ { - ly := net.Layer(li) - nm := ly.Name() - // typ := ly.Type().String() - hypers[nm] = ¶ms.FlexVal{Nm: nm, Type: "Layer", Cls: ly.Class(), Obj: params.Hypers{}} - } - // separate pathways - for li := 0; li < nl; li++ { - ly := net.Layer(li) - np := ly.NRecvPaths() - for pi := 0; pi < np; pi++ { - pj := ly.RecvPath(pi) - nm := pj.Name() - // typ := pj.Type().String() - hypers[nm] = ¶ms.FlexVal{Nm: nm, Type: "Path", Cls: pj.Class(), Obj: params.Hypers{}} - } - } - for nm, vl := range hypers { - sheet.Apply(vl, false) - hv := vl.Obj.(params.Hypers) - hv.DeleteValOnly() - if len(hv) == 0 { - delete(hypers, nm) - } - } - return hypers -} - -// SetFloatParam sets given float32 param value to layer or pathway -// (typ = Layer or Path) of given name, at given path (which can start -// with the typ name). -// Returns an error (and logs it automatically) for any failure. -func SetFloatParam(net Network, name, typ, path string, val float32) error { - rpath := params.PathAfterType(path) - prs := fmt.Sprintf("%g", val) - switch typ { - case "Layer": - ly, err := net.LayerByNameTry(name) - if err != nil { - slog.Error(err.Error()) - return err - } - err = ly.SetParam(rpath, prs) - if err != nil { - slog.Error(err.Error()) - return err - } - case "Path": - pj, err := net.PathByNameTry(name) - if err != nil { - slog.Error(err.Error()) - return err - } - err = pj.SetParam(rpath, prs) - if err != nil { - slog.Error(err.Error()) - return err - } - } - return nil -} diff --git a/emer/typegen.go b/emer/typegen.go index e28c56b8..79510692 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,15 +6,15 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the basic interface for neural network layers, used for managing the structural\nelements of a network, and for visualization, I/O, etc.\nInterfaces are automatically pointers -- think of this as a pointer to your specific layer\ntype, with a very basic interface for accessing general structural properties. Nothing\nalgorithm-specific is implemented here -- all of that goes in your specific layer struct.", Methods: []types.Method{{Name: "InitName", Doc: "InitName MUST be called to initialize the layer's pointer to itself as an emer.Layer\nwhich enables the proper interface methods to be called. Also sets the name, and\nthe parent network that this layer belongs to (which layers may want to retain).", Args: []string{"lay", "name", "net"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "SetName", Doc: "SetName sets name of layer", Args: []string{"nm"}}, {Name: "AddClass", Doc: "AddClass adds a CSS-style class name(s) for this layer,\nensuring that it is not a duplicate, and properly space separated.\nReturns Layer so it can be chained to set other properties too", Args: []string{"cls"}, Returns: []string{"Layer"}}, {Name: "IsOff", Doc: "IsOff returns true if layer has been turned Off (lesioned) -- for experimentation", Returns: []string{"bool"}}, {Name: "SetOff", Doc: "SetOff sets the \"off\" (lesioned) status of layer. 
Also sets the Off state of all\npathways from this layer to other layers.", Args: []string{"off"}}, {Name: "Shape", Doc: "Shape returns the organization of units in the layer, in terms of an array of dimensions.\nRow-major ordering is default (Y then X), outer-most to inner-most.\nif 2D, then it is a simple Y,X layer with no sub-structure (pools).\nIf 4D, then it number of pools Y, X and then number of units per pool Y, X", Returns: []string{"Shape"}}, {Name: "Is2D", Doc: "Is2D() returns true if this is a 2D layer (no Pools)", Returns: []string{"bool"}}, {Name: "Is4D", Doc: "Is4D() returns true if this is a 4D layer (has Pools as inner 2 dimensions)", Returns: []string{"bool"}}, {Name: "Index4DFrom2D", Doc: "Index4DFrom2D returns the 4D index from 2D coordinates\nwithin which inner dims are interleaved. Returns false if 2D coords are invalid.", Args: []string{"x", "y"}, Returns: []string{"[]int", "bool"}}, {Name: "Type", Doc: "Type returns the functional type of layer according to LayerType (extensible in\nmore specialized algorithms)", Returns: []string{"LayerType"}}, {Name: "SetType", Doc: "SetType sets the functional type of layer", Args: []string{"typ"}}, {Name: "Config", Doc: "Config configures the basic parameters of the layer", Args: []string{"shape", "typ"}}, {Name: "RelPos", Doc: "RelPos returns the relative 3D position specification for this layer\nfor display in the 3D NetView -- see Pos() for display conventions.", Returns: []string{"Rel"}}, {Name: "SetRelPos", Doc: "SetRelPos sets the the relative 3D position specification for this layer", Args: []string{"r"}}, {Name: "Pos", Doc: "Pos returns the 3D position of the lower-left-hand corner of the layer.\nThe 3D view has layers arranged in X-Y planes stacked vertically along the Z axis.\nSomewhat confusingly, this differs from the standard 3D graphics convention,\nwhere the vertical dimension is Y and Z is the depth dimension. However, in the\nmore \"layer-centric\" way of thinking about it, it is natural for the width & height\nto map onto X and Y, and then Z is left over for stacking vertically.", Returns: []string{"Vector3"}}, {Name: "SetPos", Doc: "SetPos sets the 3D position of this layer -- will generally be overwritten by\nautomatic RelPos setting, unless that doesn't specify a valid relative position.", Args: []string{"pos"}}, {Name: "Size", Doc: "Size returns the display size of this layer for the 3D view -- see Pos() for general info.\nThis is multiplied by the RelPos.Scale factor to rescale layer sizes, and takes\ninto account 2D and 4D layer structures.", Returns: []string{"Vector2"}}, {Name: "Index", Doc: "Index returns a 0..n-1 index of the position of the layer within list of layers\nin the network. 
For backprop networks, index position has computational significance.\nFor Leabra networks, it only has significance in determining who gets which weights for\nenforcing initial weight symmetry -- higher layers get weights from lower layers.", Returns: []string{"int"}}, {Name: "SetIndex", Doc: "SetIndex sets the layer index", Args: []string{"idx"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within the Neuron,\naccording to *this layer's* UnitVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns in parallel.\nreturns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "UnitValues", Doc: "UnitValues fills in values of given variable name on unit,\nfor each unit in the layer, into given float32 slice (only resized if not big enough).\ndi is a data parallel index di, for networks capable of processing input patterns in parallel.\nReturns error on invalid var name.", Args: []string{"vals", "varNm", "di"}, Returns: []string{"error"}}, {Name: "UnitValuesTensor", Doc: "UnitValuesTensor fills in values of given variable name on unit\nfor each unit in the layer, into given tensor.\ndi is a data parallel index di, for networks capable of processing input patterns in parallel.\nIf tensor is not already big enough to hold the values, it is\nset to the same shape as the layer.\nReturns error on invalid var name.", Args: []string{"tsr", "varNm", "di"}, Returns: []string{"error"}}, {Name: "UnitValuesRepTensor", Doc: "UnitValuesRepTensor fills in values of given variable name on unit\nfor a smaller subset of representative units in the layer, into given tensor.\ndi is a data parallel index di, for networks capable of processing input patterns in parallel.\nThis is used for computationally intensive stats or displays that work\nmuch better with a smaller number of units.\nThe set of representative units are defined by SetRepIndexes -- all units\nare used if no such subset has been defined.\nIf tensor is not already big enough to hold the values, it is\nset to 
RepShape to hold all the values if subset is defined,\notherwise it calls UnitValuesTensor and is identical to that.\nReturns error on invalid var name.", Args: []string{"tsr", "varNm", "di"}, Returns: []string{"error"}}, {Name: "RepIndexes", Doc: "RepIndexes returns the current set of representative unit indexes.\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters).\nReturns nil if none has been set (in which case all units should be used).\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer.", Returns: []string{"[]int"}}, {Name: "RepShape", Doc: "RepShape returns the shape to use for the subset of representative\nunit indexes, in terms of an array of dimensions. See Shape() for more info.\nLayers that set RepIndexes should also set this, otherwise a 1D array\nof len RepIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer.", Returns: []string{"Shape"}}, {Name: "SetRepIndexesShape", Doc: "SetRepIndexesShape sets the RepIndexes, and RepShape and as list of dimension sizes", Args: []string{"idxs", "shape"}}, {Name: "UnitValue", Doc: "UnitVal returns value of given variable name on given unit,\nusing shape-based dimensional index.\nReturns NaN on invalid var name or index.\ndi is a data parallel index di, for networks capable of processing input patterns in parallel.", Args: []string{"varNm", "idx", "di"}, Returns: []string{"float32"}}, {Name: "NRecvPaths", Doc: "NRecvPaths returns the number of receiving pathways", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NSendPaths", Doc: "NSendPaths returns the number of sending pathways", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "SendNameTry", Doc: "SendNameTry looks for a pathway connected to this layer whose sender layer has a given name", Args: []string{"sender"}, Returns: []string{"Path", "error"}}, {Name: "SendNameTypeTry", Doc: "SendNameTypeTry looks for a pathway connected to this layer whose sender layer has a given name and type", Args: []string{"sender", "typ"}, Returns: []string{"Path", "error"}}, {Name: "RecvNameTry", Doc: "RecvNameTry looks for a pathway connected to this layer whose receiver layer has a given name", Args: []string{"recv"}, Returns: []string{"Path", "error"}}, {Name: "RecvNameTypeTry", Doc: "RecvNameTypeTry looks for a pathway connected to this layer whose receiver layer has a given name and type", Args: []string{"recv", "typ"}, Returns: []string{"Path", "error"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", 
"pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for all Layer and recv pathway parameters"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer and recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this layer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the receiver-side perspective\nin a JSON text format. We build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the receiver-side perspective\nin a JSON text format. 
This is for a set of weights that were saved *for one layer only*\nand is not used for the network-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer decoded values", Args: []string{"lw"}, Returns: []string{"error"}}, {Name: "Build", Doc: "Build constructs the layer and pathway state based on the layer shapes\nand patterns of interconnectivity", Returns: []string{"error"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the basic interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nInterfaces are automatically pointers, so think of this\nas a pointer to your specific layer type,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NRecvPaths", Doc: "NRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NSendPaths", Doc: "NSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: 
"WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layers", IDName: "layers", Doc: "Layers is a slice of layers"}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here;\nall of that goes in your specific layer struct.", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes returns the current set of representative unit indexes.\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters).\nReturns nil if none has been set (in which case all units should be used).\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape returns the shape to use for the subset of representative\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this, otherwise a 1D array\nof len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerType", IDName: "layer-type", Doc: "LayerType is the type of the layer: Input, Hidden, Target, Compare.\nClass parameter styles automatically key off of these types.\nSpecialized algorithms can extend this to other types, but these types encompass\nmost standard neural network models."}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layers", IDName: "layers", Doc: "Layers is a slice of layers"}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayNames", IDName: "lay-names", Doc: "LayNames is a list of layer names.\nHas convenience methods for adding, validating."}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetParams", IDName: "net-params", Doc: "NetParams handles standard parameters for a Network only\n(use econfig and a Config struct for other configuration params)\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSheets", Doc: "optional additional sheets of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Sheet names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Network", Doc: "the network to apply parameters to"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetParams", IDName: "net-params", Doc: "NetParams handles standard parameters for a Network\n(use econfig and a Config struct for other configuration params).\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSheets", Doc: "optional additional sheets of parameters to apply after Base -- can 
use multiple names separated by spaces (don't put spaces in Sheet names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Network", Doc: "the network to apply parameters to"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LaySize", IDName: "lay-size", Doc: "LaySize contains parameters for size of layers", Fields: []types.Field{{Name: "Y", Doc: "Y (vertical) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer"}, {Name: "X", Doc: "X (horizontal) size of layer -- in units for 2D, or number of pools (outer dimension) for 4D layer"}, {Name: "PoolY", Doc: "Y (vertical) size of each pool in units, only for 4D layers (inner dimension)"}, {Name: "PoolX", Doc: "X (horizontal) size of each pool in units, only for 4D layers (inner dimension)"}}}) @@ -22,8 +22,6 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetSiz var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the basic interface for a neural network, used for managing the structural\nelements of a network, and for visualization, I/O, etc", Methods: []types.Method{{Name: "InitName", Doc: "InitName MUST be called to initialize the network's pointer to itself as an emer.Network\nwhich enables the proper interface methods to be called. Also sets the name.", Args: []string{"net", "name"}}, {Name: "Name", Doc: "Name() returns name of the network", Returns: []string{"string"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "NLayers", Doc: "NLayers returns the number of layers in the network", Returns: []string{"int"}}, {Name: "Layer", Doc: "Layer returns layer (as emer.Layer interface) at given index -- does not\ndo extra bounds checking", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "LayerByName", Doc: "LayerByName returns layer of given name, nil if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer"}}, {Name: "LayerByNameTry", Doc: "LayerByNameTry returns layer of given name, returns error if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer", "error"}}, {Name: "PathByNameTry", Doc: "PathByNameTry returns path of given name, returns error if not found.\nPath names are SendToRecv, and are looked up by parsing the name", Args: []string{"name"}, Returns: []string{"Path", "error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to layers and paths in this network.\nCalls UpdateParams on anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be
set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Network that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Network", Returns: []string{"string"}}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on the units in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all layers need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapses in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all pathways need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes network weights (and any other state that adapts with learning)\nto JSON-formatted output.", Args: []string{"w"}, Returns: []string{"error"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads network weights (and any other state that adapts with learning)\nfrom JSON-formatted input. 
Reads into a temporary weights.Network structure that\nis then passed to SetWts to actually set the weights.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this network from weights.Network decoded values", Args: []string{"nw"}, Returns: []string{"error"}}, {Name: "SaveWtsJSON", Doc: "SaveWtsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWtsJSON", Doc: "OpenWtsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. If filename has .gz extension, then file is gzip uncompressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "Bounds", Doc: "Bounds returns the minimum and maximum display coordinates of the network for 3D display", Returns: []string{"min", "max"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "LayersByClass", Doc: "LayersByClass returns a list of layer names by given class(es).\nLists are compiled when network Build() function called.\nThe layer Type is always included as a Class, along with any other\nspace-separated strings specified in Class for parameter styling, etc.\nIf no classes are passed, all layer names in order are returned.", Args: []string{"classes"}, Returns: []string{"[]string"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Params", IDName: "params", Doc: "Params handles standard parameters for a Network and other objects.\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSets", Doc: "optional additional set(s) of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Set names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Objects", Doc: "map of objects to apply parameters to -- the key is the name of the Sheet for each object, e.g.,"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) - var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the basic interface for a pathway which connects two layers.\nName is set automatically to: SendLay().Name() + \"To\" + RecvLay().Name()", Methods: []types.Method{{Name: "Init", Doc: "Init MUST be called to initialize the path's pointer to itself as an emer.Path\nwhich enables the proper 
interface methods to be called.", Args: []string{"path"}}, {Name: "SendLay", Doc: "SendLay returns the sending layer for this pathway", Returns: []string{"Layer"}}, {Name: "RecvLay", Doc: "RecvLay returns the receiving layer for this pathway", Returns: []string{"Layer"}}, {Name: "Pattern", Doc: "Pattern returns the pattern of connectivity for interconnecting the layers", Returns: []string{"Pattern"}}, {Name: "SetPattern", Doc: "SetPattern sets the pattern of connectivity for interconnecting the layers.\nReturns Path so it can be chained to set other properties too", Args: []string{"pat"}, Returns: []string{"Path"}}, {Name: "Type", Doc: "Type returns the functional type of pathway according to PathType (extensible in\nmore specialized algorithms)", Returns: []string{"PathType"}}, {Name: "SetType", Doc: "SetType sets the functional type of pathway according to PathType\nReturns Path so it can be chained to set other properties too", Args: []string{"typ"}, Returns: []string{"Path"}}, {Name: "PathTypeName", Doc: "PathTypeName returns the string rep of functional type of pathway\naccording to PathType (extensible in more specialized algorithms, by\nredefining this method as needed).", Returns: []string{"string"}}, {Name: "AddClass", Doc: "AddClass adds a CSS-style class name(s) for this path,\nensuring that it is not a duplicate, and properly space separated.\nReturns Path so it can be chained to set other properties too", Args: []string{"cls"}, Returns: []string{"Path"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "IsOff", Doc: "IsOff returns true if pathway or either send or recv layer has been turned Off.\nUseful for experimentation", Returns: []string{"bool"}}, {Name: "SetOff", Doc: "SetOff sets the pathway Off status (i.e., lesioned). Careful: Layer.SetOff(true) will\nreactivate that layer's pathways, so pathway-level lesioning should always be called\nafter layer-level lesioning.", Args: []string{"off"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. 
This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "Syn1DNum", Doc: "Syn1DNum returns the number of synapses for this path as a 1D array.\nThis is the max idx for SynVal1D and the number of vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynVal1D", Doc: "SynVal1D returns value of given variable index (from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse, using the natural ordering\nof the synapses (sender based for Leabra),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue", Doc: "SynVal returns value of given variable name on the synapse\nbetween given send, recv unit indexes (1D, flat indexes).\nReturns math32.NaN() for access errors.", Args: []string{"varNm", "sidx", "ridx"}, Returns: []string{"float32"}}, {Name: "SetSynValue", Doc: "SetSynVal sets value of given variable name on the synapse\nbetween given send, recv unit indexes (1D, flat indexes).\nTypically only supports base synapse variables and is not extended\nfor derived types.\nReturns error for access errors.", Args: []string{"varNm", "sidx", "ridx", "val"}, Returns: []string{"error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for all Path parameters"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Projection that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Projection", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway from the receiver-side perspective\nin a JSON text format. We build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway from the receiver-side perspective\nin a JSON text format. 
This is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path decoded values", Args: []string{"pw"}, Returns: []string{"error"}}, {Name: "Build", Doc: "Build constructs the full connectivity among the layers as specified in this pathway.", Returns: []string{"error"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Paths", IDName: "paths", Doc: "Paths is a slice of pathways"}) diff --git a/go.mod b/go.mod index 899f428a..befefded 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/emer/emergent/v2 go 1.22 require ( - cogentcore.org/core v0.3.2-0.20240808234819-3fb8d2cbe011 + cogentcore.org/core v0.3.2-0.20240809164801-7009c15e5272 github.com/BurntSushi/toml v1.3.2 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 github.com/iancoleman/strcase v0.3.0 @@ -19,7 +19,7 @@ require ( github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/chewxy/math32 v1.10.1 // indirect - github.com/cogentcore/webgpu v0.0.0-20240808191559-eb41fd7f1eac // indirect + github.com/cogentcore/webgpu v0.0.0-20240809053032-52c74d0c674c // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/dlclark/regexp2 v1.11.0 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect diff --git a/go.sum b/go.sum index b2a35f65..1c087027 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,5 @@ -cogentcore.org/core v0.3.2-0.20240808234819-3fb8d2cbe011 h1:Zg1BELMsAL4mLdraAqRqrTx+7tqJYAMqngW92iyndqs= -cogentcore.org/core v0.3.2-0.20240808234819-3fb8d2cbe011/go.mod h1:3f1LPfTarLmmtfz8pJ6stWNIxj+I7NIxr4xwOXKLpfk= +cogentcore.org/core v0.3.2-0.20240809164801-7009c15e5272 h1:wdM6lmuuqOmC/ikIr+lWj7rTilGVZ7KdbH/JeCL5d5w= +cogentcore.org/core v0.3.2-0.20240809164801-7009c15e5272/go.mod h1:dyArrrZBlucGMUCMl8dAX/22nY32Bv7Q16vwE0K/daY= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4 h1:6lx9xzJAhdjq0LvVfbITeC3IH9Fzvo1aBahyPu2FuG8= github.com/Bios-Marcel/wastebasket v0.0.4-0.20240213135800-f26f1ae0a7c4/go.mod h1:FChzXi1izqzdPb6BiNZmcZLGyTYiT61iGx9Rxx9GNeI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -24,8 +24,8 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/chewxy/math32 v1.10.1 h1:LFpeY0SLJXeaiej/eIp2L40VYfscTvKh/FSEZ68uMkU= github.com/chewxy/math32 v1.10.1/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs= -github.com/cogentcore/webgpu v0.0.0-20240808191559-eb41fd7f1eac h1:idqcxylMuWPmyQ3FIz2rtt7zYKlqxDLyEE8NQoCydgk= -github.com/cogentcore/webgpu v0.0.0-20240808191559-eb41fd7f1eac/go.mod h1:ciqaxChrmRRMU1SnI5OE12Cn3QWvOKO+e5nSy+N9S1o= +github.com/cogentcore/webgpu v0.0.0-20240809053032-52c74d0c674c h1:2z7O6f9N0WDOKlz4Pjf0VlGgwGrqdmM9PmK2Ug9h1yQ= +github.com/cogentcore/webgpu v0.0.0-20240809053032-52c74d0c674c/go.mod h1:ciqaxChrmRRMU1SnI5OE12Cn3QWvOKO+e5nSy+N9S1o= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod 
h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= diff --git a/params/README.md b/params/README.md index 7ac4e21b..2f912f12 100644 --- a/params/README.md +++ b/params/README.md @@ -4,51 +4,40 @@ See [Wiki Params](https://github.com/emer/emergent/wiki/Params) page for detaile Package `params` provides general-purpose parameter management functionality for organizing multiple sets of parameters efficiently, and basic IO for saving / loading from JSON files and generating Go code to embed into applications, and a basic GUI for viewing and editing. -IMPORTANT: as of July, 2023, `params` has been deprecated in favor of [netparams](../netparams) which is focused only on `Network` params, for which the styling and structure of this params system makes the most sense. Use [econfig](../econfig) for setting params on standard struct objects such as Config structs. - -The main overall unit that is generally operated upon at run-time is the `params.Set`, which is a collection of `params.Sheet`'s (akin to CSS style sheets) that constitute a coherent set of parameters. Here's the structure: +The main overall unit that is generally operated upon at run-time is the `params.Sheet` (similar to a CSS style sheet), which constitutes a coherent set of parameters. Here's the structure: ``` Sets { - Set: "Base" { - Sheets { - "Network": { - Sel: "Layer" { - Params: { - "Layer.Inhib.Layer.Gi": "1.1", - ... - } - }, - Sel: ".Back" { - Params: { - "Path.PathScale.Rel": "0.2", - ... - } - } - }, - "Sim": { + "Base" { + Sel: "Layer" { + Params: { + "Layer.Inhib.Layer.Gi": "1.1", + ... + } + }, + Sel: ".Back" { + Params: { + "Path.PathScale.Rel": "0.2", + ... + } + } }, - Set: "Option1" { + "Option1" { ... } } ``` -A good strategy is to have a "Base" Set that has all the best parameters so far, and then other sets can modify specific params relative to that one. Order of application is critical, as subsequent params applications overwrite earlier ones, and the typical order is: +A good strategy is to have a "Base" Sheet that has all the best parameters so far, and then other sheets can modify specific params relative to that one. Order of application is critical, as subsequent params applications overwrite earlier ones, and the typical order is: * `Defaults()` method called that establishes the hard-coded default parameters. -* Then apply "Base" `params.Set` for any changes relative to those. -* Then optionally apply one or more additional `params.Set`'s with current experimental parameters. +* Then apply the "Base" sheet for any changes relative to those. +* Then optionally apply one or more additional sheets with current experimental parameters. Critically, all of this is entirely up to the particular model program(s) to determine and control -- this package just provides the basic data structures for holding all of the parameters, and the IO and Apply infrastructure. -Within a params.Set, multiple different params.Sheet's can be organized, with each CSS-style sheet achieving a relatively complete parameter styling of a given element of the overal model, e.g., "Network", "Sim", "Env". Or Network could be further broken down into "Learn" vs. "Act" etc, or according to different brain areas ("Hippo", "PFC", "BG", etc). Again, this is entirely at the discretion of the modeler and must be performed under explict program control, especially because order is so critical. - -Each `params.Sheet` consists of a collection of params.Sel elements which actually finally contain the parameters. The `Sel` field specifies a CSS-style selector determining over what scope the parameters should be applied: +Each `params.Sheet` consists of a collection of `params.Sel` elements, which contain the actual parameters. The `Sel` field specifies a CSS-style selector determining over what scope the parameters should be applied: * `Type` (no prefix) = name of a type -- anything having this type name will get these params.
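To make the new Sheet-level structure concrete, here is a minimal sketch of what such a declaration could look like in Go under this rewrite, assuming `Sets` maps sheet names to `Sheet`s of `Sel` elements with the `Sel`, `Desc`, and `Params` fields used elsewhere in this package; the selectors, parameter paths, and values are purely illustrative:

```
var ParamSets = params.Sets{
	"Base": {
		{Sel: "Layer", Desc: "base inhibition for all layers",
			Params: params.Params{
				"Layer.Inhib.Layer.Gi": "1.1",
			}},
		{Sel: ".Back", Desc: "weaker top-down feedback pathways",
			Params: params.Params{
				"Path.PathScale.Rel": "0.2",
			}},
	},
	"Option1": {
		{Sel: "#Hidden", Desc: "variant to try for one named layer",
			Params: params.Params{
				"Layer.Inhib.Layer.Gi": "1.4",
			}},
	},
}
```

Following the ordering logic above, "Base" would be applied first, and "Option1" only optionally afterward, overwriting the matching "Base" values.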
@@ -58,7 +47,7 @@ Each `params.Sheet` consists of a collection of params.Sel elements which actual The order of application within a given Sheet is also critical -- typically put the most general Type params first, then .Class, then the most specific #Name cases, to achieve within a given Sheet the same logic of establishing Base params for all types and then more specific overrides for special cases (e.g., an overall learning rate that applies across all pathways, but maybe a faster or slower one for a .Class or specific #Name'd pathway). -There is a params.Styler interface with methods that any Go type can implement to provide these different labels. The emer.Network, .Layer, and .Path interfaces each implement this interface. +There is a `params.Styler` interface with methods that any Go type can implement to provide these different labels. The emer.Network, .Layer, and .Path interfaces each implement this interface. Otherwise, the Apply method will just directly apply params to a given struct type if it does not implement the Styler interface.
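Putting the README's ordering together with the interfaces shown elsewhere in this patch, a typical application sequence could look roughly like the following sketch; it assumes the `Defaults`, `Sets.SheetByNameTry`, and `ApplyParams(sheet, setMsg)` methods referenced in this patch, and the exact signatures may differ:

```
// applyParams is a hypothetical helper showing the typical order:
// hard-coded defaults first, then the "Base" sheet, then an
// optional extra sheet that overrides specific values.
func applyParams(net emer.Network, sets params.Sets, extra string) error {
	net.Defaults()
	for _, name := range []string{"Base", extra} {
		if name == "" {
			continue
		}
		sheet, err := sets.SheetByNameTry(name)
		if err != nil {
			return err
		}
		// setMsg=true prints a message for each parameter that is set.
		if _, err := net.ApplyParams(sheet, true); err != nil {
			return err
		}
	}
	return nil
}
```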
diff --git a/params/apply.go b/params/apply.go index d185476b..e9f002c3 100644 --- a/params/apply.go +++ b/params/apply.go @@ -46,9 +46,9 @@ func (pr *Params) Path(path string) string { // parameter at given path, and returns error if so). func (pr *Params) Apply(obj any, setMsg bool) error { objNm := "" - if stylr, has := obj.(Styler); has { - objNm = stylr.Name() - if styob, has := obj.(StylerObj); has { + if styler, has := obj.(Styler); has { + objNm = styler.StyleName() + if styob, has := obj.(StylerObject); has { obj = styob.Object() } } else if lblr, has := obj.(labels.Labeler); has { @@ -104,9 +104,9 @@ func (pr *Hypers) Path(path string) string { // parameter at given path, and returns error if so). func (pr *Hypers) Apply(obj any, setMsg bool) error { objNm := "" - if stylr, has := obj.(Styler); has { - objNm = stylr.Name() - if styob, has := obj.(StylerObj); has { + if styler, has := obj.(Styler); has { + objNm = styler.StyleName() + if styob, has := obj.(StylerObject); has { obj = styob.Object() } } else if lblr, has := obj.(labels.Labeler); has { @@ -162,15 +162,15 @@ func (ps *Sel) Apply(obj any, setMsg bool) (bool, error) { // TargetTypeMatch return true if target type applies to object func (ps *Sel) TargetTypeMatch(obj any) bool { trg := ps.Params.TargetType() - if stylr, has := obj.(Styler); has { - tnm := stylr.TypeName() + if styler, has := obj.(Styler); has { + tnm := styler.StyleType() if tnm == trg { return true } } trgh := ps.Hypers.TargetType() - if stylr, has := obj.(Styler); has { - tnm := stylr.TypeName() + if styler, has := obj.(Styler); has { + tnm := styler.StyleType() if tnm == trgh { return true } @@ -181,15 +181,15 @@ func (ps *Sel) TargetTypeMatch(obj any) bool { // SelMatch returns true if Sel selector matches the target object properties func (ps *Sel) SelMatch(obj any) bool { - stylr, has := obj.(Styler) + styler, has := obj.(Styler) if !has { return true // default match if no styler..
} - if styob, has := obj.(StylerObj); has { + if styob, has := obj.(StylerObject); has { obj = styob.Object() } gotyp := reflectx.NonPointerType(reflect.TypeOf(obj)).Name() - return SelMatch(ps.Sel, stylr.Name(), stylr.Class(), stylr.TypeName(), gotyp) + return SelMatch(ps.Sel, styler.StyleName(), styler.StyleClass(), styler.StyleType(), gotyp) } // SelMatch returns true if Sel selector matches the target object properties diff --git a/params/diff.go b/params/diff.go index a4c978b7..2939a6da 100644 --- a/params/diff.go +++ b/params/diff.go @@ -63,61 +63,13 @@ func (ps *Sets) DiffsFirst() string { } // DiffsWithin reports all the cases where the same param path is being set -// to different values within different sheets in given set -func (ps *Sets) DiffsWithin(setName string) string { - set, err := ps.SetByNameTry(setName) +// to different values within given sheet. +func (ps *Sets) DiffsWithin(sheetName string) string { + sht, err := ps.SheetByNameTry(sheetName) if err != nil { return err.Error() } - return set.DiffsWithin() -} - -///////////////////////////////////////////////////////// -// Set - -// Diffs reports all the cases where the same param path is being set -// to different values between this set and the other set. -func (ps *Set) Diffs(ops *Set, name, otherName string) string { - pd := "" - for snm, sht := range ps.Sheets { - for osnm, osht := range ops.Sheets { - spd := sht.Diffs(osht, name+"."+snm, otherName+"."+osnm) - pd += spd - } - } - return pd -} - -// DiffsWithin reports all the cases where the same param path is being set -// to different values within different sheets -func (ps *Set) DiffsWithin() string { - return ps.Sheets.DiffsWithin() -} - -///////////////////////////////////////////////////////// -// Sheets - -// DiffsWithin reports all the cases where the same param path is being set -// to different values within different sheets -func (ps *Sheets) DiffsWithin() string { - pd := "Within Sheet Diffs (Same param path set differentially within a Sheet):\n\n" - for snm, sht := range *ps { - spd := sht.DiffsWithin(snm) - pd += spd - } - got := false - for snm, sht := range *ps { - for osnm, osht := range *ps { - spd := sht.Diffs(osht, snm, osnm) - if !got { - pd += "////////////////////////////////////////////////////////////////////////////////////\n" - pd += "Between Sheet Diffs (Same param path set differentially between two Sheets):\n\n" - got = true - } - pd += spd - } - } - return pd + return sht.DiffsWithin(sheetName) } ///////////////////////////////////////////////////////// @@ -138,13 +90,13 @@ func (ps *Sheet) Diffs(ops *Sheet, setNm1, setNm2 string) string { // DiffsWithin reports all the cases where the same param path is being set // to different values within different Sel's in this Sheet. -func (ps *Sheet) DiffsWithin(shtNm string) string { +func (ps *Sheet) DiffsWithin(sheetName string) string { pd := "" sz := len(*ps) for i, sel := range *ps { for j := i + 1; j < sz; j++ { osel := (*ps)[j] - spd := sel.Params.Diffs(&sel.Params, shtNm+":"+sel.Sel, shtNm+":"+osel.Sel) + spd := sel.Params.Diffs(&osel.Params, sheetName+":"+sel.Sel, sheetName+":"+osel.Sel) pd += spd } } diff --git a/params/flex.go b/params/flex.go index fd24535e..9cc7a2ea 100644 --- a/params/flex.go +++ b/params/flex.go @@ -15,8 +15,8 @@ import ( ) // FlexVal is a specific flexible value for the Flex parameter map -// that implements the StylerObj interface for CSS-style selection logic. -// The field names are abbreviated because full names are used in StylerObj.
+// that implements the StylerObject interface for CSS-style selection logic. +// The field names are abbreviated because full names are used in StylerObject. type FlexVal struct { // name of this specific object, matches #Name selections Nm string @@ -34,11 +34,11 @@ type FlexVal struct { History HistoryImpl `table:"-"` } -func (fv *FlexVal) TypeName() string { +func (fv *FlexVal) StyleType() string { return fv.Type } -func (fv *FlexVal) Class() string { +func (fv *FlexVal) StyleClass() string { return fv.Cls } @@ -85,14 +85,14 @@ func (fl *Flex) Make() { *fl = make(Flex) } -func (fl *Flex) TypeName() string { // note: assuming all same type for this purpose +func (fl *Flex) StyleType() string { // note: assuming all same type for this purpose for _, fv := range *fl { - return fv.TypeName() + return fv.StyleType() } return "Flex" } -func (fl *Flex) Class() string { +func (fl *Flex) StyleClass() string { return "" } diff --git a/params/io.go b/params/io.go index ac1526f5..65b0f034 100644 --- a/params/io.go +++ b/params/io.go @@ -306,118 +306,6 @@ func (pr *Sheet) SaveGoCode(filename core.Filename) error { return nil } -///////////////////////////////////////////////////////// -// Sheets - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Sheets) OpenJSON(filename core.Filename) error { - *pr = make(Sheets) // reset - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Sheets) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Sheets) OpenTOML(filename core.Filename) error { - *pr = make(Sheets) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Sheets) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Sheets) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("params.Sheets{\n")) - depth++ - nms := make([]string, len(*pr)) // alpha-sort names for consistent output - ctr := 0 - for nm := range *pr { - nms[ctr] = nm - ctr++ - } - sort.StringSlice(nms).Sort() - for _, nm := range nms { - pv := (*pr)[nm] - w.Write(indent.TabBytes(depth)) - w.Write([]byte(fmt.Sprintf("%q: &", nm))) - pv.WriteGoCode(w, depth) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("}")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Sheets) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. -func (pr *Sheets) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - WriteGoPrelude(fp, "SavedParamsSheets") - pr.WriteGoCode(fp, 0) - return nil -} - -///////////////////////////////////////////////////////// -// Set - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Set) OpenJSON(filename core.Filename) error { - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. 
-func (pr *Set) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Set) OpenTOML(filename core.Filename) error { - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Set) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Set) WriteGoCode(w io.Writer, depth int, name string) { - w.Write([]byte(fmt.Sprintf("Name: %q, Desc: %q, Sheets: ", name, pr.Desc))) - pr.Sheets.WriteGoCode(w, depth) -} - ///////////////////////////////////////////////////////// // Sets @@ -455,10 +343,10 @@ func (pr *Sets) SaveTOML(filename core.Filename) error { func (pr *Sets) WriteGoCode(w io.Writer, depth int) { w.Write([]byte("params.Sets{\n")) depth++ - for nm, st := range *pr { + for _, st := range *pr { w.Write(indent.TabBytes(depth)) w.Write([]byte("{")) - st.WriteGoCode(w, depth, nm) + st.WriteGoCode(w, depth) w.Write([]byte("},\n")) } depth-- @@ -485,475 +373,3 @@ func (pr *Sets) SaveGoCode(filename core.Filename) error { pr.WriteGoCode(fp, 0) return nil } - -/* -var ParamsProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - }, -} - -var HypersProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML 
formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - }, -} - -var SelProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - }, -} - -var SheetProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - }, -} - -var SheetsProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": 
".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - {"sep-diffs", tree.BlankProp{}}, - {"DiffsWithin", tree.Props{ - "desc": "reports where the same param path is being set to different values within this set (both within the same Sheet and betwen sheets)", - "icon": "search", - "show-return": true, - }}, - }, -} - -var SetProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"sep-diffs", tree.BlankProp{}}, - {"DiffsWithin", tree.Props{ - "desc": "reports where the same param path is being set to different values within this set (both within the same Sheet and betwen sheets)", - "icon": "search", - "show-return": true, - }}, - }, -} - -var SetsProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", 
tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - {"sep-diffs", tree.BlankProp{}}, - {"DiffsAll", tree.Props{ - "desc": "between all sets, reports where the same param path is being set to different values", - "icon": "search", - "show-return": true, - }}, - {"DiffsFirst", tree.Props{ - "desc": "between first set (e.g., the Base set) and rest of sets, reports where the same param path is being set to different values", - "icon": "search", - "show-return": true, - }}, - {"DiffsWithin", tree.Props{ - "desc": "reports all the cases where the same param path is being set to different values within different sheets in given set", - "icon": "search", - "show-return": true, - "Args": tree.PropSlice{ - {"Set Name", tree.Props{}}, - }, - }}, - }, -} -*/ diff --git a/params/params.go b/params/params.go index ddd2b26f..ab7247ca 100644 --- a/params/params.go +++ b/params/params.go @@ -169,183 +169,56 @@ func (sh *Sheet) ParamValue(sel, param string) (string, error) { /////////////////////////////////////////////////////////////////////// -// Sheets is a map of named sheets -- used in the Set -type Sheets map[string]*Sheet //types:add - -/////////////////////////////////////////////////////////////////////// - -// Set is a collection of Sheets that constitute a coherent set of parameters -- -// a particular specific configuration of parameters, which the user selects to use. -// The Set name is stored in the Sets map from which it is typically accessed. -// A good strategy is to have a "Base" set that has all the best parameters so far, -// and then other sets can modify relative to that one. It is up to the Sim code to -// apply parameter sets in whatever order is desired. -// -// Within a params.Set, multiple different params.Sheets can be organized, -// with each CSS-style sheet achieving a relatively complete parameter styling -// of a given element of the overal model, e.g., "Network", "Sim", "Env". -// Or Network could be further broken down into "Learn" vs. "Act" etc, -// or according to different brain areas ("Hippo", "PFC", "BG", etc). -// Again, this is entirely at the discretion of the modeler and must be -// performed under explict program control, especially because order is so critical. -// -// Note that there is NO deterministic ordering of the Sheets due to the use of -// a Go map structure, which specifically randomizes order, so simply iterating over them -// and applying may produce unexpected results -- it is better to lookup by name. -type Set struct { //types:add - - // description of this param set -- when should it be used? how is it different from the other sets? - Desc string `width:"60"` - - // Sheet's grouped according to their target and / or function. For example, - // "Network" for all the network params (or "Learn" vs. "Act" for more fine-grained), and "Sim" for overall simulation control parameters, "Env" for environment parameters, etc. It is completely up to your program to lookup these names and apply them as appropriate. 
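With the Set struct and its Sheets map being removed here, Sets maps a name directly to a single *Sheet, so lookup and set operations drop one level of indirection. A minimal sketch of the resulting usage, based only on the new method signatures later in this diff; the set name and parameter values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/emer/emergent/v2/params"
)

// sets maps a name directly to a *Sheet: one level, not two.
var sets = params.Sets{
	"Base": {
		{Sel: "Layer", Desc: "default inhibition",
			Params: params.Params{
				"Layer.Inhib.Layer.Gi": "1.8",
			}},
	},
}

func main() {
	// The first argument is now the sheet (former set) name.
	if err := sets.SetFloat("Base", "Layer", "Layer.Inhib.Layer.Gi", 1.4); err != nil {
		fmt.Println(err)
		return
	}
	gi, _ := sets.ParamValue("Base", "Layer", "Layer.Inhib.Layer.Gi")
	fmt.Println("Gi:", gi) // prints: Gi: 1.4
}
```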
- Sheets Sheets -} - -// SheetByNameTry tries to find given sheet by name, and returns error -// if not found (also logs the error) -func (ps *Set) SheetByNameTry(name string) (*Sheet, error) { - psht, ok := ps.Sheets[name] - if !ok { - err := fmt.Errorf("params.Set: Sheet named %v not found", name) - log.Println(err) - return nil, err - } - return psht, nil -} - -// SheetByName finds given sheet by name -- returns nil if not found. -// Use this when sure the sheet exists -- otherwise use Try version. -func (ps *Set) SheetByName(name string) *Sheet { - return ps.Sheets[name] -} - -// ValidateSheets ensures that the sheet names are among those listed -- returns -// error message for any that are not. Helps catch typos and makes sure params are -// applied properly. Automatically logs errors. -func (ps *Set) ValidateSheets(valids []string) error { - var invalids []string - for nm := range ps.Sheets { - got := false - for _, vl := range valids { - if nm == vl { - got = true - break - } - } - if !got { - invalids = append(invalids, nm) - } - } - if len(invalids) > 0 { - err := fmt.Errorf("params.Set: Invalid sheet names: %v", invalids) - log.Println(err) - return err - } - return nil -} - -// SetFloat sets the value of given parameter, in selection sel, -// in sheet -func (ps *Set) SetFloat(sheet, sel, param string, val float64) error { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return err - } - return sp.SetFloat(sel, param, val) -} - -// SetString sets the value of given parameter, in selection sel, -// in sheet -func (ps *Set) SetString(sheet, sel, param string, val string) error { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return err - } - return sp.SetString(sel, param, val) -} - -// ParamVal returns the value of given parameter, in selection sel, -// in sheet -func (ps *Set) ParamValue(sheet, sel, param string) (string, error) { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return "", err - } - return sp.ParamValue(sel, param) -} - -/////////////////////////////////////////////////////////////////////// - -// Sets is a collection of Set's that can be chosen among +// Sets is a collection of Sheets that can be chosen among // depending on different desired configurations etc. Thus, each Set // represents a collection of different possible specific configurations, // and different such configurations can be chosen by name to apply as desired. -type Sets map[string]*Set //types:add +type Sets map[string]*Sheet //git:add -// SetByNameTry tries to find given set by name, and returns error +// SheetByNameTry tries to find given set by name, and returns error // if not found (also logs the error) -func (ps *Sets) SetByNameTry(name string) (*Set, error) { +func (ps *Sets) SheetByNameTry(name string) (*Sheet, error) { st, ok := (*ps)[name] if ok { return st, nil } - err := fmt.Errorf("params.Sets: Param Set named %s not found", name) + err := fmt.Errorf("params.Sets: Param Sheet named %s not found", name) log.Println(err) return nil, err } -// SetByName returns given set by name -- for use when confident +// SheetByName returns given sheet by name -- for use when confident // that it exists, as a nil will return if not found with no error -func (ps *Sets) SetByName(name string) *Set { +func (ps *Sets) SheetByName(name string) *Sheet { return (*ps)[name] } -// ValidateSheets ensures that the sheet names are among those listed -- returns -// error message for any that are not. Helps catch typos and makes sure params are -// applied properly. 
Automatically logs errors. -func (ps *Sets) ValidateSheets(valids []string) error { - var err error - for _, st := range *ps { - er := st.ValidateSheets(valids) - if er != nil { - err = er - } - } - return err -} - -// // ElemLabel satisfies the core.SliceLabeler interface to provide labels for slice elements -// func (ps *Sets) ElemLabel(idx int) string { -// return (*ps)[idx].Name -// } - // SetFloat sets the value of given parameter, in selection sel, // in sheet and set. -func (ps *Sets) SetFloat(set, sheet, sel, param string, val float64) error { - sp, err := ps.SetByNameTry(set) +func (ps *Sets) SetFloat(sheet, sel, param string, val float64) error { + sp, err := ps.SheetByNameTry(sheet) if err != nil { return err } - return sp.SetFloat(sheet, sel, param, val) + return sp.SetFloat(sel, param, val) } // SetString sets the value of given parameter, in selection sel, // in sheet and set. Returns error if anything is not found. -func (ps *Sets) SetString(set, sheet, sel, param string, val string) error { - sp, err := ps.SetByNameTry(set) +func (ps *Sets) SetString(sheet, sel, param string, val string) error { + sp, err := ps.SheetByNameTry(sheet) if err != nil { return err } - return sp.SetString(sheet, sel, param, val) + return sp.SetString(sel, param, val) } // ParamVal returns the value of given parameter, in selection sel, // in sheet and set. Returns error if anything is not found. -func (ps *Sets) ParamValue(set, sheet, sel, param string) (string, error) { - sp, err := ps.SetByNameTry(set) +func (ps *Sets) ParamValue(sheet, sel, param string) (string, error) { + sp, err := ps.SheetByNameTry(sheet) if err != nil { return "", err } - return sp.ParamValue(sheet, sel, param) + return sp.ParamValue(sel, param) } diff --git a/params/params_test.go b/params/params_test.go index f48c34d4..b40ca7d5 100644 --- a/params/params_test.go +++ b/params/params_test.go @@ -12,137 +12,95 @@ import ( ) var paramSets = Sets{ - "Base": {Desc: "these are the best params", Sheets: Sheets{ - "Network": &Sheet{ - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: Params{ - "Path.Learn.Norm.On": "true", - "Path.Learn.Momentum.On": "true", - "Path.Learn.WtBal.On": "false", - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, - }, + "Base": { + {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", + Params: Params{ + "Path.Learn.Norm.On": "true", + "Path.Learn.Momentum.On": "true", + "Path.Learn.WtBal.On": "false", + }}, + {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", + Params: Params{ + "Layer.Inhib.Layer.Gi": "1.8", + }, + Hypers: Hypers{ + "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, }, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "Sim": &Sheet{ // sim params apply to sim object - {Sel: "Sim", Desc: "best params always finish in this time", - Params: Params{ - "Sim.MaxEpcs": "50", - }}, - }, - }}, - "DefaultInhib": {Desc: "output uses default inhib instead of lower", Sheets: Sheets{ - "Network": &Sheet{ 
- {Sel: "#Output", Desc: "go back to default", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }}, - }, - "Sim": &Sheet{ // sim params apply to sim object - {Sel: "Sim", Desc: "takes longer -- generally doesn't finish..", - Params: Params{ - "Sim.MaxEpcs": "100", - }, Hypers: Hypers{ - "Sim.MaxEps": {"Val": "90", "Min": "40", "Max": "2000"}, - }}, - }, - }}, - "NoMomentum": {Desc: "no momentum or normalization", Sheets: Sheets{ - "Network": &Sheet{ - {Sel: "Path", Desc: "no norm or momentum", - Params: Params{ - "Path.Learn.Norm.On": "false", - "Path.Learn.Momentum.On": "false", - }}, - }, - }}, - "WtBalOn": {Desc: "try with weight bal on", Sheets: Sheets{ - "Network": &Sheet{ - {Sel: "Path", Desc: "weight bal on", - Params: Params{ - "Path.Learn.WtBal.On": "true", - }}, }, - }}, + {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", + Params: Params{ + "Layer.Inhib.Layer.Gi": "1.4", + }}, + {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + Params: Params{ + "Path.WtScale.Rel": "0.2", + }}, + }, + "DefaultInhib": { + {Sel: "#Output", Desc: "go back to default", + Params: Params{ + "Layer.Inhib.Layer.Gi": "1.8", + }}, + }, + "NoMomentum": { + {Sel: "Path", Desc: "no norm or momentum", + Params: Params{ + "Path.Learn.Norm.On": "false", + "Path.Learn.Momentum.On": "false", + }}, + }, + "WtBalOn": { + {Sel: "Path", Desc: "weight bal on", + Params: Params{ + "Path.Learn.WtBal.On": "true", + }}, + }, } var trgCode = `params.Sets{ - {Desc: "these are the best params", Sheets: params.Sheets{ - "Network": ¶ms.Sheet{ - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: params.Params{ - "Path.Learn.Momentum.On": "true", - "Path.Learn.Norm.On": "true", - "Path.Learn.WtBal.On": "false", - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }params.Hypers{ - "Layer.Inhib.Layer.Gi": map["Min":"0.5" "StdDev":"0.1"], - }}, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: params.Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "Sim": ¶ms.Sheet{ - {Sel: "Sim", Desc: "best params always finish in this time", - Params: params.Params{ - "Sim.MaxEpcs": "50", - }}, - }, - }}, - {Desc: "output uses default inhib instead of lower", Sheets: params.Sheets{ - "Network": ¶ms.Sheet{ - {Sel: "#Output", Desc: "go back to default", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }}, - }, - "Sim": ¶ms.Sheet{ - {Sel: "Sim", Desc: "takes longer -- generally doesn't finish..", - Params: params.Params{ - "Sim.MaxEpcs": "100", - }params.Hypers{ - "Sim.MaxEps": map["Max":"2000" "Min":"40" "Val":"90"], - }}, - }, - }}, - {Desc: "no momentum or normalization", Sheets: params.Sheets{ - "Network": ¶ms.Sheet{ - {Sel: "Path", Desc: "no norm or momentum", - Params: params.Params{ - "Path.Learn.Momentum.On": "false", - "Path.Learn.Norm.On": "false", - }}, - }, - }}, - {Desc: "try with weight bal on", Sheets: params.Sheets{ - "Network": ¶ms.Sheet{ - {Sel: "Path", Desc: "weight bal on", - Params: params.Params{ - "Path.Learn.WtBal.On": "true", - }}, + "Base": { + {Sel: "Path", Desc: "norm and momentum on works 
better, but wt bal is not better for smaller nets", + Params: params.Params{ + "Path.Learn.Norm.On": "true", + "Path.Learn.Momentum.On": "true", + "Path.Learn.WtBal.On": "false", + }}, + {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", + Params: params.Params{ + "Layer.Inhib.Layer.Gi": "1.8", + }, + Hypers: params.Hypers{ + "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, + }, }, - }}, + {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", + Params: params.Params{ + "Layer.Inhib.Layer.Gi": "1.4", + }}, + {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + Params: params.Params{ + "Path.WtScale.Rel": "0.2", + }}, + }, + "DefaultInhib": { + {Sel: "#Output", Desc: "go back to default", + Params: params.Params{ + "Layer.Inhib.Layer.Gi": "1.8", + }}, + }, + "NoMomentum": { + {Sel: "Path", Desc: "no norm or momentum", + Params: params.Params{ + "Path.Learn.Norm.On": "false", + "Path.Learn.Momentum.On": "false", + }}, + }, + "WtBalOn": { + {Sel: "Path", Desc: "weight bal on", + Params: params.Params{ + "Path.Learn.WtBal.On": "true", + }}, + }, } ` @@ -160,7 +118,7 @@ func TestParamSetsWriteGo(t *testing.T) { } func TestParamSetsSet(t *testing.T) { - cval, err := paramSets.ParamValue("Base", "Network", "Path", "Path.Learn.WtBal.On") + cval, err := paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") if err != nil { t.Error(err) } @@ -168,34 +126,30 @@ func TestParamSetsSet(t *testing.T) { if cval != "false" { t.Errorf("value should have been false: %s\n", cval) } - err = paramSets.SetString("Base", "Network", "Path", "Path.Learn.WtBal.On", "true") + err = paramSets.SetString("Base", "Path", "Path.Learn.WtBal.On", "true") if err != nil { t.Error(err) } - cval, err = paramSets.ParamValue("Base", "Network", "Path", "Path.Learn.WtBal.On") + cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") // fmt.Printf("new value: %s\n", cval) if cval != "true" { t.Errorf("value should have been true: %s\n", cval) } - err = paramSets.SetFloat("Base", "Network", "Path", "Path.Learn.WtBal.On", 5.1) + err = paramSets.SetFloat("Base", "Path", "Path.Learn.WtBal.On", 5.1) if err != nil { t.Error(err) } - cval, err = paramSets.ParamValue("Base", "Network", "Path", "Path.Learn.WtBal.On") + cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") // fmt.Printf("new value: %s\n", cval) if cval != "5.1" { t.Errorf("value should have been 5.1: %s\n", cval) } - cval, err = paramSets.ParamValue("Basre", "Network2", "Path", "Path.Learn.WtBal.On") + cval, err = paramSets.ParamValue("Basre", "Path", "Path.Learn.WtBal.On") if err == nil { t.Errorf("Should have had an error") } // fmt.Printf("error: %s\n", err) - cval, err = paramSets.ParamValue("Base", "Network2", "Path", "Path.Learn.WtBal.On") - if err == nil { - t.Errorf("Should have had an error") - } - cval, err = paramSets.ParamValue("Base", "Network", "Paths", "Path.Learn.WtBal.On") + cval, err = paramSets.ParamValue("Base", "Paths", "Path.Learn.WtBal.On") if err == nil { t.Errorf("Should have had an error") } @@ -329,7 +283,7 @@ func TestFlexHypers(t *testing.T) { FlexVal{Nm: "Hidden2", Type: "Layer", Cls: "Hidden", Obj: Hypers{}}, FlexVal{Nm: "Output", Type: "Layer", Cls: "Target", Obj: Hypers{}}, }) - basenet := paramSets.SetByName("Base").Sheets["Network"] + basenet := paramSets["Base"] hypers.ApplySheet(basenet, false) dfs := hypers.JSONString() diff --git a/params/styler.go 
b/params/styler.go
index adf2091b..ed50d63f 100644
--- a/params/styler.go
+++ b/params/styler.go
@@ -12,27 +12,29 @@ import "strings"
 // Adding Set versions of Name and Class methods is a good idea but not
 // needed for this interface, so they are not included here.
 type Styler interface {
-	// TypeName returns the name of this type. CSS Sel selector with no prefix
-	// operates on type name. This type is used *in addition* to the actual
-	// Go type name of the object, and is a kind of type-category (e.g., Layer
-	// or Path in emergent network objects)
-	TypeName() string
-
-	// Class returns the space-separated list of class selectors (tags).
+	// StyleType returns the name of this type for CSS-style matching.
+	// This is used for CSS Sel selector with no prefix.
+	// This type is used *in addition* to the actual Go type name
+	// of the object, and is a kind of type-category (e.g., Layer
+	// or Path in emergent network objects).
+	StyleType() string
+
+	// StyleClass returns the space-separated list of class selectors (tags).
 	// Parameters with a . prefix target class tags.
-	// Do NOT include the. in the Class tags on Styler objects however
-	// -- those are only in the Sel selector on the params.Sel.
-	Class() string
+	// Do NOT include the . in the Class tags on Styler objects;
+	// the . is only used in the Sel selector on the params.Sel.
+	StyleClass() string
 
-	// Name returns the name of this object.
+	// StyleName returns the name of this object.
 	// Parameters with a # prefix target object names, which are typically
-	// unique. Note, do not include the # prefix in the Styler name.
-	Name() string
+	// unique. Note, do not include the # prefix in the actual object name,
+	// only in the Sel selector on params.Sel.
+	StyleName() string
 }
 
-// The params.StylerObj interface extends Styler to include an arbitary
+// The params.StylerObject interface extends Styler to include an arbitrary
 // function to access the underlying object type. 
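As a concrete illustration of the renamed methods, a user-level object can satisfy Styler and StylerObject as follows. The SimParams type and its fields are hypothetical, invented for this sketch, and not part of the emergent API:

```go
package sim

// SimParams is a hypothetical sim-level parameter target.
type SimParams struct {
	Nm      string
	MaxEpcs int
}

// StyleType is the type-category: a plain "Sim" selector
// in a params.Sel matches this object.
func (sp *SimParams) StyleType() string { return "Sim" }

// StyleClass returns space-separated class tags (none here);
// note: no "." prefix on the tags themselves.
func (sp *SimParams) StyleClass() string { return "" }

// StyleName matches "#Name" selectors; no "#" prefix here.
func (sp *SimParams) StyleName() string { return sp.Nm }

// Object satisfies params.StylerObject: parameter paths such
// as "Sim.MaxEpcs" are applied to this value's fields.
func (sp *SimParams) Object() any { return sp }
```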
-type StylerObj interface { +type StylerObject interface { Styler // Object returns the object that will have its field values set by diff --git a/params/tweak_test.go b/params/tweak_test.go index 075796b4..08251009 100644 --- a/params/tweak_test.go +++ b/params/tweak_test.go @@ -12,40 +12,38 @@ import ( ) var tweakSets = Sets{ - "Base": {Desc: "these are the best params", Sheets: Sheets{ - "Network": &Sheet{ - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: Params{ - "Path.Learn.LRate": "0.02", - "Path.Learn.Momentum": "0.9", - }, - Hypers: Hypers{ - "Path.Learn.LRate": {"Tweak": "log"}, - "Path.Learn.Momentum": {"Tweak": "incr"}, - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Tweak": "[1.75, 1.85]"}, - }}, - {Sel: "#Hidden", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }, - Hypers: Hypers{ - "Layer.Inhib.Layer.Gi": {"Tweak": "incr"}, - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: Params{ - "Path.WtScale.Rel": "0.2", - }, - Hypers: Hypers{ - "Path.WtScale.Rel": {"Tweak": "log"}, - }}, - }, - }}, + "Base": { + {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", + Params: Params{ + "Path.Learn.LRate": "0.02", + "Path.Learn.Momentum": "0.9", + }, + Hypers: Hypers{ + "Path.Learn.LRate": {"Tweak": "log"}, + "Path.Learn.Momentum": {"Tweak": "incr"}, + }}, + {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", + Params: Params{ + "Layer.Inhib.Layer.Gi": "1.8", + }, + Hypers: Hypers{ + "Layer.Inhib.Layer.Gi": {"Tweak": "[1.75, 1.85]"}, + }}, + {Sel: "#Hidden", Desc: "output definitely needs lower inhib -- true for smaller layers in general", + Params: Params{ + "Layer.Inhib.Layer.Gi": "1.4", + }, + Hypers: Hypers{ + "Layer.Inhib.Layer.Gi": {"Tweak": "incr"}, + }}, + {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", + Params: Params{ + "Path.WtScale.Rel": "0.2", + }, + Hypers: Hypers{ + "Path.WtScale.Rel": {"Tweak": "log"}, + }}, + }, } func TestTweak(t *testing.T) { @@ -73,8 +71,7 @@ func TestTweak(t *testing.T) { } } -var trgSearch = `[{"Param":"Layer.Inhib.Layer.Gi","Sel":{"Sel":"#Hidden","Desc":"output definitely needs lower inhib -- true for smaller layers in general","Params":{"Layer.Inhib.Layer.Gi":"1.4"},"Hypers":{"Layer.Inhib.Layer.Gi":{"Tweak":"incr"}}},"Search":[{"Name":"Hidden","Type":"Layer","Path":"Layer.Inhib.Layer.Gi","Start":1.4,"Values":[1.3,1.5]}]},{"Param":"Path.WtScale.Rel","Sel":{"Sel":".Back","Desc":"top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates","Params":{"Path.WtScale.Rel":"0.2"},"Hypers":{"Path.WtScale.Rel":{"Tweak":"log"}}},"Search":[{"Name":"HiddenToInput","Type":"Path","Path":"Path.WtScale.Rel","Start":0.2,"Values":[0.1,0.5]}]},{"Param":"Layer.Inhib.Layer.Gi","Sel":{"Sel":"Layer","Desc":"using default 1.8 inhib for all of network -- can explore","Params":{"Layer.Inhib.Layer.Gi":"1.8"},"Hypers":{"Layer.Inhib.Layer.Gi":{"Tweak":"[1.75, 
1.85]"}}},"Search":[{"Name":"Input","Type":"Layer","Path":"Layer.Inhib.Layer.Gi","Start":1.8,"Values":[1.75,1.85]}]},{"Param":"Path.Learn.LRate","Sel":{"Sel":"Path","Desc":"norm and momentum on works better, but wt bal is not better for smaller nets","Params":{"Path.Learn.LRate":"0.02","Path.Learn.Momentum":"0.9"},"Hypers":{"Path.Learn.LRate":{"Tweak":"log"},"Path.Learn.Momentum":{"Tweak":"incr"}}},"Search":[{"Name":"HiddenToInput","Type":"Path","Path":"Path.Learn.LRate","Start":0.02,"Values":[0.01,0.05]},{"Name":"InputToHidden","Type":"Path","Path":"Path.Learn.LRate","Start":0.02,"Values":[0.01,0.05]}]},{"Param":"Path.Learn.Momentum","Sel":{"Sel":"Path","Desc":"norm and momentum on works better, but wt bal is not better for smaller nets","Params":{"Path.Learn.LRate":"0.02","Path.Learn.Momentum":"0.9"},"Hypers":{"Path.Learn.LRate":{"Tweak":"log"},"Path.Learn.Momentum":{"Tweak":"incr"}}},"Search":[{"Name":"HiddenToInput","Type":"Path","Path":"Path.Learn.Momentum","Start":0.9,"Values":[0.8,1]},{"Name":"InputToHidden","Type":"Path","Path":"Path.Learn.Momentum","Start":0.9,"Values":[0.8,1]}]}] -` +var trgSearch = "[\n\t{\n\t\t\"Param\": \"Layer.Inhib.Layer.Gi\",\n\t\t\"Sel\": {\n\t\t\t\"Sel\": \"#Hidden\",\n\t\t\t\"Desc\": \"output definitely needs lower inhib -- true for smaller layers in general\",\n\t\t\t\"Params\": {\n\t\t\t\t\"Layer.Inhib.Layer.Gi\": \"1.4\"\n\t\t\t},\n\t\t\t\"Hypers\": {\n\t\t\t\t\"Layer.Inhib.Layer.Gi\": {\n\t\t\t\t\t\"Tweak\": \"incr\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"Search\": [\n\t\t\t{\n\t\t\t\t\"Name\": \"Hidden\",\n\t\t\t\t\"Type\": \"Layer\",\n\t\t\t\t\"Path\": \"Layer.Inhib.Layer.Gi\",\n\t\t\t\t\"Start\": 1.4,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t1.3,\n\t\t\t\t\t1.5\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t},\n\t{\n\t\t\"Param\": \"Path.WtScale.Rel\",\n\t\t\"Sel\": {\n\t\t\t\"Sel\": \".Back\",\n\t\t\t\"Desc\": \"top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates\",\n\t\t\t\"Params\": {\n\t\t\t\t\"Path.WtScale.Rel\": \"0.2\"\n\t\t\t},\n\t\t\t\"Hypers\": {\n\t\t\t\t\"Path.WtScale.Rel\": {\n\t\t\t\t\t\"Tweak\": \"log\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"Search\": [\n\t\t\t{\n\t\t\t\t\"Name\": \"HiddenToInput\",\n\t\t\t\t\"Type\": \"Path\",\n\t\t\t\t\"Path\": \"Path.WtScale.Rel\",\n\t\t\t\t\"Start\": 0.2,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t0.1,\n\t\t\t\t\t0.5\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t},\n\t{\n\t\t\"Param\": \"Layer.Inhib.Layer.Gi\",\n\t\t\"Sel\": {\n\t\t\t\"Sel\": \"Layer\",\n\t\t\t\"Desc\": \"using default 1.8 inhib for all of network -- can explore\",\n\t\t\t\"Params\": {\n\t\t\t\t\"Layer.Inhib.Layer.Gi\": \"1.8\"\n\t\t\t},\n\t\t\t\"Hypers\": {\n\t\t\t\t\"Layer.Inhib.Layer.Gi\": {\n\t\t\t\t\t\"Tweak\": \"[1.75, 1.85]\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"Search\": [\n\t\t\t{\n\t\t\t\t\"Name\": \"Input\",\n\t\t\t\t\"Type\": \"Layer\",\n\t\t\t\t\"Path\": \"Layer.Inhib.Layer.Gi\",\n\t\t\t\t\"Start\": 1.8,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t1.75,\n\t\t\t\t\t1.85\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t},\n\t{\n\t\t\"Param\": \"Path.Learn.LRate\",\n\t\t\"Sel\": {\n\t\t\t\"Sel\": \"Path\",\n\t\t\t\"Desc\": \"norm and momentum on works better, but wt bal is not better for smaller nets\",\n\t\t\t\"Params\": {\n\t\t\t\t\"Path.Learn.LRate\": \"0.02\",\n\t\t\t\t\"Path.Learn.Momentum\": \"0.9\"\n\t\t\t},\n\t\t\t\"Hypers\": {\n\t\t\t\t\"Path.Learn.LRate\": {\n\t\t\t\t\t\"Tweak\": \"log\"\n\t\t\t\t},\n\t\t\t\t\"Path.Learn.Momentum\": {\n\t\t\t\t\t\"Tweak\": \"incr\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"Search\": [\n\t\t\t{\n\t\t\t\t\"Name\": 
\"HiddenToInput\",\n\t\t\t\t\"Type\": \"Path\",\n\t\t\t\t\"Path\": \"Path.Learn.LRate\",\n\t\t\t\t\"Start\": 0.02,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t0.01,\n\t\t\t\t\t0.05\n\t\t\t\t]\n\t\t\t},\n\t\t\t{\n\t\t\t\t\"Name\": \"InputToHidden\",\n\t\t\t\t\"Type\": \"Path\",\n\t\t\t\t\"Path\": \"Path.Learn.LRate\",\n\t\t\t\t\"Start\": 0.02,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t0.01,\n\t\t\t\t\t0.05\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t},\n\t{\n\t\t\"Param\": \"Path.Learn.Momentum\",\n\t\t\"Sel\": {\n\t\t\t\"Sel\": \"Path\",\n\t\t\t\"Desc\": \"norm and momentum on works better, but wt bal is not better for smaller nets\",\n\t\t\t\"Params\": {\n\t\t\t\t\"Path.Learn.LRate\": \"0.02\",\n\t\t\t\t\"Path.Learn.Momentum\": \"0.9\"\n\t\t\t},\n\t\t\t\"Hypers\": {\n\t\t\t\t\"Path.Learn.LRate\": {\n\t\t\t\t\t\"Tweak\": \"log\"\n\t\t\t\t},\n\t\t\t\t\"Path.Learn.Momentum\": {\n\t\t\t\t\t\"Tweak\": \"incr\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"Search\": [\n\t\t\t{\n\t\t\t\t\"Name\": \"HiddenToInput\",\n\t\t\t\t\"Type\": \"Path\",\n\t\t\t\t\"Path\": \"Path.Learn.Momentum\",\n\t\t\t\t\"Start\": 0.9,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t0.8,\n\t\t\t\t\t1\n\t\t\t\t]\n\t\t\t},\n\t\t\t{\n\t\t\t\t\"Name\": \"InputToHidden\",\n\t\t\t\t\"Type\": \"Path\",\n\t\t\t\t\"Path\": \"Path.Learn.Momentum\",\n\t\t\t\t\"Start\": 0.9,\n\t\t\t\t\"Values\": [\n\t\t\t\t\t0.8,\n\t\t\t\t\t1\n\t\t\t\t]\n\t\t\t}\n\t\t]\n\t}\n]\n" func TestTweakHypers(t *testing.T) { hypers := Flex{} @@ -84,7 +81,7 @@ func TestTweakHypers(t *testing.T) { FlexVal{Nm: "InputToHidden", Type: "Path", Cls: "Forward", Obj: Hypers{}}, FlexVal{Nm: "HiddenToInput", Type: "Path", Cls: "Back", Obj: Hypers{}}, }) - basenet := tweakSets.SetByName("Base").Sheets["Network"] + basenet := tweakSets["Base"] hypers.ApplySheet(basenet, false) // fmt.Println("hypers:", reflectx.StringJSON(hypers)) diff --git a/params/typegen.go b/params/typegen.go index d9e71bea..c30bd5f8 100644 --- a/params/typegen.go +++ b/params/typegen.go @@ -6,7 +6,7 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.FlexVal", IDName: "flex-val", Doc: "FlexVal is a specific flexible value for the Flex parameter map\nthat implements the StylerObj interface for CSS-style selection logic.\nThe field names are abbreviated because full names are used in StylerObj.", Fields: []types.Field{{Name: "Nm", Doc: "name of this specific object, matches #Name selections"}, {Name: "Type", Doc: "type name of this object, matches plain TypeName selections"}, {Name: "Cls", Doc: "space-separated list of class name(s), match the .Class selections"}, {Name: "Obj", Doc: "actual object with data that is set by the parameters"}, {Name: "History", Doc: "History of params applied"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.FlexVal", IDName: "flex-val", Doc: "FlexVal is a specific flexible value for the Flex parameter map\nthat implements the StylerObject interface for CSS-style selection logic.\nThe field names are abbreviated because full names are used in StylerObject.", Fields: []types.Field{{Name: "Nm", Doc: "name of this specific object, matches #Name selections"}, {Name: "Type", Doc: "type name of this object, matches plain TypeName selections"}, {Name: "Cls", Doc: "space-separated list of class name(s), match the .Class selections"}, {Name: "Obj", Doc: "actual object with data that is set by the parameters"}, {Name: "History", Doc: "History of params applied"}}}) var _ = types.AddType(&types.Type{Name: 
"github.com/emer/emergent/v2/params.Flex", IDName: "flex", Doc: "Flex supports arbitrary named parameter values that can be set\nby a Set of parameters, as a map of any objects.\nFirst initialize the map with set of names and a type to create\nblank values, then apply the Set to it."}) @@ -26,14 +26,12 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Shee var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sheets", IDName: "sheets", Doc: "Sheets is a map of named sheets -- used in the Set"}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Set", IDName: "set", Doc: "Set is a collection of Sheets that constitute a coherent set of parameters --\na particular specific configuration of parameters, which the user selects to use.\nThe Set name is stored in the Sets map from which it is typically accessed.\nA good strategy is to have a \"Base\" set that has all the best parameters so far,\nand then other sets can modify relative to that one. It is up to the Sim code to\napply parameter sets in whatever order is desired.\n\nWithin a params.Set, multiple different params.Sheets can be organized,\nwith each CSS-style sheet achieving a relatively complete parameter styling\nof a given element of the overal model, e.g., \"Network\", \"Sim\", \"Env\".\nOr Network could be further broken down into \"Learn\" vs. \"Act\" etc,\nor according to different brain areas (\"Hippo\", \"PFC\", \"BG\", etc).\nAgain, this is entirely at the discretion of the modeler and must be\nperformed under explict program control, especially because order is so critical.\n\nNote that there is NO deterministic ordering of the Sheets due to the use of\na Go map structure, which specifically randomizes order, so simply iterating over them\nand applying may produce unexpected results -- it is better to lookup by name.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Fields: []types.Field{{Name: "Desc", Doc: "description of this param set -- when should it be used? how is it different from the other sets?"}, {Name: "Sheets", Doc: "Sheet's grouped according to their target and / or function. For example,\n\"Network\" for all the network params (or \"Learn\" vs. \"Act\" for more fine-grained), and \"Sim\" for overall simulation control parameters, \"Env\" for environment parameters, etc. It is completely up to your program to lookup these names and apply them as appropriate."}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sets", IDName: "sets", Doc: "Sets is a collection of Set's that can be chosen among\ndepending on different desired configurations etc. Thus, each Set\nrepresents a collection of different possible specific configurations,\nand different such configurations can be chosen by name to apply as desired."}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sets", IDName: "sets", Doc: "Sets is a collection of Sheets that can be chosen among\ndepending on different desired configurations etc. 
Thus, each Set\nrepresents a collection of different possible specific configurations,\nand different such configurations can be chosen by name to apply as desired."}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.SearchValues", IDName: "search-values", Doc: "SearchValues is a list of parameter values to search for one parameter\non a given object (specified by Name), for float-valued params.", Fields: []types.Field{{Name: "Name", Doc: "name of object with the parameter"}, {Name: "Type", Doc: "type of object with the parameter. This is a Base type name (e.g., Layer, Path),\nthat is at the start of the path in Network params."}, {Name: "Path", Doc: "path to the parameter within the object"}, {Name: "Start", Doc: "starting value, e.g., for restoring after searching\nbefore moving on to another parameter, for grid search."}, {Name: "Values", Doc: "values of the parameter to search"}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Styler", IDName: "styler", Doc: "The params.Styler interface exposes TypeName, Class, and Name methods\nthat allow the params.Sel CSS-style selection specifier to determine\nwhether a given parameter applies.\nAdding Set versions of Name and Class methods is a good idea but not\nneeded for this interface, so they are not included here.", Methods: []types.Method{{Name: "TypeName", Doc: "TypeName returns the name of this type. CSS Sel selector with no prefix\noperates on type name. This type is used *in addition* to the actual\nGo type name of the object, and is a kind of type-category (e.g., Layer\nor Path in emergent network objects)", Returns: []string{"string"}}, {Name: "Class", Doc: "Class returns the space-separated list of class selectors (tags).\nParameters with a . prefix target class tags.\nDo NOT include the. in the Class tags on Styler objects however\n-- those are only in the Sel selector on the params.Sel.", Returns: []string{"string"}}, {Name: "Name", Doc: "Name returns the name of this object.\nParameters with a # prefix target object names, which are typically\nunique. 
Note, do not include the # prefix in the Styler name.", Returns: []string{"string"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObj", IDName: "styler-obj", Doc: "The params.StylerObj interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "Object", Doc: "Object returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObject", IDName: "styler-obj", Doc: "The params.StylerObject interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "Object", Doc: "Object returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Tweaks", IDName: "tweaks", Doc: "Tweaks holds parameter tweak values associated with one parameter selector.\nHas all the object values affected for a given parameter within one\nselector, that has a tweak hyperparameter set.", Fields: []types.Field{{Name: "Param", Doc: "the parameter path for this param"}, {Name: "Sel", Doc: "the param selector that set the specific value upon which tweak is based"}, {Name: "Search", Doc: "the search values for all objects covered by this selector"}}}) diff --git a/relpos/rel.go b/relpos/rel.go index d8d1d79a..6b2bde5d 100644 --- a/relpos/rel.go +++ b/relpos/rel.go @@ -16,11 +16,13 @@ import ( "cogentcore.org/core/math32" ) -// Rel defines a position relationship among layers, in terms of X,Y width and height of layer -// and associated position within a given X-Y plane, -// and Z vertical stacking of layers above and below each other. -type Rel struct { //git:add - +// Pos specifies the relative spatial relationship to another +// layer, which determines positioning. Every layer except one +// "anchor" layer should be positioned relative to another, +// e.g., RightOf, Above, etc. This provides robust positioning +// in the face of layer size changes etc. +// Layers are arranged in X-Y planes, stacked vertically along the Z axis. +type Pos struct { //git:add // spatial relationship between this layer and the other layer Rel Relations @@ -44,11 +46,16 @@ type Rel struct { //git:add // for horizontial (x-axis) alignment, amount we are offset relative to perfect alignment YOffset float32 + + // Pos is the computed position of lower-left-hand corner of layer + // in 3D space, computed from the relation to other layer. + Pos math32.Vector3 `edit:"-"` } -// Defaults sets default scale, space, offset values -- rel, align must be set specifically +// Defaults sets default scale, space, offset values. +// The relationship and align must be set specifically. 
 // These are automatically applied if Scale = 0
-func (rp *Rel) Defaults() {
+func (rp *Pos) Defaults() {
 	if rp.Scale == 0 {
 		rp.Scale = 1
 	}
@@ -57,7 +64,7 @@ func (rp *Rel) Defaults() {
 	}
 }
 
-func (rp *Rel) ShouldDisplay(field string) bool {
+func (rp *Pos) ShouldDisplay(field string) bool {
 	switch field {
 	case "XAlign":
 		return rp.Rel == FrontOf || rp.Rel == Behind || rp.Rel == Above || rp.Rel == Below
@@ -68,57 +75,75 @@ func (rp *Rel) ShouldDisplay(field string) bool {
 	}
 }
 
-// NewRightOf returns a RightOf relationship with default YAlign: Front alignment and given spacing
-func NewRightOf(other string, space float32) Rel {
-	return Rel{Rel: RightOf, Other: other, YAlign: Front, Space: space, Scale: 1}
+// SetRightOf sets a RightOf relationship with default YAlign:
+// Front alignment and given spacing.
+func (rp *Pos) SetRightOf(other string, space float32) {
+	rp.Rel = RightOf
+	rp.Other = other
+	rp.YAlign = Front
+	rp.Space = space
+	rp.Scale = 1
 }
 
-// NewBehind returns a Behind relationship with default XAlign: Left alignment and given spacing
-func NewBehind(other string, space float32) Rel {
-	return Rel{Rel: Behind, Other: other, XAlign: Left, Space: space, Scale: 1}
+// SetBehind sets a Behind relationship with default XAlign:
+// Left alignment and given spacing.
+func (rp *Pos) SetBehind(other string, space float32) {
+	rp.Rel = Behind
+	rp.Other = other
+	rp.XAlign = Left
+	rp.Space = space
+	rp.Scale = 1
 }
 
-// NewAbove returns an Above relationship with default XAlign: Left, YAlign: Front alignment
-func NewAbove(other string) Rel {
-	return Rel{Rel: Above, Other: other, XAlign: Left, YAlign: Front, YOffset: 1, Scale: 1}
+// SetAbove sets an Above relationship with default XAlign:
+// Left, YAlign: Front alignment.
+func (rp *Pos) SetAbove(other string) {
+	rp.Rel = Above
+	rp.Other = other
+	rp.XAlign = Left
+	rp.YAlign = Front
+	rp.YOffset = 1
+	rp.Scale = 1
 }
 
-// Pos returns the relative position compared to other position and size, based on settings
-// osz and sz must both have already been scaled by relevant Scale factor
-func (rp *Rel) Pos(op math32.Vector3, osz math32.Vector2, sz math32.Vector2) math32.Vector3 {
+// SetPos sets the relative position based on other layer
+// position and size, using current settings.
+// osz and sz must both have already been scaled by
+// relevant Scale factor. 
+func (rp *Pos) SetPos(op math32.Vector3, osz math32.Vector2, sz math32.Vector2) { if rp.Scale == 0 { rp.Defaults() } - rs := op + rp.Pos = op switch rp.Rel { case NoRel: - return op + return case RightOf: - rs.X = op.X + osz.X + rp.Space - rs.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) + rp.Pos.X = op.X + osz.X + rp.Space + rp.Pos.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) case LeftOf: - rs.X = op.X - sz.X - rp.Space - rs.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) + rp.Pos.X = op.X - sz.X - rp.Space + rp.Pos.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) case Behind: - rs.Y = op.Y + osz.Y + rp.Space - rs.X = rp.AlignXPos(op.X, osz.X, sz.X) + rp.Pos.Y = op.Y + osz.Y + rp.Space + rp.Pos.X = rp.AlignXPos(op.X, osz.X, sz.X) case FrontOf: - rs.Y = op.Y - sz.Y - rp.Space - rs.X = rp.AlignXPos(op.X, osz.X, sz.X) + rp.Pos.Y = op.Y - sz.Y - rp.Space + rp.Pos.X = rp.AlignXPos(op.X, osz.X, sz.X) case Above: - rs.Z += 1 - rs.X = rp.AlignXPos(op.X, osz.X, sz.X) - rs.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) + rp.Pos.Z += 1 + rp.Pos.X = rp.AlignXPos(op.X, osz.X, sz.X) + rp.Pos.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) case Below: - rs.Z -= 1 - rs.X = rp.AlignXPos(op.X, osz.X, sz.X) - rs.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) + rp.Pos.Z -= 1 + rp.Pos.X = rp.AlignXPos(op.X, osz.X, sz.X) + rp.Pos.Y = rp.AlignYPos(op.Y, osz.Y, sz.Y) } - return rs } -// AlignYPos returns the Y-axis (within-plane vertical or height) position according to alignment factors -func (rp *Rel) AlignYPos(yop, yosz, ysz float32) float32 { +// AlignYPos returns the Y-axis (within-plane vertical or height) +// position according to alignment factors. +func (rp *Pos) AlignYPos(yop, yosz, ysz float32) float32 { switch rp.YAlign { case Front: return yop + rp.YOffset @@ -130,8 +155,9 @@ func (rp *Rel) AlignYPos(yop, yosz, ysz float32) float32 { return yop } -// AlignXPos returns the X-axis (within-plane horizontal or width) position according to alignment factors -func (rp *Rel) AlignXPos(xop, xosz, xsz float32) float32 { +// AlignXPos returns the X-axis (within-plane horizontal or width) +// position according to alignment factors. +func (rp *Pos) AlignXPos(xop, xosz, xsz float32) float32 { switch rp.XAlign { case Left: return xop + rp.XOffset diff --git a/weights/json.go b/weights/json.go index 41003e71..3d10a495 100644 --- a/weights/json.go +++ b/weights/json.go @@ -10,8 +10,9 @@ import ( "log" ) -// Prec is the precision for weight output in text formats -- default is aggressive -// for Leabra models -- may need to increase for other models. +// Prec is the precision for weight output in text formats. +// The default is aggressive for Leabra models. +// May need to increase for other models. var Prec = 4 // NetReadJSON reads weights for entire network in a JSON format into Network structure diff --git a/weights/wts.go b/weights/weights.go similarity index 100% rename from weights/wts.go rename to weights/weights.go From c9cc45bfdc0cc11f84188094d07930fd4758818c Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 9 Aug 2024 14:15:56 -0700 Subject: [PATCH 02/10] emer.Path updated --- emer/enumgen.go | 48 -------- emer/layer.go | 26 ++-- emer/path.go | 307 +++++++++++++++++------------------------------- emer/typegen.go | 12 +- 4 files changed, 126 insertions(+), 267 deletions(-) delete mode 100644 emer/enumgen.go diff --git a/emer/enumgen.go b/emer/enumgen.go deleted file mode 100644 index 791f2cb4..00000000 --- a/emer/enumgen.go +++ /dev/null @@ -1,48 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. 
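Returning to the relpos changes above: with the Rel struct renamed to Pos, the New* constructors replaced by Set* methods, and the computed position stored in the new Pos field, layout code reads along these lines. This is a sketch, not code from the patch; the layer name and sizes are arbitrary example values, and Vec2 is the Vector2 constructor from cogentcore's math32:

```go
package main

import (
	"fmt"

	"cogentcore.org/core/math32"
	"github.com/emer/emergent/v2/relpos"
)

func main() {
	var input, hidden relpos.Pos
	input.Defaults() // anchor layer: remains at the zero position

	// Hidden goes to the right of Input, with 2 units of space.
	hidden.SetRightOf("Input", 2)

	// Compute and store Hidden's position from Input's position
	// and the two (already scaled) layer sizes.
	hidden.SetPos(input.Pos, math32.Vec2(10, 10), math32.Vec2(10, 10))

	fmt.Println(hidden.Pos) // X = 0 + 10 + 2 = 12; Y front-aligned at 0
}
```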
- -package emer - -import ( - "cogentcore.org/core/enums" -) - -var _PathTypeValues = []PathType{0, 1, 2, 3} - -// PathTypeN is the highest valid value for type PathType, plus one. -const PathTypeN PathType = 4 - -var _PathTypeValueMap = map[string]PathType{`Forward`: 0, `Back`: 1, `Lateral`: 2, `Inhib`: 3} - -var _PathTypeDescMap = map[PathType]string{0: `Forward is a feedforward, bottom-up pathway from sensory inputs to higher layers`, 1: `Back is a feedback, top-down pathway from higher layers back to lower layers`, 2: `Lateral is a lateral pathway within the same layer / area`, 3: `Inhib is an inhibitory pathway that drives inhibitory synaptic inputs instead of excitatory`} - -var _PathTypeMap = map[PathType]string{0: `Forward`, 1: `Back`, 2: `Lateral`, 3: `Inhib`} - -// String returns the string representation of this PathType value. -func (i PathType) String() string { return enums.String(i, _PathTypeMap) } - -// SetString sets the PathType value from its string representation, -// and returns an error if the string is invalid. -func (i *PathType) SetString(s string) error { - return enums.SetString(i, s, _PathTypeValueMap, "PathType") -} - -// Int64 returns the PathType value as an int64. -func (i PathType) Int64() int64 { return int64(i) } - -// SetInt64 sets the PathType value from an int64. -func (i *PathType) SetInt64(in int64) { *i = PathType(in) } - -// Desc returns the description of the PathType value. -func (i PathType) Desc() string { return enums.Desc(i, _PathTypeDescMap) } - -// PathTypeValues returns all possible values for the type PathType. -func PathTypeValues() []PathType { return _PathTypeValues } - -// Values returns all possible values for the type PathType. -func (i PathType) Values() []enums.Enum { return enums.Values(_PathTypeValues) } - -// MarshalText implements the [encoding.TextMarshaler] interface. -func (i PathType) MarshalText() ([]byte, error) { return []byte(i.String()), nil } - -// UnmarshalText implements the [encoding.TextUnmarshaler] interface. -func (i *PathType) UnmarshalText(text []byte) error { return enums.UnmarshalText(i, text, "PathType") } diff --git a/emer/layer.go b/emer/layer.go index a15c13e1..ad32cbde 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -4,8 +4,6 @@ package emer -//go:generate core generate - import ( "fmt" "io" @@ -28,13 +26,14 @@ var ( LayerDimNames4D = []string{"PoolY", "PoolX", "NeurY", "NeurX"} ) -// Layer defines the basic interface for neural network layers, +// Layer defines the minimal interface for neural network layers, // necessary to support the visualization (NetView), I/O, // and parameter setting functionality provided by emergent. -// Interfaces are automatically pointers, so think of this -// as a pointer to your specific layer type, +// Most of the standard expected functionality is defined in the +// LayerBase struct, and this interface only has methods that must be +// implemented specifically for a given algorithmic implementation. type Layer interface { - // StyleType, StyleClass, and StyleClass methods for parameter styling. + // StyleType, StyleClass, and StyleName methods for parameter styling. params.Styler // AsEmer returns the layer as an *emer.LayerBase, @@ -180,8 +179,7 @@ type Layer interface { // LayerBase defines the basic shared data for neural network layers, // used for managing the structural elements of a network, // and for visualization, I/O, etc. -// Nothing algorithm-specific is implemented here; -// all of that goes in your specific layer struct. 
+// Nothing algorithm-specific is implemented here.
 type LayerBase struct {
 	// EmerLayer provides access to the emer.Layer interface
 	// methods for functions defined in the LayerBase type.
@@ -221,11 +219,11 @@ type LayerBase struct {
 	// the list of layers in the network.
 	Index int `display:"-" inactive:"-"`
 
-	// SampleIndexes returns the current set of "sample" unit indexes,
+	// SampleIndexes are the current set of "sample" unit indexes,
 	// which are a smaller subset of units that represent the behavior
 	// of the layer, for computationally intensive statistics and displays
 	// (e.g., PCA, ActRF, NetView rasters), when the layer is large.
-	// Returns nil if none has been set (in which case all units are used).
+	// If none have been set, then all units are used.
 	// See utility function CenterPoolIndexes that returns indexes of
 	// units in the central pools of a 4D layer.
 	SampleIndexes []int
@@ -563,7 +561,7 @@ func Layer2DSampleIndexes(ly Layer, maxSize int) (idxs, shape []int) {
 func SendNameTry(l Layer, sender string) (Path, error) {
 	for pi := 0; pi < l.NRecvPaths(); pi++ {
 		pj := l.RecvPath(pi)
-		if pj.SendLay().AsEmer().Name == sender {
+		if pj.SendLayer().AsEmer().Name == sender {
 			return pj, nil
 		}
 	}
@@ -573,7 +571,7 @@
 func RecvNameTry(l Layer, recv string) (Path, error) {
 	for pi := 0; pi < l.NSendPaths(); pi++ {
 		pj := l.SendPath(pi)
-		if pj.RecvLay().AsEmer().Name == recv {
+		if pj.RecvLayer().AsEmer().Name == recv {
 			return pj, nil
 		}
 	}
@@ -583,7 +581,7 @@
 func SendNameTypeTry(l Layer, sender, typ string) (Path, error) {
 	for pi := 0; pi < l.NRecvPaths(); pi++ {
 		pj := l.RecvPath(pi)
-		if pj.SendLay().AsEmer().Name == sender && pj.PathTypeName() == typ {
+		if pj.SendLayer().AsEmer().Name == sender && pj.TypeName() == typ {
 			return pj, nil
 		}
 	}
@@ -593,7 +591,7 @@
 func RecvNameTypeTry(l Layer, recv, typ string) (Path, error) {
 	for pi := 0; pi < l.NSendPaths(); pi++ {
 		pj := l.SendPath(pi)
-		if pj.RecvLay().AsEmer().Name == recv && pj.PathTypeName() == typ {
+		if pj.RecvLayer().AsEmer().Name == recv && pj.TypeName() == typ {
 			return pj, nil
 		}
 	}
diff --git a/emer/path.go b/emer/path.go
index 60233b19..52ab83c6 100644
--- a/emer/path.go
+++ b/emer/path.go
@@ -5,66 +5,47 @@ package emer
 
 import (
-	"fmt"
 	"io"
 
-	"cogentcore.org/core/base/reflectx"
+	"cogentcore.org/core/math32"
 	"github.com/emer/emergent/v2/params"
 	"github.com/emer/emergent/v2/paths"
 	"github.com/emer/emergent/v2/weights"
 )
 
-// Path defines the basic interface for a pathway which connects two layers.
-// Name is set automatically to: SendLay().Name() + "To" + RecvLay().Name()
+// Path defines the minimal interface for a pathway
+// which connects two layers, using a specific Pattern
+// of connectivity, and with its own set of parameters.
+// This supports visualization (NetView), I/O,
+// and parameter setting functionality provided by emergent.
+// Most of the standard expected functionality is defined in the
+// PathBase struct, and this interface only has methods that must be
+// implemented specifically for a given algorithmic implementation.
 type Path interface {
-	params.Styler // TypeName, Name, and Class methods for parameter styling
+	// StyleType, StyleClass, and StyleName methods for parameter styling. 
+	params.Styler

-	// Init MUST be called to initialize the path's pointer to itself as an emer.Path
-	// which enables the proper interface methods to be called.
-	Init(path Path)
+	// AsEmer returns the path as an *emer.PathBase,
+	// to access base functionality.
+	AsEmer() *PathBase

-	// SendLay returns the sending layer for this pathway
-	SendLay() Layer
-
-	// RecvLay returns the receiving layer for this pathway
-	RecvLay() Layer
-
-	// Pattern returns the pattern of connectivity for interconnecting the layers
-	Pattern() paths.Pattern
-
-	// SetPattern sets the pattern of connectivity for interconnecting the layers.
-	// Returns Path so it can be chained to set other properties too
-	SetPattern(pat paths.Pattern) Path
-
-	// Type returns the functional type of pathway according to PathType (extensible in
-	// more specialized algorithms)
-	Type() PathType
-
-	// SetType sets the functional type of pathway according to PathType
-	// Returns Path so it can be chained to set other properties too
-	SetType(typ PathType) Path
-
-	// PathTypeName returns the string rep of functional type of pathway
-	// according to PathType (extensible in more specialized algorithms, by
-	// redefining this method as needed).
-	PathTypeName() string
-
-	// AddClass adds a CSS-style class name(s) for this path,
-	// ensuring that it is not a duplicate, and properly space separated.
-	// Returns Path so it can be chained to set other properties too
-	AddClass(cls ...string) Path
-
-	// Label satisfies the core.Labeler interface for getting the name of objects generically
+	// Label satisfies the core.Labeler interface for getting
+	// the name of objects generically.
 	Label() string

-	// IsOff returns true if pathway or either send or recv layer has been turned Off.
-	// Useful for experimentation
-	IsOff() bool
+	// TypeName is the type or category of path, defined
+	// by the algorithm (and usually set by an enum).
+	TypeName() string
+
+	// SendLayer returns the sending layer for this pathway,
+	// as an emer.Layer interface. The actual Path implementation
+	// can use a Send field with the actual Layer struct type.
+	SendLayer() Layer

-	// SetOff sets the pathway Off status (i.e., lesioned). Careful: Layer.SetOff(true) will
-	// reactivate that layer's pathways, so pathway-level lesioning should always be called
-	// after layer-level lesioning.
-	SetOff(off bool)
+	// RecvLayer returns the receiving layer for this pathway,
+	// as an emer.Layer interface. The actual Path implementation
+	// can use a Recv field with the actual Layer struct type.
+	RecvLayer() Layer

 	// SynVarNames returns the names of all the variables on the synapse
 	// This is typically a global list so do not modify!
@@ -95,45 +76,42 @@ type Path interface {
 	// for this paths. This is needed for extending indexes in derived types.
 	SynVarNum() int

-	// Syn1DNum returns the number of synapses for this path as a 1D array.
-	// This is the max idx for SynVal1D and the number of vals set by SynValues.
-	Syn1DNum() int
+	// NumSyns returns the number of synapses for this path.
+	// This is the max idx for SynValue1D and the number
+	// of vals set by SynValues.
+	NumSyns() int

-	// SynVal1D returns value of given variable index (from SynVarIndex) on given SynIndex.
-	// Returns NaN on invalid index.
-	// This is the core synapse var access method used by other methods,
-	// so it is the only one that needs to be updated for derived layer types.
- SynVal1D(varIndex int, synIndex int) float32 - - // SynValues sets values of given variable name for each synapse, using the natural ordering - // of the synapses (sender based for Leabra), + // SynValues sets values of given variable name for each synapse, + // using the natural ordering of the synapses (sender based for Axon), // into given float32 slice (only resized if not big enough). // Returns error on invalid var name. SynValues(vals *[]float32, varNm string) error - // SynVal returns value of given variable name on the synapse - // between given send, recv unit indexes (1D, flat indexes). - // Returns math32.NaN() for access errors. - SynValue(varNm string, sidx, ridx int) float32 + // SynValue1D returns value of given variable index + // (from SynVarIndex) on given SynIndex. + // Returns NaN on invalid index. + // This is the core synapse var access method used by other methods, + // so it is the only one that needs to be updated for derived types. + SynValue1D(varIndex int, synIndex int) float32 - // SetSynVal sets value of given variable name on the synapse + // todo: we don't need this in interface right? + // SetSynValue sets value of given variable name on the synapse // between given send, recv unit indexes (1D, flat indexes). // Typically only supports base synapse variables and is not extended // for derived types. // Returns error for access errors. - SetSynValue(varNm string, sidx, ridx int, val float32) error - - // Defaults sets default parameter values for all Path parameters - Defaults() + // SetSynValue(varNm string, sidx, ridx int, val float32) error // UpdateParams() updates parameter values for all Path parameters, // based on any other params that might have changed. UpdateParams() // ApplyParams applies given parameter style Sheet to this pathway. - // Calls UpdateParams if anything set to ensure derived parameters are all updated. - // If setMsg is true, then a message is printed to confirm each parameter that is set. - // it always prints a message if a parameter fails to be set. + // Calls UpdateParams if anything set to ensure derived + // parameters are all updated. + // If setMsg is true, then a message is printed to confirm each + // parameter that is set. + // It always prints a message if a parameter fails to be set. // returns true if any params were set, and error if there were any errors. ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) @@ -148,149 +126,84 @@ type Path interface { // AllParams returns a listing of all parameters in the Projection AllParams() string - // WriteWtsJSON writes the weights from this pathway from the receiver-side perspective - // in a JSON text format. We build in the indentation logic to make it much faster and + // WriteWtsJSON writes the weights from this pathway + // from the receiver-side perspective in a JSON text format. + // We build in the indentation logic to make it much faster and // more efficient. WriteWtsJSON(w io.Writer, depth int) - // ReadWtsJSON reads the weights from this pathway from the receiver-side perspective - // in a JSON text format. This is for a set of weights that were saved *for one path only* - // and is not used for the network-level ReadWtsJSON, which reads into a separate - // structure -- see SetWts method. + // ReadWtsJSON reads the weights from this pathway + // from the receiver-side perspective in a JSON text format. 
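+	// The reader is a plain io.Reader: for gzip-compressed weights files,
+	// wrap the file in a gzip reader first (the network-level OpenWtsJSON
+	// helper handles .gz files automatically).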
+	// This is for a set of weights that were saved *for one path only*
+	// and is not used for the network-level ReadWtsJSON,
+	// which reads into a separate structure -- see SetWts method.
 	ReadWtsJSON(r io.Reader) error

-	// SetWts sets the weights for this pathway from weights.Path decoded values
+	// SetWts sets the weights for this pathway from weights.Path
+	// decoded values
 	SetWts(pw *weights.Path) error
-
-	// Build constructs the full connectivity among the layers as specified in this pathway.
-	Build() error
-}
-
-// Paths is a slice of pathways
-type Paths []Path
-
-// ElemLabel satisfies the core.SliceLabeler interface to provide labels for slice elements
-func (pl *Paths) ElemLabel(idx int) string {
-	if len(*pl) == 0 {
-		return "(empty)"
-	}
-	if idx < 0 || idx >= len(*pl) {
-		return ""
-	}
-	pj := (*pl)[idx]
-	if reflectx.AnyIsNil(pj) {
-		return "nil"
-	}
-	return pj.Name()
}

-// Add adds a pathway to the list
-func (pl *Paths) Add(p Path) {
-	(*pl) = append(*pl, p)
+// PathBase defines the basic shared data for a pathway
+// which connects two layers, using a specific Pattern
+// of connectivity, and with its own set of parameters.
+// The Name is typically set to SendLayer().Name + "To" + RecvLayer().Name
+// via the SetStandardName method.
+// Nothing algorithm-specific is implemented here.
+type PathBase struct {
+	// EmerPath provides access to the emer.Path interface
+	// methods for functions defined in the PathBase type.
+	// Must set this with a pointer to the actual instance
+	// when created, using InitPath function.
+	EmerPath Path
+
+	// Name of the path, which can be automatically set to
+	// SendLayer().Name + "To" + RecvLayer().Name via
+	// SetStandardName method.
+	Name string
+
+	// Class is for applying parameter styles across multiple paths
+	// that all get the same parameters. This can be space separated
+	// with multiple classes.
+	Class string
+
+	// Pattern specifies the pattern of connectivity
+	// for interconnecting the sending and receiving layers.
+	Pattern paths.Pattern
}

-// Send finds the pathway with given send layer
-func (pl *Paths) Send(send Layer) (Path, bool) {
-	for _, pj := range *pl {
-		if pj.SendLay() == send {
-			return pj, true
-		}
-	}
-	return nil, false
+// InitPath initializes the path, setting the EmerPath interface
+// to provide access to it for PathBase methods, along with the name.
+func InitPath(l Path, name string) {
+	lb := l.AsEmer()
+	lb.EmerPath = l
+	lb.Name = name
}

-// Recv finds the pathway with given recv layer
-func (pl *Paths) Recv(recv Layer) (Path, bool) {
-	for _, pj := range *pl {
-		if pj.RecvLay() == recv {
-			return pj, true
-		}
-	}
-	return nil, false
-}
+func (pt *PathBase) AsEmer() *PathBase { return pt }

-// SendName finds the pathway with given send layer name, nil if not found
-// see Try version for error checking.
-func (pl *Paths) SendName(sender string) Path {
-	pj, _ := pl.SendNameTry(sender)
-	return pj
-}
+// params.Styler:
+func (pt *PathBase) StyleType() string  { return "Path" }
+func (pt *PathBase) StyleClass() string { return pt.EmerPath.TypeName() + " " + pt.Class }
+func (pt *PathBase) StyleName() string  { return pt.Name }

-// RecvName finds the pathway with given recv layer name, nil if not found
-// see Try version for error checking.
-func (pl *Paths) RecvName(recv string) Path {
-	pj, _ := pl.RecvNameTry(recv)
-	return pj
+// AddClass adds a CSS-style class name(s) for this path,
+// ensuring that it is not a duplicate, and properly space separated.
+// Returns Path so it can be chained to set other properties too.
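+// For example (an illustrative sketch; the class names are arbitrary):
+//
+//	pt.AddClass("Forward", "ToOutput")
+//
+// makes the path match the ".Forward" and ".ToOutput" style selectors.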
+func (pt *PathBase) AddClass(cls ...string) *PathBase { + pt.Class = params.AddClass(pt.Class, cls...) + return pt } -// SendNameTry finds the pathway with given send layer name. -// returns error message if not found -func (pl *Paths) SendNameTry(sender string) (Path, error) { - for _, pj := range *pl { - if pj.SendLay().Name() == sender { - return pj, nil - } - } - return nil, fmt.Errorf("sending layer: %v not found in list of pathways", sender) -} - -// SendNameTypeTry finds the pathway with given send layer name and Type string. -// returns error message if not found. -func (pl *Paths) SendNameTypeTry(sender, typ string) (Path, error) { - for _, pj := range *pl { - if pj.SendLay().Name() == sender { - tstr := pj.PathTypeName() - if tstr == typ { - return pj, nil - } - } - } - return nil, fmt.Errorf("sending layer: %v, type: %v not found in list of pathways", sender, typ) -} +func (pt *PathBase) Label() string { return pt.Name } -// RecvNameTry finds the pathway with given recv layer name. -// returns error message if not found -func (pl *Paths) RecvNameTry(recv string) (Path, error) { - for _, pj := range *pl { - if pj.RecvLay().Name() == recv { - return pj, nil - } +// SynValue returns value of given variable name on the synapse +// between given send, recv unit indexes (1D, flat indexes). +// Returns math32.NaN() for access errors. +func (pt *PathBase) SynValue(varNm string, sidx, ridx int) float32 { + vidx, err := pt.EmerPath.SynVarIndex(varNm) + if err != nil { + return math32.NaN() } - return nil, fmt.Errorf("receiving layer: %v not found in list of pathways", recv) + syi := pt.EmerPath.SynIndex(sidx, ridx) + return pt.EmerPath.SynValue1D(vidx, syi) } - -// RecvNameTypeTry finds the pathway with given recv layer name and Type string. -// returns error message if not found. -func (pl *Paths) RecvNameTypeTry(recv, typ string) (Path, error) { - for _, pj := range *pl { - if pj.RecvLay().Name() == recv { - tstr := pj.PathTypeName() - if tstr == typ { - return pj, nil - } - } - } - return nil, fmt.Errorf("receiving layer: %v, type: %v not found in list of pathways", recv, typ) -} - -////////////////////////////////////////////////////////////////////////////////////// -// PathType - -// PathType is the type of the pathway (extensible for more specialized algorithms). -// Class parameter styles automatically key off of these types. 
-type PathType int32 //enums:enum - -// The pathway types -const ( - // Forward is a feedforward, bottom-up pathway from sensory inputs to higher layers - Forward PathType = iota - - // Back is a feedback, top-down pathway from higher layers back to lower layers - Back - - // Lateral is a lateral pathway within the same layer / area - Lateral - - // Inhib is an inhibitory pathway that drives inhibitory synaptic inputs instead of excitatory - Inhib -) diff --git a/emer/typegen.go b/emer/typegen.go index 79510692..6a0f6b3c 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,11 +6,9 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the basic interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nInterfaces are automatically pointers, so think of this\nas a pointer to your specific layer type,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NRecvPaths", Doc: "NRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NSendPaths", Doc: "NSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: 
"WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NRecvPaths", Doc: "NRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NSendPaths", Doc: "NSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: 
"WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here;\nall of that goes in your specific layer struct.", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes returns the current set of representative unit indexes.\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters).\nReturns nil if none has been set (in which case all units should be used).\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape returns the shape to use for the subset of representative\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this, otherwise a 1D array\nof len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layers", IDName: "layers", Doc: "Layers is a slice of layers"}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayNames", IDName: "lay-names", Doc: "LayNames is a list of layer names.\nHas convenience methods for adding, validating."}) @@ -22,8 +20,6 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetSiz var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the basic interface for a neural network, used for managing the structural\nelements of a network, and for visualization, I/O, etc", Methods: []types.Method{{Name: "InitName", Doc: "InitName MUST be called to initialize the network's pointer to itself as an emer.Network\nwhich enables the proper interface methods to be called. 
Also sets the name.", Args: []string{"net", "name"}}, {Name: "Name", Doc: "Name() returns name of the network", Returns: []string{"string"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "NLayers", Doc: "NLayers returns the number of layers in the network", Returns: []string{"int"}}, {Name: "Layer", Doc: "Layer returns layer (as emer.Layer interface) at given index -- does not\ndo extra bounds checking", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "LayerByName", Doc: "LayerByName returns layer of given name, nil if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer"}}, {Name: "LayerByNameTry", Doc: "LayerByNameTry returns layer of given name, returns error if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer", "error"}}, {Name: "PathByNameTry", Doc: "PathByNameTry returns path of given name, returns error if not found.\nPath names are SendToRecv, and are looked up by parsing the name", Args: []string{"name"}, Returns: []string{"Path", "error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to layers and paths in this network.\nCalls UpdateParams on anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Network that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Network", Returns: []string{"string"}}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on the units in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all layers need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of 
fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapses in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all pathways need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes network weights (and any other state that adapts with learning)\nto JSON-formatted output.", Args: []string{"w"}, Returns: []string{"error"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads network weights (and any other state that adapts with learning)\nfrom JSON-formatted input. Reads into a temporary weights.Network structure that\nis then passed to SetWts to actually set the weights.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this network from weights.Network decoded values", Args: []string{"nw"}, Returns: []string{"error"}}, {Name: "SaveWtsJSON", Doc: "SaveWtsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWtsJSON", Doc: "OpenWtsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. 
If filename has .gz extension, then file is gzip uncompressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "Bounds", Doc: "Bounds returns the minimum and maximum display coordinates of the network for 3D display", Returns: []string{"min", "max"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "LayersByClass", Doc: "LayersByClass returns a list of layer names by given class(es).\nLists are compiled when network Build() function called.\nThe layer Type is always included as a Class, along with any other\nspace-separated strings specified in Class for parameter styling, etc.\nIf no classes are passed, all layer names in order are returned.", Args: []string{"classes"}, Returns: []string{"[]string"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the basic interface for a pathway which connects two layers.\nName is set automatically to: SendLay().Name() + \"To\" + RecvLay().Name()", Methods: []types.Method{{Name: "Init", Doc: "Init MUST be called to initialize the path's pointer to itself as an emer.Path\nwhich enables the proper interface methods to be called.", Args: []string{"path"}}, {Name: "SendLay", Doc: "SendLay returns the sending layer for this pathway", Returns: []string{"Layer"}}, {Name: "RecvLay", Doc: "RecvLay returns the receiving layer for this pathway", Returns: []string{"Layer"}}, {Name: "Pattern", Doc: "Pattern returns the pattern of connectivity for interconnecting the layers", Returns: []string{"Pattern"}}, {Name: "SetPattern", Doc: "SetPattern sets the pattern of connectivity for interconnecting the layers.\nReturns Path so it can be chained to set other properties too", Args: []string{"pat"}, Returns: []string{"Path"}}, {Name: "Type", Doc: "Type returns the functional type of pathway according to PathType (extensible in\nmore specialized algorithms)", Returns: []string{"PathType"}}, {Name: "SetType", Doc: "SetType sets the functional type of pathway according to PathType\nReturns Path so it can be chained to set other properties too", Args: []string{"typ"}, Returns: []string{"Path"}}, {Name: "PathTypeName", Doc: "PathTypeName returns the string rep of functional type of pathway\naccording to PathType (extensible in more specialized algorithms, by\nredefining this method as needed).", Returns: []string{"string"}}, {Name: "AddClass", Doc: "AddClass adds a CSS-style class name(s) for this path,\nensuring that it is not a duplicate, and properly space separated.\nReturns Path so it can be chained to set other properties too", Args: []string{"cls"}, Returns: []string{"Path"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "IsOff", Doc: "IsOff returns true if pathway or either send or recv layer has been turned Off.\nUseful for experimentation", Returns: []string{"bool"}}, {Name: "SetOff", Doc: "SetOff sets the pathway 
Off status (i.e., lesioned). Careful: Layer.SetOff(true) will\nreactivate that layer's pathways, so pathway-level lesioning should always be called\nafter layer-level lesioning.", Args: []string{"off"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "Syn1DNum", Doc: "Syn1DNum returns the number of synapses for this path as a 1D array.\nThis is the max idx for SynVal1D and the number of vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynVal1D", Doc: "SynVal1D returns value of given variable index (from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse, using the natural ordering\nof the synapses (sender based for Leabra),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue", Doc: "SynVal returns value of given variable name on the synapse\nbetween given send, recv unit indexes (1D, flat indexes).\nReturns math32.NaN() for access errors.", Args: []string{"varNm", "sidx", "ridx"}, Returns: []string{"float32"}}, {Name: "SetSynValue", Doc: "SetSynVal sets value of given variable name on the synapse\nbetween given send, recv unit indexes (1D, flat indexes).\nTypically only supports base synapse variables and is not extended\nfor derived types.\nReturns error for access errors.", Args: []string{"varNm", "sidx", "ridx", "val"}, Returns: []string{"error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for all Path parameters"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given 
parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Projection that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Projection", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway from the receiver-side perspective\nin a JSON text format. We build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway from the receiver-side perspective\nin a JSON text format. This is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path decoded values", Args: []string{"pw"}, Returns: []string{"error"}}, {Name: "Build", Doc: "Build constructs the full connectivity among the layers as specified in this pathway.", Returns: []string{"error"}}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Paths", IDName: "paths", Doc: "Paths is a slice of pathways"}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. 
The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nIt always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Projection that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Projection", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nWe build in the 
indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON,\nwhich reads into a separate structure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathType", IDName: "path-type", Doc: "PathType is the type of the pathway (extensible for more specialized algorithms).\nClass parameter styles automatically key off of these types."}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nName is set automatically to:\nNothing algorithm-specific is implemented here.", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitPath function."}, {Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}}}) From 0fe8c172ea3ce18d7293d8b82cb64dec9dce8c4d Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Fri, 9 Aug 2024 16:04:52 -0700 Subject: [PATCH 03/10] emer.network updated --- emer/layer.go | 39 ++----- emer/laynames.go | 63 ----------- emer/netparams.go | 67 ++++++++++++ emer/network.go | 258 ++++++++++++++++++++++++++++++++++++---------- emer/path.go | 39 ++----- 5 files changed, 287 insertions(+), 179 deletions(-) delete mode 100644 emer/laynames.go diff --git a/emer/layer.go b/emer/layer.go index ad32cbde..5e29988f 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -48,36 +48,12 @@ type Layer interface { // by the algorithm (and usually set by an enum). TypeName() string - // UnitVarNames returns a list of variable names available - // on the units in this layer. - // This is typically a global list so do not modify! - UnitVarNames() []string - - // UnitVarProps returns a map of unit variable properties, - // with the key being the name of the variable, and the - // value gives a space-separated list of - // go-tag-style properties for that variable. - // The NetView recognizes the following properties: - // range:"##" = +- range around 0 for default display scaling - // min:"##" max:"##" = min, max display range - // auto-scale:"+" or "-" = use automatic scaling instead of fixed - // range or not. - // zeroctr:"+" or "-" = control whether zero-centering is used - // desc:"txt" tooltip description of the variable - // Note: this is a global list so do not modify! 
-	UnitVarProps() map[string]string
-
 	// UnitVarIndex returns the index of given variable within
 	// the Neuron, according to *this layer's* UnitVarNames() list
 	// (using a map to lookup index), or -1 and error message if
 	// not found.
 	UnitVarIndex(varNm string) (int, error)

-	// UnitVarNum returns the number of Neuron-level variables
-	// for this layer. This is needed for extending indexes in
-	// derived types.
-	UnitVarNum() int
-
 	// UnitVal1D returns value of given variable index on given unit,
 	// using 1-dimensional index, and a data parallel index di,
 	// for networks capable of processing multiple input patterns
@@ -86,10 +62,6 @@ type Layer interface {
 	// so it is the only one that needs to be updated for derived layer types.
 	UnitVal1D(varIndex int, idx, di int) float32

-	// VarRange returns the min / max values for given variable
-	// over the layer
-	VarRange(varNm string) (min, max float32, err error)
-
 	// NRecvPaths returns the number of receiving pathways.
 	NRecvPaths() int

@@ -185,17 +157,26 @@ type LayerBase struct {
 	// methods for functions defined in the LayerBase type.
 	// Must set this with a pointer to the actual instance
 	// when created, using InitLayer function.
-	EmerLayer Layer
+	EmerLayer Layer `display:"-"`

 	// Name of the layer, which must be unique within the network.
 	// Layers are typically accessed directly by name, via a map.
 	Name string

+	// Info contains descriptive information about the layer.
+	// This is displayed in a tooltip in the network view.
+	Info string
+
 	// Class is for applying parameter styles across multiple layers
 	// that all get the same parameters. This can be space separated
 	// with multple classes.
 	Class string

+	// Off turns off the layer, removing it from all computations.
+	// This provides a convenient way to dynamically test for
+	// the contributions of the layer, for example.
+	Off bool
+
 	// Shape of the layer, either 2D or 4D. Although spatial topology
 	// is not relevant to all algorithms, the 2D shape is important for
 	// efficiently visualizing large numbers of units / neurons.
diff --git a/emer/laynames.go b/emer/laynames.go
deleted file mode 100644
index c443f0c6..00000000
--- a/emer/laynames.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) 2019, The Emergent Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package emer
-
-import (
-	"log"
-)
-
-// LayNames is a list of layer names.
-// Has convenience methods for adding, validating.
-type LayNames []string
-
-// Validate ensures that LayNames layers are valid.
-// ctxt is string for error message to provide context.
-func (ln *LayNames) Validate(net Network, ctxt string) error {
-	var lasterr error
-	for _, lnm := range *ln {
-		_, err := net.LayerByNameTry(lnm)
-		if err != nil {
-			log.Printf("%s LayNames.Validate: %v\n", ctxt, err)
-			lasterr = err
-		}
-	}
-	return lasterr
-}
-
-// Add adds given layer name(s) to list
-func (ln *LayNames) Add(laynm ...string) {
-	*ln = append(*ln, laynm...)
-} - -// AddAllBut adds all layers in network except those in exlude list -func (ln *LayNames) AddAllBut(net Network, excl ...string) { - exmap := make(map[string]struct{}) - for _, ex := range excl { - exmap[ex] = struct{}{} - } - *ln = nil - nl := net.NLayers() - for li := 0; li < nl; li++ { - aly := net.Layer(li) - nm := aly.AsEmer().Name - if _, on := exmap[nm]; on { - continue - } - ln.Add(nm) - } -} - -// Layers returns slice of emer.Layers in given network based on layer names -// error is returned if any are not found. -func (ln *LayNames) Layers(net Network) (lays []Layer, err error) { - var ly Layer - for _, nm := range *ln { - ly, err = net.LayerByNameTry(nm) - if err == nil { - lays = append(lays, ly) - } - } - return -} diff --git a/emer/netparams.go b/emer/netparams.go index 53394a36..56983cb0 100644 --- a/emer/netparams.go +++ b/emer/netparams.go @@ -7,6 +7,7 @@ package emer import ( "fmt" "log" + "log/slog" "strings" "cogentcore.org/core/base/mpi" @@ -146,3 +147,69 @@ func (pr *NetParams) SetNetworkSheet(net Network, sh *params.Sheet, setName stri pr.NetHypers.CopyFrom(hypers) } } + +// NetworkHyperParams returns the compiled hyper parameters from given Sheet +// for each layer and pathway in the network -- applies the standard css +// styling logic for the hyper parameters. +func NetworkHyperParams(net Network, sheet *params.Sheet) params.Flex { + hypers := params.Flex{} + nl := net.NumLayers() + for li := range nl { + ly := net.EmerLayer(li) + nm := ly.StyleName() + hypers[nm] = ¶ms.FlexVal{Nm: nm, Type: "Layer", Cls: ly.StyleClass(), Obj: params.Hypers{}} + } + // separate pathways + for li := range nl { + ly := net.EmerLayer(li) + np := ly.NRecvPaths() + for pi := range np { + pj := ly.RecvPath(pi) + nm := pj.StyleName() + hypers[nm] = ¶ms.FlexVal{Nm: nm, Type: "Path", Cls: pj.StyleClass(), Obj: params.Hypers{}} + } + } + for nm, vl := range hypers { + sheet.Apply(vl, false) + hv := vl.Obj.(params.Hypers) + hv.DeleteValOnly() + if len(hv) == 0 { + delete(hypers, nm) + } + } + return hypers +} + +// SetFloatParam sets given float32 param value to layer or pathway +// (typ = Layer or Path) of given name, at given path (which can start +// with the typ name). +// Returns an error (and logs it automatically) for any failure. 
+func SetFloatParam(net Network, name, typ, path string, val float32) error { + rpath := params.PathAfterType(path) + prs := fmt.Sprintf("%g", val) + switch typ { + case "Layer": + ly, err := net.LayerByNameTry(name) + if err != nil { + slog.Error(err.Error()) + return err + } + err = ly.SetParam(rpath, prs) + if err != nil { + slog.Error(err.Error()) + return err + } + case "Path": + pj, err := net.PathByNameTry(name) + if err != nil { + slog.Error(err.Error()) + return err + } + err = pj.SetParam(rpath, prs) + if err != nil { + slog.Error(err.Error()) + return err + } + } + return nil +} diff --git a/emer/network.go b/emer/network.go index 0aede233..6ae2c1b9 100644 --- a/emer/network.go +++ b/emer/network.go @@ -7,36 +7,52 @@ package emer //go:generate core generate -add-types import ( + "fmt" "io" + "strings" + "cogentcore.org/core/base/randx" "cogentcore.org/core/core" "cogentcore.org/core/math32" "github.com/emer/emergent/v2/params" "github.com/emer/emergent/v2/weights" ) -// Network defines the basic interface for a neural network, used for managing the structural -// elements of a network, and for visualization, I/O, etc +// Network defines the minimal interface for a neural network, +// used for managing the structural elements of a network, +// and for visualization, I/O, etc. +// Most of the standard expected functionality is defined in the +// NetworkBase struct, and this interface only has methods that must be +// implemented specifically for a given algorithmic implementation. type Network interface { - // InitName MUST be called to initialize the network's pointer to itself as an emer.Network - // which enables the proper interface methods to be called. Also sets the name. - InitName(net Network, name string) + // AsEmer returns the network as an *emer.NetworkBase, + // to access base functionality. + AsEmer() *NetworkBase - // Name() returns name of the network - Name() string - - // Label satisfies the core.Labeler interface for getting the name of objects generically + // Label satisfies the core.Labeler interface for getting + // the name of objects generically. Label() string - // NLayers returns the number of layers in the network - NLayers() int + // NumLayers returns the number of layers in the network. + NumLayers() int + + // EmerLayer returns layer as emer.Layer interface at given index. + // Does not do extra bounds checking. + EmerLayer(idx int) Layer + + // MaxParallelData returns the maximum number of data inputs that can be + // processed in parallel by the network. + // The NetView supports display of up to this many data elements. + MaxParallelData() int - // Layer returns layer (as emer.Layer interface) at given index -- does not - // do extra bounds checking - Layer(idx int) Layer + // NParallelData returns the current number of data inputs currently being + // processed in parallel by the network. + // Logging supports recording each of these where appropriate. + NParallelData() int + // todo: remove? // LayerByName returns layer of given name, nil if not found. - // Layer names must be unique and a map is used so this is a fast operation + // Layer names must be unique and a map is used so this is a fast operation. LayerByName(name string) Layer // LayerByNameTry returns layer of given name, returns error if not found. 
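To make the rewritten interface concrete, here is a minimal illustrative sketch (not part of this patch) of how calling code might traverse a network via the new NumLayers/EmerLayer accessors; printLayerNames is a hypothetical helper, and the imports assume a concrete network implementing emer.Network:

// Illustrative sketch only, not in the patch.
package main

import (
	"fmt"

	"github.com/emer/emergent/v2/emer"
)

// printLayerNames walks the network via the new NumLayers/EmerLayer
// accessors and uses AsEmer to reach the shared LayerBase fields.
func printLayerNames(net emer.Network) {
	for li := range net.NumLayers() {
		lb := net.EmerLayer(li).AsEmer()
		fmt.Println(li, lb.Name, lb.Class)
	}
}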
@@ -47,16 +63,19 @@ type Network interface { // Path names are SendToRecv, and are looked up by parsing the name PathByNameTry(name string) (Path, error) - // Defaults sets default parameter values for everything in the Network + // Defaults sets default parameter values for everything in the Network. Defaults() // UpdateParams() updates parameter values for all Network parameters, // based on any other params that might have changed. UpdateParams() - // ApplyParams applies given parameter style Sheet to layers and paths in this network. - // Calls UpdateParams on anything set to ensure derived parameters are all updated. - // If setMsg is true, then a message is printed to confirm each parameter that is set. + // ApplyParams applies given parameter style Sheet to layers + // and paths in this network. + // Calls UpdateParams on anything set to ensure derived parameters + // are all updated. + // If setMsg is true, then a message is printed to confirm each + // parameter that is set. // it always prints a message if a parameter fails to be set. // returns true if any params were set, and error if there were any errors. ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) @@ -76,15 +95,18 @@ type Network interface { // of the most important pathway-level params (specific to each algorithm). KeyPathParams() string - // UnitVarNames returns a list of variable names available on the units in this network. - // This list determines what is shown in the NetView (and the order of vars list). - // Not all layers need to support all variables, but must safely return math32.NaN() for - // unsupported ones. + // UnitVarNames returns a list of variable names available on + // the units in this network. + // This list determines what is shown in the NetView + // (and the order of vars list). + // Not all layers need to support all variables, + // but must safely return math32.NaN() for unsupported ones. // This is typically a global list so do not modify! UnitVarNames() []string - // UnitVarProps returns a map of unit variable properties, with the key being the - // name of the variable, and the value gives a space-separated list of + // UnitVarProps returns a map of unit variable properties, + // with the key being the name of the variable, + // and the value gives a space-separated list of // go-tag-style properties for that variable. // The NetView recognizes the following properties: // range:"##" = +- range around 0 for default display scaling @@ -95,15 +117,19 @@ type Network interface { // Note: this is typically a global list so do not modify! UnitVarProps() map[string]string - // SynVarNames returns the names of all the variables on the synapses in this network. - // This list determines what is shown in the NetView (and the order of vars list). - // Not all pathways need to support all variables, but must safely return math32.NaN() for + // SynVarNames returns the names of all the variables + // on the synapses in this network. + // This list determines what is shown in the NetView + // (and the order of vars list). + // Not all pathways need to support all variables, + // but must safely return math32.NaN() for // unsupported ones. // This is typically a global list so do not modify! 
SynVarNames() []string - // SynVarProps returns a map of synapse variable properties, with the key being the - // name of the variable, and the value gives a space-separated list of + // SynVarProps returns a map of synapse variable properties, + // with the key being the name of the variable, + // and the value gives a space-separated list of // go-tag-style properties for that variable. // The NetView recognizes the following properties: // range:"##" = +- range around 0 for default display scaling @@ -113,46 +139,166 @@ type Network interface { // Note: this is typically a global list so do not modify! SynVarProps() map[string]string - // WriteWtsJSON writes network weights (and any other state that adapts with learning) - // to JSON-formatted output. + // WriteWtsJSON writes network weights (and any other state + // that adapts with learning) to JSON-formatted output. WriteWtsJSON(w io.Writer) error - // ReadWtsJSON reads network weights (and any other state that adapts with learning) - // from JSON-formatted input. Reads into a temporary weights.Network structure that + // ReadWtsJSON reads network weights (and any other state + // that adapts with learning) from JSON-formatted input. + // Reads into a temporary weights.Network structure that // is then passed to SetWts to actually set the weights. ReadWtsJSON(r io.Reader) error - // SetWts sets the weights for this network from weights.Network decoded values + // SetWts sets the weights for this network from weights.Network + // decoded values. SetWts(nw *weights.Network) error - // SaveWtsJSON saves network weights (and any other state that adapts with learning) - // to a JSON-formatted file. If filename has .gz extension, then file is gzip compressed. + // SaveWtsJSON saves network weights (and any other state + // that adapts with learning) to a JSON-formatted file. + // If filename has .gz extension, then file is gzip compressed. SaveWtsJSON(filename core.Filename) error - // OpenWtsJSON opens network weights (and any other state that adapts with learning) - // from a JSON-formatted file. If filename has .gz extension, then file is gzip uncompressed. + // OpenWtsJSON opens network weights (and any other state that + // adapts with learning) from a JSON-formatted file. + // If filename has .gz extension, then file is gzip uncompressed. OpenWtsJSON(filename core.Filename) error - // Bounds returns the minimum and maximum display coordinates of the network for 3D display - Bounds() (min, max math32.Vector3) - // VarRange returns the min / max values for given variable VarRange(varNm string) (min, max float32, err error) +} - // LayersByClass returns a list of layer names by given class(es). - // Lists are compiled when network Build() function called. - // The layer Type is always included as a Class, along with any other - // space-separated strings specified in Class for parameter styling, etc. - // If no classes are passed, all layer names in order are returned. - LayersByClass(classes ...string) []string +// NetworkBase defines the basic data for a neural network, +// used for managing the structural elements of a network, +// and for visualization, I/O, etc. +type NetworkBase struct { + // EmerNetwork provides access to the emer.Network interface + // methods for functions defined in the NetworkBase type. + // Must set this with a pointer to the actual instance + // when created, using InitNetwork function. 
+ EmerNetwork Network - // MaxParallelData returns the maximum number of data inputs that can be - // processed in parallel by the network. - // The NetView supports display of up to this many data elements. - MaxParallelData() int + // overall name of network, which helps discriminate if there are multiple. + Name string - // NParallelData returns the current number of data inputs currently being - // processed in parallel by the network. - // Logging supports recording each of these where appropriate. - NParallelData() int + // filename of last weights file loaded or saved. + WeightsFile string + + // map of name to layers, for LayerByName methods + LayerNameMap map[string]Layer `display:"-"` + + // map from class name to layer names. + LayerClassMap map[string][]string `display:"-"` + + // minimum display position in network + MinPos math32.Vector3 `display:"-"` + + // maximum display position in network + MaxPos math32.Vector3 `display:"-"` + + // optional metadata that is saved in network weights files, + // e.g., can indicate number of epochs that were trained, + // or any other information about this network that would be useful to save. + MetaData map[string]string + + // random number generator for the network. + // all random calls must use this. + // Set seed here for weight initialization values. + Rand randx.SysRand `display:"-"` + + // Random seed to be set at the start of configuring + // the network and initializing the weights. + // Set this to get a different set of weights. + RandSeed int64 `edit:"-"` +} + +// InitNetwork initializes the network, setting the EmerNetwork interface +// to provide access to it for NetworkBase methods, along with the name. +func InitNetwork(nt Network, name string) { + nb := nt.AsEmer() + nb.EmerNetwork = nt + nb.Name = name +} + +func (nt *NetworkBase) AsEmer() *NetworkBase { return nt } + +func (nt *NetworkBase) Label() string { return nt.Name } + +// UpdateLayerMaps updates the LayerNameMap and LayerClassMap. +// Call this when the network is built. +func (nt *NetworkBase) UpdateLayerMaps() { + nt.LayerNameMap = make(map[string]Layer) + nt.LayerClassMap = make(map[string][]string) + nl := nt.EmerNetwork.NumLayers() + for li := range nl { + ly := nt.EmerNetwork.EmerLayer(li) + lnm := ly.StyleName() + nt.LayerNameMap[lnm] = ly + cls := strings.Split(ly.StyleClass(), " ") + for _, cl := range cls { + ll := nt.LayerClassMap[cl] + ll = append(ll, lnm) + nt.LayerClassMap[cl] = ll + } + } +} + +// LayerByNameTry returns a layer by looking it up by name. +// returns error message if layer is not found. +func (nt *NetworkBase) LayerByNameTry(name string) (Layer, error) { + if nt.LayerNameMap == nil || len(nt.LayerNameMap) != nt.EmerNetwork.NumLayers() { + nt.UpdateLayerMaps() + } + if ly, ok := nt.LayerNameMap[name]; ok { + return ly, nil + } + err := fmt.Errorf("Layer named: %s not found in Network: %s", name, nt.Name) + return nil, err +} + +// LayerByName returns a layer by looking it up by name +// in the layer map (nil if not found). +func (nt *NetworkBase) LayerByName(name string) Layer { + ly, _ := nt.LayerByNameTry(name) + return ly +} + +// LayersByClass returns a list of layer names by given class(es). +// Lists are compiled when network Build() function called, +// or now if not yet present. +// The layer Type is always included as a Class, along with any other +// space-separated strings specified in Class for parameter styling, etc. +// If no classes are passed, all layer names in order are returned. 
+func (nt *NetworkBase) LayersByClass(classes ...string) []string { + if nt.LayerClassMap == nil { + nt.UpdateLayerMaps() + } + var nms []string + nl := nt.EmerNetwork.NumLayers() + if len(classes) == 0 { + for li := range nl { + ly := nt.EmerNetwork.EmerLayer(li).AsEmer() + if ly.Off { + continue + } + nms = append(nms, ly.Name) + } + return nms + } + for _, lc := range classes { + nms = append(nms, nt.LayerClassMap[lc]...) + } + // only get unique layers + layers := []string{} + has := map[string]bool{} + for _, nm := range nms { + if has[nm] { + continue + } + layers = append(layers, nm) + has[nm] = true + } + if len(layers) == 0 { + panic(fmt.Sprintf("No Layers found for query: %#v.", classes)) + } + return layers } diff --git a/emer/path.go b/emer/path.go index 52ab83c6..b4eab128 100644 --- a/emer/path.go +++ b/emer/path.go @@ -47,40 +47,25 @@ type Path interface { // can use a Recv field with the actual Layer struct type. RecvLayer() Layer - // SynVarNames returns the names of all the variables on the synapse - // This is typically a global list so do not modify! - SynVarNames() []string - - // SynVarProps returns a map of synapse variable properties, with the key being the - // name of the variable, and the value gives a space-separated list of - // go-tag-style properties for that variable. - // The NetView recognizes the following properties: - // range:"##" = +- range around 0 for default display scaling - // min:"##" max:"##" = min, max display range - // auto-scale:"+" or "-" = use automatic scaling instead of fixed range or not. - // zeroctr:"+" or "-" = control whether zero-centering is used - // Note: this is a global list so do not modify! - SynVarProps() map[string]string + // NumSyns returns the number of synapses for this path. + // This is the max idx for SynValue1D and the number + // of vals set by SynValues. + NumSyns() int // SynIndex returns the index of the synapse between given send, recv unit indexes // (1D, flat indexes). Returns -1 if synapse not found between these two neurons. // This requires searching within connections for receiving unit (a bit slow). SynIndex(sidx, ridx int) int + // SynVarNames returns the names of all the variables on the synapse + // This is typically a global list so do not modify! + SynVarNames() []string + // SynVarIndex returns the index of given variable within the synapse, // according to *this path's* SynVarNames() list (using a map to lookup index), // or -1 and error message if not found. SynVarIndex(varNm string) (int, error) - // SynVarNum returns the number of synapse-level variables - // for this paths. This is needed for extending indexes in derived types. - SynVarNum() int - - // NumSyns returns the number of synapses for this path. - // This is the max idx for SynValue1D and the number - // of vals set by SynValues. - NumSyns() int - // SynValues sets values of given variable name for each synapse, // using the natural ordering of the synapses (sender based for Axon), // into given float32 slice (only resized if not big enough). @@ -94,14 +79,6 @@ type Path interface { // so it is the only one that needs to be updated for derived types. SynValue1D(varIndex int, synIndex int) float32 - // todo: we don't need this in interface right? - // SetSynValue sets value of given variable name on the synapse - // between given send, recv unit indexes (1D, flat indexes). - // Typically only supports base synapse variables and is not extended - // for derived types. - // Returns error for access errors. 
-	// SetSynValue(varNm string, sidx, ridx int, val float32) error
-
 	// UpdateParams() updates parameter values for all Path parameters,
 	// based on any other params that might have changed.
 	UpdateParams()

From 1e01d34773a2750da4624a3be9f961e96331087d Mon Sep 17 00:00:00 2001
From: "Randall C. O'Reilly"
Date: Fri, 9 Aug 2024 22:58:10 -0700
Subject: [PATCH 04/10] netview building.

---
 emer/layer.go        |  75 ++++++++++----------
 emer/netparams.go    |   6 +-
 emer/network.go      |  28 +++-----
 emer/path.go         |   4 ++
 emer/typegen.go      |   2 +-
 netview/data.go      |  30 ++++-----
 netview/events.go    |  32 ++++-----
 netview/laymesh.go   |  58 ++++++++---------
 netview/layraster.go | 150 +++++++++++++++++++++----------------------
 netview/netdata.go   |  81 +++++++++++------------
 netview/netview.go   |  60 ++++++++---------
 11 files changed, 266 insertions(+), 260 deletions(-)

diff --git a/emer/layer.go b/emer/layer.go
index 5e29988f..42da30f9 100644
--- a/emer/layer.go
+++ b/emer/layer.go
@@ -62,14 +62,14 @@ type Layer interface {
 	// so it is the only one that needs to be updated for derived layer types.
 	UnitVal1D(varIndex int, idx, di int) float32
 
-	// NRecvPaths returns the number of receiving pathways.
-	NRecvPaths() int
+	// NumRecvPaths returns the number of receiving pathways.
+	NumRecvPaths() int
 
 	// RecvPath returns a specific receiving pathway.
 	RecvPath(idx int) Path
 
-	// NSendPaths returns the number of sending pathways.
-	NSendPaths() int
+	// NumSendPaths returns the number of sending pathways.
+	NumSendPaths() int
 
 	// SendPath returns a specific sending pathway.
 	SendPath(idx int) Path
@@ -534,47 +534,56 @@ func Layer2DSampleIndexes(ly Layer, maxSize int) (idxs, shape []int) {
 	return
 }
 
-//////////////////////////////////////////////////////////////////////////////////////
-// Layers
-
-// we keep these here to make it easier for other packages to implement the emer.Layer interface
-// by just calling these methods
-func SendNameTry(l Layer, sender string) (Path, error) {
-	for pi := 0; pi < l.NRecvPaths(); pi++ {
-		pj := l.RecvPath(pi)
-		if pj.SendLayer().AsEmer().Name == sender {
-			return pj, nil
+// RecvPathBySendName returns the receiving Path with given
+// sending layer name (the first one if multiple exist).
+func (ly *LayerBase) RecvPathBySendName(sender string) (Path, error) {
+	el := ly.EmerLayer
+	for pi := range el.NumRecvPaths() {
+		pt := el.RecvPath(pi)
+		if pt.SendLayer().StyleName() == sender {
+			return pt, nil
 		}
 	}
-	return nil, fmt.Errorf("sending layer: %v not found in list of pathways", sender)
+	return nil, fmt.Errorf("sending layer named: %s not found in list of receiving pathways", sender)
 }
 
-func RecvNameTry(l Layer, recv string) (Path, error) {
-	for pi := 0; pi < l.NSendPaths(); pi++ {
-		pj := l.SendPath(pi)
-		if pj.RecvLayer().AsEmer().Name == recv {
-			return pj, nil
+// SendPathByRecvName returns the sending Path with given
+// receiving layer name (the first one if multiple exist).
+func (ly *LayerBase) SendPathByRecvName(recv string) (Path, error) {
+	el := ly.EmerLayer
+	for pi := range el.NumSendPaths() {
+		pt := el.SendPath(pi)
+		if pt.RecvLayer().StyleName() == recv {
+			return pt, nil
 		}
 	}
-	return nil, fmt.Errorf("receiving layer: %v not found in list of pathways", recv)
+	return nil, fmt.Errorf("receiving layer named: %s not found in list of sending pathways", recv)
 }
 
-func SendNameTypeTry(l Layer, sender, typ string) (Path, error) {
-	for pi := 0; pi < l.NRecvPaths(); pi++ {
-		pj := l.RecvPath(pi)
-		if pj.SendLayer().AsEmer().Name == sender && pj.TypeName() == typ {
-			return pj, nil
+// RecvPathBySendNameType returns the receiving Path with given
+// sending layer name, with the given type name
+// (the first one if multiple exist).
+func (ly *LayerBase) RecvPathBySendNameType(sender, typeName string) (Path, error) {
+	el := ly.EmerLayer
+	for pi := range el.NumRecvPaths() {
+		pt := el.RecvPath(pi)
+		if pt.SendLayer().StyleName() == sender && pt.TypeName() == typeName {
+			return pt, nil
 		}
 	}
-	return nil, fmt.Errorf("sending layer: %v not found in list of pathways", sender)
+	return nil, fmt.Errorf("sending layer named: %s of type %s not found in list of receiving pathways", sender, typeName)
 }
 
-func RecvNameTypeTry(l Layer, recv, typ string) (Path, error) {
-	for pi := 0; pi < l.NSendPaths(); pi++ {
-		pj := l.SendPath(pi)
-		if pj.RecvLayer().AsEmer().Name == recv && pj.TypeName() == typ {
-			return pj, nil
+// SendPathByRecvNameType returns the sending Path with given
+// receiving layer name, with the given type name
+// (the first one if multiple exist).
+func (ly *LayerBase) SendPathByRecvNameType(recv, typeName string) (Path, error) {
+	el := ly.EmerLayer
+	for pi := range el.NumSendPaths() {
+		pt := el.SendPath(pi)
+		if pt.RecvLayer().StyleName() == recv && pt.TypeName() == typeName {
+			return pt, nil
 		}
 	}
-	return nil, fmt.Errorf("receiving layer: %v, type: %v not found in list of pathways", recv, typ)
+	return nil, fmt.Errorf("receiving layer named: %s, type: %s not found in list of sending pathways", recv, typeName)
 }

diff --git a/emer/netparams.go b/emer/netparams.go
index 56983cb0..6b7585d2 100644
--- a/emer/netparams.go
+++ b/emer/netparams.go
@@ -162,7 +162,7 @@ func NetworkHyperParams(net Network, sheet *params.Sheet) params.Flex {
 	// separate pathways
 	for li := range nl {
 		ly := net.EmerLayer(li)
-		np := ly.NRecvPaths()
+		np := ly.NumRecvPaths()
 		for pi := range np {
 			pj := ly.RecvPath(pi)
 			nm := pj.StyleName()
@@ -189,7 +189,7 @@ func SetFloatParam(net Network, name, typ, path string, val float32) error {
 	prs := fmt.Sprintf("%g", val)
 	switch typ {
 	case "Layer":
-		ly, err := net.LayerByNameTry(name)
+		ly, err := net.EmerLayerByName(name)
 		if err != nil {
 			slog.Error(err.Error())
 			return err
@@ -200,7 +200,7 @@ func SetFloatParam(net Network, name, typ, path string, val float32) error {
 			return err
 		}
 	case "Path":
-		pj, err := net.PathByNameTry(name)
+		pj, err := net.EmerPathByName(name)
 		if err != nil {
 			slog.Error(err.Error())
 			return err
diff --git a/emer/network.go b/emer/network.go
index 6ae2c1b9..61f54dfa 100644
--- a/emer/network.go
+++ b/emer/network.go
@@ -50,18 +50,13 @@ type Network interface {
 	// Logging supports recording each of these where appropriate.
 	NParallelData() int
 
-	// todo: remove?
-	// LayerByName returns layer of given name, nil if not found.
+	// EmerLayerByName returns layer of given name, returns nil, error if not found.
 	// Layer names must be unique and a map is used so this is a fast operation.
-	LayerByName(name string) Layer
+	EmerLayerByName(name string) (Layer, error)
 
-	// LayerByNameTry returns layer of given name, returns error if not found.
-	// Layer names must be unique and a map is used so this is a fast operation
-	LayerByNameTry(name string) (Layer, error)
-
-	// PathByNameTry returns path of given name, returns error if not found.
-	// Path names are SendToRecv, and are looked up by parsing the name
-	PathByNameTry(name string) (Path, error)
+	// EmerPathByName returns path of given name, returns error if not found.
+	// Path names are SendToRecv, and are looked up by parsing the name.
+	EmerPathByName(name string) (Path, error)
 
 	// Defaults sets default parameter values for everything in the Network.
 	Defaults()
@@ -183,7 +178,7 @@ type NetworkBase struct {
 	// filename of last weights file loaded or saved.
 	WeightsFile string
 
-	// map of name to layers, for LayerByName methods
+	// map of name to layers, for EmerLayerByName methods
 	LayerNameMap map[string]Layer `display:"-"`
 
 	// map from class name to layer names.
@@ -242,9 +237,9 @@ func (nt *NetworkBase) UpdateLayerMaps() {
 	}
 }
 
-// LayerByNameTry returns a layer by looking it up by name.
+// EmerLayerByName returns a layer by looking it up by name.
 // returns error message if layer is not found.
-func (nt *NetworkBase) LayerByNameTry(name string) (Layer, error) {
+func (nt *NetworkBase) EmerLayerByName(name string) (Layer, error) {
 	if nt.LayerNameMap == nil || len(nt.LayerNameMap) != nt.EmerNetwork.NumLayers() {
 		nt.UpdateLayerMaps()
 	}
@@ -255,13 +250,6 @@ func (nt *NetworkBase) LayerByNameTry(name string) (Layer, error) {
 	return nil, err
 }
 
-// LayerByName returns a layer by looking it up by name
-// in the layer map (nil if not found).
-func (nt *NetworkBase) LayerByName(name string) Layer {
-	ly, _ := nt.LayerByNameTry(name)
-	return ly
-}
-
 // LayersByClass returns a list of layer names by given class(es).
 // Lists are compiled when network Build() function called,
 // or now if not yet present.
diff --git a/emer/path.go b/emer/path.go
index b4eab128..3c440fca 100644
--- a/emer/path.go
+++ b/emer/path.go
@@ -61,6 +61,10 @@ type Path interface {
 	// This is typically a global list so do not modify!
 	SynVarNames() []string
 
+	// SynVarNum returns the number of synapse-level variables
+	// for this path. This is needed for extending indexes in derived types.
+	SynVarNum() int
+
 	// SynVarIndex returns the index of given variable within the synapse,
 	// according to *this path's* SynVarNames() list (using a map to lookup index),
 	// or -1 and error message if not found.
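As a usage illustration of the renamed lookup methods and the SetFloatParam helper above, here is a small sketch (not part of this patch); the layer name "Hidden", path name "InputToHidden", and the parameter path "Layer.X" are all assumed placeholders for a built network:

// Illustrative sketch only, not in the patch.
package main

import (
	"fmt"
	"log"

	"github.com/emer/emergent/v2/emer"
)

func example(net emer.Network) {
	// Lookups now return explicit errors instead of a silent nil.
	ly, err := net.EmerLayerByName("Hidden")
	if err != nil {
		log.Fatal(err)
	}
	pt, err := net.EmerPathByName("InputToHidden")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(ly.Label(), pt.StyleName())

	// SetFloatParam resolves the target by name and type ("Layer" or "Path"),
	// then applies the value at the params path; "Layer.X" is a placeholder.
	if err := emer.SetFloatParam(net, "Hidden", "Layer", "Layer.X", 1.2); err != nil {
		log.Println(err)
	}
}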
diff --git a/emer/typegen.go b/emer/typegen.go index 6a0f6b3c..9c7c5b9a 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,7 +6,7 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NRecvPaths", Doc: "NRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NSendPaths", Doc: "NSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: 
"WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, 
{Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) diff --git a/netview/data.go b/netview/data.go index 9caf05bb..ba51cfe8 100644 --- a/netview/data.go +++ b/netview/data.go @@ -31,19 +31,19 @@ type LayData struct { // AllocSendPaths allocates Sending pathways for given layer. // does nothing if already allocated. func (ld *LayData) AllocSendPaths(ly emer.Layer) { - nsp := ly.NSendPaths() + nsp := ly.NumSendPaths() if len(ld.SendPaths) == nsp { - for si := 0; si < ly.NSendPaths(); si++ { - pj := ly.SendPath(si) + for si := range ly.NumSendPaths() { + pt := ly.SendPath(si) spd := ld.SendPaths[si] - spd.Path = pj + spd.Path = pt } return } ld.SendPaths = make([]*PathData, nsp) - for si := 0; si < ly.NSendPaths(); si++ { - pj := ly.SendPath(si) - pd := &PathData{Send: pj.SendLay().Name(), Recv: pj.RecvLay().Name(), Path: pj} + for si := range ly.NumSendPaths() { + pt := ly.SendPath(si) + pd := &PathData{Send: pt.SendLayer().StyleName(), Recv: pt.RecvLayer().StyleName(), Path: pt} ld.SendPaths[si] = pd pd.Alloc() } @@ -74,9 +74,9 @@ type PathData struct { // Alloc allocates SynData to hold number of variables * nsyn synapses. // If already has capacity, nothing happens. func (pd *PathData) Alloc() { - pj := pd.Path - nvar := pj.SynVarNum() - nsyn := pj.Syn1DNum() + pt := pd.Path + nvar := pt.SynVarNum() + nsyn := pt.NumSyns() nt := nvar * nsyn if cap(pd.SynData) < nt { pd.SynData = make([]float32, nt) @@ -88,15 +88,15 @@ func (pd *PathData) Alloc() { // RecordData records synaptic data from given paths. // must use sender or recv based depending on natural ordering. 
func (pd *PathData) RecordData(nd *NetData) { - pj := pd.Path - vnms := pj.SynVarNames() - nvar := pj.SynVarNum() - nsyn := pj.Syn1DNum() + pt := pd.Path + vnms := pt.SynVarNames() + nvar := pt.SynVarNum() + nsyn := pt.NumSyns() for vi := 0; vi < nvar; vi++ { vnm := vnms[vi] si := vi * nsyn sv := pd.SynData[si : si+nsyn] - pj.SynValues(&sv, vnm) + pt.SynValues(&sv, vnm) nvi := nd.SynVarIndexes[vnm] mn := &nd.SynMinVar[nvi] mx := &nd.SynMaxVar[nvi] diff --git a/netview/events.go b/netview/events.go index 98dbf4db..6aadb3c1 100644 --- a/netview/events.go +++ b/netview/events.go @@ -48,9 +48,9 @@ func (sw *Scene) MouseDownEvent(e events.Event) { for _, n := range ns { ln, ok := n.(*LayName) if ok { - lay := ln.NetView.Net.LayerByName(ln.Text) + lay, _ := ln.NetView.Net.EmerLayerByName(ln.Text) if lay != nil { - FormDialog(sw, lay, "Layer: "+lay.Name()) + FormDialog(sw, lay, "Layer: "+lay.StyleName()) } e.SetHandled() return @@ -63,7 +63,7 @@ func (sw *Scene) MouseDownEvent(e events.Event) { } nv := sw.NetView nv.Data.PathUnIndex = unIndex - nv.Data.PathLay = lay.Name() + nv.Data.PathLay = lay.StyleName() nv.UpdateView() e.SetHandled() } @@ -77,10 +77,11 @@ func (sw *Scene) WidgetTooltip(pos image.Point) (string, image.Point) { if lay == nil { return "", pos } + lb := lay.AsEmer() nv := sw.NetView tt := "" - if lay.Is2D() { + if lb.Is2D() { idx := []int{ly, lx} val, _, _, hasval := nv.UnitValue(lay, idx) if !hasval { @@ -88,8 +89,8 @@ func (sw *Scene) WidgetTooltip(pos image.Point) (string, image.Point) { } else { tt = fmt.Sprintf("[%d,%d]=%g\n", lx, ly, val) } - } else if lay.Is4D() { - idx, ok := lay.Index4DFrom2D(lx, ly) + } else if lb.Is4D() { + idx, ok := lb.Index4DFrom2D(lx, ly) if !ok { return "", pos } @@ -114,21 +115,22 @@ func (sw *Scene) LayerUnitAtPoint(pos image.Point) (lay emer.Layer, lx, ly, unIn } _, laysGp := xyz.AsNode(laysGpi) nv := sw.NetView - nmin, nmax := nv.Net.Bounds() + nb := nv.Net.AsEmer() + nmin, nmax := nb.MinPos, nb.MaxPos nsz := nmax.Sub(nmin).Sub(math32.Vec3(1, 1, 0)).Max(math32.Vec3(1, 1, 1)) nsc := math32.Vec3(1.0/nsz.X, 1.0/nsz.Y, 1.0/nsz.Z) szc := math32.Max(nsc.X, nsc.Y) poff := math32.Vector3Scalar(0.5) poff.Y = -0.5 for li, lgi := range laysGp.Children { - lay = nv.Net.Layer(li) + lay = nv.Net.EmerLayer(li) + lb := lay.AsEmer() lg := lgi.(*xyz.Group) - lp := lay.Pos() + lp := lb.Pos.Pos lp.Y = -lp.Y // reverse direction lp = lp.Sub(nmin).Mul(nsc).Sub(poff) - rp := lay.RelPos() lg.Pose.Pos.Set(lp.X, lp.Z, lp.Y) - lg.Pose.Scale.Set(nsc.X*rp.Scale, szc, nsc.Y*rp.Scale) + lg.Pose.Scale.Set(nsc.X*lb.Pos.Scale, szc, nsc.Y*lb.Pos.Scale) lo := lg.Child(0).(*LayObj) ray := lo.RayPick(pos) // layer is in XZ plane with norm pointing up in Y axis @@ -144,16 +146,16 @@ func (sw *Scene) LayerUnitAtPoint(pos image.Point) (lay emer.Layer, lx, ly, unIn if lx < 0 || ly < 0 { continue } - lshp := lay.Shape() - if lay.Is2D() { + lshp := lb.Shape + if lb.Is2D() { idx := []int{ly, lx} if !lshp.IndexIsValid(idx) { continue } unIndex = lshp.Offset(idx) return - } else if lay.Is4D() { - idx, ok := lay.Index4DFrom2D(lx, ly) + } else if lb.Is4D() { + idx, ok := lb.Index4DFrom2D(lx, ly) if !ok { continue } diff --git a/netview/laymesh.go b/netview/laymesh.go index b0b5fab4..4d9d668e 100644 --- a/netview/laymesh.go +++ b/netview/laymesh.go @@ -5,9 +5,9 @@ package netview import ( + "cogentcore.org/core/gpu/shape" "cogentcore.org/core/math32" "cogentcore.org/core/tensor" - "cogentcore.org/core/vgpu/vshape" "cogentcore.org/core/xyz" "github.com/emer/emergent/v2/emer" ) @@ -35,7 +35,7 
@@ func NewLayMesh(sc *xyz.Scene, nv *NetView, lay emer.Layer) *LayMesh { lm := &LayMesh{} lm.View = nv lm.Lay = lay - lm.Name = lay.Name() + lm.Name = lay.StyleName() sc.SetMesh(lm) return lm } @@ -46,7 +46,7 @@ func (lm *LayMesh) MeshSize() (nVtx, nIndex int, hasColor bool) { if lm.Lay == nil { return 0, 0, true } - shp := lm.Lay.Shape() + shp := &lm.Lay.AsEmer().Shape lm.Shape.CopyShape(shp) if lm.View.Params.Raster.On { if shp.NumDims() == 4 { @@ -69,7 +69,7 @@ func (lm *LayMesh) Size2D() (nVtx, nIndex int) { nx := lm.Shape.DimSize(1) segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) nVtx = vtxSz * 5 * nz * nx nIndex = idxSz * 5 * nz * nx return @@ -83,7 +83,7 @@ func (lm *LayMesh) Size4D() (nVtx, nIndex int) { segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) nVtx = vtxSz * 5 * npz * npx * nuz * nux nIndex = idxSz * 5 * npz * npx * nuz * nux return @@ -131,7 +131,7 @@ func (lm *LayMesh) Set2D(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry uo := (1.0 - uw) segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -144,23 +144,23 @@ func (lm *LayMesh) Set2D(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry x0 := uo + float32(xi) _, scaled, clr, _ := lm.View.UnitValue(lm.Lay, []int{zi, xi}) v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := 0.5 * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, uw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, uw, ht, z0, 0, x0+uw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, uw, uw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, uw, ht, x0, 0, z0+uw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, uw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, uw, ht, z0, 0, x0+uw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, uw, uw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, uw, ht, x0, 0, z0+uw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, uw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, -ht, x0+uw, segs, segs, pos) // px = nx norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, -ht, x0, segs, segs, 
pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, uw, uw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, uw, ht, x0, -ht, z0+uw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, uw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, -ht, x0+uw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, uw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, uw, uw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, uw, ht, x0, -ht, z0+uw, segs, segs, pos) // pz } pidx++ } @@ -194,7 +194,7 @@ func (lm *LayMesh) Set4D(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -211,23 +211,23 @@ func (lm *LayMesh) Set4D(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry x0 := xp0 + xsc*(uo+float32(xui)) _, scaled, clr, _ := lm.View.UnitValue(lm.Lay, []int{zpi, xpi, zui, xui}) v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := 0.5 * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = 
nx norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz } pidx++ } diff --git a/netview/layraster.go b/netview/layraster.go index 7a3fbc7e..aba711b4 100644 --- a/netview/layraster.go +++ b/netview/layraster.go @@ -5,46 +5,46 @@ package netview import ( + "cogentcore.org/core/gpu/shape" "cogentcore.org/core/math32" - "cogentcore.org/core/vgpu/vshape" ) func (lm *LayMesh) RasterSize2D() (nVtx, nIndex int) { - rs := lm.Lay.RepShape() - nuz := rs.DimSize(0) - nux := rs.DimSize(1) + ss := &lm.Lay.AsEmer().SampleShape + nuz := ss.DimSize(0) + nux := ss.DimSize(1) nz := nuz*nux + nuz - 1 nx := lm.View.Params.Raster.Max + 1 segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) nVtx = vtxSz * 5 * nz * nx nIndex = idxSz * 5 * nz * nx return } func (lm *LayMesh) RasterSize4D() (nVtx, nIndex int) { - rs := lm.Lay.RepShape() - npz := rs.DimSize(0) // p = pool - npx := rs.DimSize(1) - nuz := rs.DimSize(2) // u = unit - nux := rs.DimSize(3) + ss := &lm.Lay.AsEmer().SampleShape + npz := ss.DimSize(0) // p = pool + npx := ss.DimSize(1) + nuz := ss.DimSize(2) // u = unit + nux := ss.DimSize(3) nz := nuz*nux + nuz - 1 nx := lm.View.Params.Raster.Max + 1 segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) nVtx = vtxSz * 5 * npz * npx * nz * nx nIndex = idxSz * 5 * npz * npx * nz * nx return } func (lm *LayMesh) RasterSet2DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry math32.ArrayU32) { - rs := lm.Lay.RepShape() - nuz := rs.DimSize(0) - nux := rs.DimSize(1) + ss := &lm.Lay.AsEmer().SampleShape + nuz := ss.DimSize(0) + nux := ss.DimSize(1) nz := nuz*nux + nuz - 1 nx := lm.View.Params.Raster.Max + 1 htsc := 0.5 * lm.View.Params.Raster.UnitHeight @@ -71,7 +71,7 @@ func (lm *LayMesh) RasterSet2DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -96,23 +96,23 @@ func (lm *LayMesh) RasterSet2DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, xoff++ } v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := htsc * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, 
poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz } pidx++ } @@ -123,9 +123,9 @@ func (lm *LayMesh) RasterSet2DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, } func (lm *LayMesh) RasterSet2DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry math32.ArrayU32) { - rs := lm.Lay.RepShape() - nuz := rs.DimSize(0) - nux := rs.DimSize(1) + ss := &lm.Lay.AsEmer().SampleShape + nuz := ss.DimSize(0) + nux := ss.DimSize(1) nx := nuz*nux + nuz - 1 nz := lm.View.Params.Raster.Max + 1 htsc := 0.5 * lm.View.Params.Raster.UnitHeight @@ -152,7 +152,7 @@ func (lm *LayMesh) RasterSet2DZ(vtxAry, normAry, texAry, clrAry 
math32.ArrayF32, segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -177,23 +177,23 @@ func (lm *LayMesh) RasterSet2DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, zoff = 0 } v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := htsc * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz } pidx++ } @@ -203,11 +203,11 @@ 
func (lm *LayMesh) RasterSet2DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, } func (lm *LayMesh) RasterSet4DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry math32.ArrayU32) { - rs := lm.Lay.RepShape() - npz := rs.DimSize(0) // p = pool - npx := rs.DimSize(1) - nuz := rs.DimSize(2) // u = unit - nux := rs.DimSize(3) + ss := &lm.Lay.AsEmer().SampleShape + npz := ss.DimSize(0) // p = pool + npx := ss.DimSize(1) + nuz := ss.DimSize(2) // u = unit + nux := ss.DimSize(3) nz := nuz*nux + nuz - 1 nx := lm.View.Params.Raster.Max + 1 @@ -247,7 +247,7 @@ func (lm *LayMesh) RasterSet4DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -276,23 +276,23 @@ func (lm *LayMesh) RasterSet4DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, xoff++ } v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := htsc * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, 
normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz } pidx++ } @@ -304,11 +304,11 @@ func (lm *LayMesh) RasterSet4DX(vtxAry, normAry, texAry, clrAry math32.ArrayF32, } func (lm *LayMesh) RasterSet4DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, idxAry math32.ArrayU32) { - rs := lm.Lay.RepShape() - npz := rs.DimSize(0) // p = pool - npx := rs.DimSize(1) - nuz := rs.DimSize(2) // u = unit - nux := rs.DimSize(3) + ss := &lm.Lay.AsEmer().SampleShape + npz := ss.DimSize(0) // p = pool + npx := ss.DimSize(1) + nuz := ss.DimSize(2) // u = unit + nux := ss.DimSize(3) nx := nuz*nux + nuz - 1 nz := lm.View.Params.Raster.Max + 1 @@ -348,7 +348,7 @@ func (lm *LayMesh) RasterSet4DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, segs := 1 - vtxSz, idxSz := vshape.PlaneN(segs, segs) + vtxSz, idxSz := shape.PlaneN(segs, segs) pidx := 0 // plane index pos := math32.Vector3{} @@ -377,23 +377,23 @@ func (lm *LayMesh) RasterSet4DZ(vtxAry, normAry, texAry, clrAry math32.ArrayF32, zoff = 0 } v4c := math32.NewVector4Color(clr) - vshape.SetColor(clrAry, poff, 5*vtxSz, v4c) + shape.SetColor(clrAry, poff, 5*vtxSz, v4c) ht := htsc * math32.Abs(scaled) if ht < MinUnitHeight { ht = MinUnitHeight } if scaled >= 0 { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, -1, -1, xuw, ht, x0, 0, z0, segs, segs, pos) // nz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, -1, -1, zuw, ht, z0, 0, x0+xuw, segs, segs, pos) // px + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, 0, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, ht, segs, segs, pos) // py <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, 0, z0+zuw, segs, segs, pos) // pz } else { - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm 
- vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- - vshape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff, ioff, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0, segs, segs, pos) // nz = pz norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+1*vtxSz, ioff+1*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0+xuw, segs, segs, pos) // px = nx norm + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+2*vtxSz, ioff+2*idxSz, math32.Z, math32.Y, 1, -1, zuw, ht, z0, -ht, x0, segs, segs, pos) // nx + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+3*vtxSz, ioff+3*idxSz, math32.X, math32.Z, 1, 1, xuw, zuw, x0, z0, -ht, segs, segs, pos) // ny <- + shape.SetPlane(vtxAry, normAry, texAry, idxAry, poff+4*vtxSz, ioff+4*idxSz, math32.X, math32.Y, 1, -1, xuw, ht, x0, -ht, z0+zuw, segs, segs, pos) // pz } pidx++ } diff --git a/netview/netdata.go b/netview/netdata.go index 9db7e950..a4ad4352 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -17,6 +17,7 @@ import ( "strconv" "strings" + "cogentcore.org/core/base/errors" "cogentcore.org/core/core" "cogentcore.org/core/math32" "cogentcore.org/core/plot/plotcore" @@ -111,7 +112,7 @@ func (nd *NetData) Init(net emer.Network, max int, noSynData bool, maxData int) // Config configures the data storage for given network // only re-allocates if needed. func (nd *NetData) Config() { - nlay := nd.Net.NLayers() + nlay := nd.Net.NumLayers() if nlay == 0 { return } @@ -143,26 +144,26 @@ func (nd *NetData) Config() { makeData: if len(nd.LayData) != nlay { nd.LayData = make(map[string]*LayData, nlay) - for li := 0; li < nlay; li++ { - lay := nd.Net.Layer(li) - nm := lay.Name() - ld := &LayData{LayName: nm, NUnits: lay.Shape().Len()} + for li := range nlay { + lay := nd.Net.EmerLayer(li).AsEmer() + nm := lay.Name + ld := &LayData{LayName: nm, NUnits: lay.Shape.Len()} nd.LayData[nm] = ld if nd.NoSynData { ld.FreePaths() } else { - ld.AllocSendPaths(lay) + ld.AllocSendPaths(lay.EmerLayer) } } if !nd.NoSynData { - for li := 0; li < nlay; li++ { - rlay := nd.Net.Layer(li) - rld := nd.LayData[rlay.Name()] - rld.RecvPaths = make([]*PathData, rlay.NRecvPaths()) - for ri := 0; ri < rlay.NRecvPaths(); ri++ { + for li := range nlay { + rlay := nd.Net.EmerLayer(li) + rld := nd.LayData[rlay.StyleName()] + rld.RecvPaths = make([]*PathData, rlay.NumRecvPaths()) + for ri := 0; ri < rlay.NumRecvPaths(); ri++ { rpj := rlay.RecvPath(ri) - slay := rpj.SendLay() - sld := nd.LayData[slay.Name()] + slay := rpj.SendLayer() + sld := nd.LayData[slay.StyleName()] for _, spj := range sld.SendPaths { if spj.Path == rpj { rld.RecvPaths[ri] = spj // link @@ -172,9 +173,9 @@ makeData: } } } else { - for li := 0; li < nlay; li++ { - lay := nd.Net.Layer(li) - ld := nd.LayData[lay.Name()] + for li := range nlay { + lay := nd.Net.EmerLayer(li) + ld := nd.LayData[lay.StyleName()] if nd.NoSynData { ld.FreePaths() } else { @@ -183,15 +184,15 @@ makeData: } } vmax := vlen * rmax * nd.MaxData - for li := 0; li < nlay; li++ { - lay := nd.Net.Layer(li) - nm := lay.Name() + for li := range nlay { + lay := nd.Net.EmerLayer(li).AsEmer() + nm := lay.Name ld, ok := nd.LayData[nm] if !ok { 
nd.LayData = nil goto makeData } - ld.NUnits = lay.Shape().Len() + ld.NUnits = lay.Shape.Len() nu := ld.NUnits ltot := vmax * nu if len(ld.Data) != ltot { @@ -221,7 +222,7 @@ makeData: // and raster counter value -- if negative, then an internal // wraping-around counter is used. func (nd *NetData) Record(ctrs string, rastCtr, rastMax int) { - nlay := nd.Net.NLayers() + nlay := nd.Net.NumLayers() if nlay == 0 { return } @@ -248,11 +249,11 @@ func (nd *NetData) Record(ctrs string, rastCtr, rastMax int) { nd.UnMinPer[mmidx+vi] = math.MaxFloat32 nd.UnMaxPer[mmidx+vi] = -math.MaxFloat32 } - for li := 0; li < nlay; li++ { - lay := nd.Net.Layer(li) - laynm := lay.Name() + for li := range nlay { + lay := nd.Net.EmerLayer(li).AsEmer() + laynm := lay.Name ld := nd.LayData[laynm] - nu := lay.Shape().Len() + nu := lay.Shape.Len() nvu := vlen * maxData * nu for vi, vnm := range nd.UnVars { mn := &nd.UnMinPer[mmidx+vi] @@ -334,7 +335,7 @@ func (nd *NetData) RecordSyns() { if nd.NoSynData { return } - nlay := nd.Net.NLayers() + nlay := nd.Net.NumLayers() if nlay == 0 { return } @@ -343,11 +344,11 @@ func (nd *NetData) RecordSyns() { nd.SynMinVar[vi] = math.MaxFloat32 nd.SynMaxVar[vi] = -math.MaxFloat32 } - for li := 0; li < nlay; li++ { - lay := nd.Net.Layer(li) - laynm := lay.Name() + for li := range nlay { + lay := nd.Net.EmerLayer(li) + laynm := lay.StyleName() ld := nd.LayData[laynm] - for si := 0; si < lay.NSendPaths(); si++ { + for si := 0; si < lay.NumSendPaths(); si++ { spd := ld.SendPaths[si] spd.RecordData(nd) } @@ -443,19 +444,19 @@ func (nd *NetData) RecvUnitValue(laynm string, vnm string, uidx1d int) (float32, if nd.NoSynData || !ok || nd.PathLay == "" { return 0, false } - recvLay := nd.Net.LayerByName(nd.PathLay) + recvLay := errors.Ignore1(nd.Net.EmerLayerByName(nd.PathLay)).AsEmer() if recvLay == nil { return 0, false } var pj emer.Path var err error if nd.PathType != "" { - pj, err = recvLay.SendNameTypeTry(laynm, nd.PathType) + pj, err = recvLay.RecvPathBySendNameType(laynm, nd.PathType) if pj == nil { - pj, err = recvLay.SendNameTry(laynm) + pj, err = recvLay.RecvPathBySendName(laynm) } } else { - pj, err = recvLay.SendNameTry(laynm) + pj, err = recvLay.RecvPathBySendName(laynm) } if pj == nil { return 0, false @@ -478,7 +479,7 @@ func (nd *NetData) RecvUnitValue(laynm string, vnm string, uidx1d int) (float32, if synIndex < 0 { return 0, false } - nsyn := pj.Syn1DNum() + nsyn := pj.NumSyns() val := spd.SynData[varIndex*nsyn+synIndex] return val, true } @@ -491,19 +492,19 @@ func (nd *NetData) SendUnitValue(laynm string, vnm string, uidx1d int) (float32, if nd.NoSynData || !ok || nd.PathLay == "" { return 0, false } - sendLay := nd.Net.LayerByName(nd.PathLay) + sendLay := errors.Ignore1(nd.Net.EmerLayerByName(nd.PathLay)).AsEmer() if sendLay == nil { return 0, false } var pj emer.Path var err error if nd.PathType != "" { - pj, err = sendLay.RecvNameTypeTry(laynm, nd.PathType) + pj, err = sendLay.SendPathByRecvNameType(laynm, nd.PathType) if pj == nil { - pj, err = sendLay.RecvNameTry(laynm) + pj, err = sendLay.SendPathByRecvName(laynm) } } else { - pj, err = sendLay.RecvNameTry(laynm) + pj, err = sendLay.SendPathByRecvName(laynm) } if pj == nil { return 0, false @@ -526,7 +527,7 @@ func (nd *NetData) SendUnitValue(laynm string, vnm string, uidx1d int) (float32, if synIndex < 0 { return 0, false } - nsyn := pj.Syn1DNum() + nsyn := pj.NumSyns() val := rpd.SynData[varIndex*nsyn+synIndex] return val, true } diff --git a/netview/netview.go b/netview/netview.go index 1db3fd20..8df86a3d 
100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -171,7 +171,7 @@ func (nv *NetView) SetMaxRecs(max int) { // HasLayers returns true if network has any layers -- else no display func (nv *NetView) HasLayers() bool { - if nv.Net == nil || nv.Net.NLayers() == 0 { + if nv.Net == nil || nv.Net.NumLayers() == 0 { return false } return true @@ -444,7 +444,7 @@ func (nv *NetView) RecTrackLatest() bool { // layEven ensures that the number of layer variables is an even number if true // (used for display but not storage). func (nv *NetView) NetVarsList(net emer.Network, layEven bool) (nvars, synvars []string) { - if net == nil || net.NLayers() == 0 { + if net == nil || net.NumLayers() == 0 { return nil, nil } unvars := net.UnitVarNames() @@ -539,27 +539,28 @@ func (nv *NetView) UpdateLayers() { sw := nv.SceneWidget() se := sw.SceneXYZ() - if nv.Net == nil || nv.Net.NLayers() == 0 { + if nv.Net == nil || nv.Net.NumLayers() == 0 { se.DeleteChildren() se.Meshes.Reset() return } + nb := nv.Net.AsEmer() if nv.NeedsRebuild() { se.Background = colors.Scheme.Background } - nlay := nv.Net.NLayers() + nlay := nv.Net.NumLayers() laysGp := se.ChildByName("Layers", 0).(*xyz.Group) layConfig := tree.TypePlan{} - for li := 0; li < nlay; li++ { - ly := nv.Net.Layer(li) - layConfig.Add(types.For[xyz.Group](), ly.Name()) + for li := range nlay { + ly := nv.Net.EmerLayer(li) + layConfig.Add(types.For[xyz.Group](), ly.StyleName()) } if !tree.Update(laysGp, layConfig) { for li := range laysGp.Children { - ly := nv.Net.Layer(li) - lmesh := se.MeshByName(ly.Name()) + ly := nv.Net.EmerLayer(li) + lmesh := se.MeshByName(ly.StyleName()) se.SetMesh(lmesh) // does update } return @@ -569,35 +570,35 @@ func (nv *NetView) UpdateLayers() { gpConfig.Add(types.For[LayObj](), "layer") gpConfig.Add(types.For[LayName](), "name") - nmin, nmax := nv.Net.Bounds() + nmin, nmax := nb.MinPos, nb.MaxPos nsz := nmax.Sub(nmin).Sub(math32.Vec3(1, 1, 0)).Max(math32.Vec3(1, 1, 1)) nsc := math32.Vec3(1.0/nsz.X, 1.0/nsz.Y, 1.0/nsz.Z) szc := math32.Max(nsc.X, nsc.Y) poff := math32.Vector3Scalar(0.5) poff.Y = -0.5 for li, lgi := range laysGp.Children { - ly := nv.Net.Layer(li) - lmesh := se.MeshByName(ly.Name()) + ly := nv.Net.EmerLayer(li) + lb := ly.AsEmer() + lmesh := se.MeshByName(ly.StyleName()) if lmesh == nil { NewLayMesh(se, nv, ly) } else { lmesh.(*LayMesh).Lay = ly // make sure } lg := lgi.(*xyz.Group) - gpConfig[1].Name = ly.Name() // text2d textures use obj name, so must be unique + gpConfig[1].Name = ly.StyleName() // text2d textures use obj name, so must be unique tree.Update(lg, gpConfig) - lp := ly.Pos() + lp := lb.Pos.Pos lp.Y = -lp.Y // reverse direction lp = lp.Sub(nmin).Mul(nsc).Sub(poff) - rp := ly.RelPos() lg.Pose.Pos.Set(lp.X, lp.Z, lp.Y) - lg.Pose.Scale.Set(nsc.X*rp.Scale, szc, nsc.Y*rp.Scale) + lg.Pose.Scale.Set(nsc.X*lb.Pos.Scale, szc, nsc.Y*lb.Pos.Scale) lo := lg.Child(0).(*LayObj) lo.Defaults() - lo.LayName = ly.Name() + lo.LayName = ly.StyleName() lo.NetView = nv - lo.SetMeshName(ly.Name()) + lo.SetMeshName(ly.StyleName()) lo.Material.Color = colors.FromRGB(255, 100, 255) lo.Material.Reflective = 8 lo.Material.Bright = 8 @@ -608,7 +609,7 @@ func (nv *NetView) UpdateLayers() { txt := lg.Child(1).(*LayName) txt.Defaults() txt.NetView = nv - txt.SetText(ly.Name()) + txt.SetText(ly.StyleName()) txt.Pose.Scale = math32.Vector3Scalar(nv.Params.LayNmSize).Div(lg.Pose.Scale) txt.Pose.RotateOnAxis(0, 1, 0, 180) txt.Styles.Background = colors.Uniform(colors.Transparent) @@ -647,11 +648,12 @@ func (nv *NetView) 
ReadUnlock() { // UnitVal returns the raw value, scaled value, and color representation // for given unit of given layer. scaled is in range -1..1 func (nv *NetView) UnitValue(lay emer.Layer, idx []int) (raw, scaled float32, clr color.RGBA, hasval bool) { - idx1d := lay.Shape().Offset(idx) - if idx1d >= lay.Shape().Len() { + lb := lay.AsEmer() + idx1d := lb.Shape.Offset(idx) + if idx1d >= lb.Shape.Len() { raw, hasval = 0, false } else { - raw, hasval = nv.Data.UnitValue(lay.Name(), nv.Var, idx1d, nv.RecNo, nv.Di) + raw, hasval = nv.Data.UnitValue(lb.Name, nv.Var, idx1d, nv.RecNo, nv.Di) } scaled, clr = nv.UnitValColor(lay, idx1d, raw, hasval) return @@ -661,21 +663,21 @@ func (nv *NetView) UnitValue(lay emer.Layer, idx []int) (raw, scaled float32, cl // for given unit of given layer, and given raster counter index value (0..RasterMax) // scaled is in range -1..1 func (nv *NetView) UnitValRaster(lay emer.Layer, idx []int, rCtr int) (raw, scaled float32, clr color.RGBA, hasval bool) { - rs := lay.RepShape() - idx1d := rs.Offset(idx) - ridx := lay.RepIndexes() + lb := lay.AsEmer() + idx1d := lb.SampleShape.Offset(idx) + ridx := lb.SampleIndexes if len(ridx) == 0 { // no rep - if idx1d >= lay.Shape().Len() { + if idx1d >= lb.Shape.Len() { raw, hasval = 0, false } else { - raw, hasval = nv.Data.UnitValRaster(lay.Name(), nv.Var, idx1d, rCtr, nv.Di) + raw, hasval = nv.Data.UnitValRaster(lb.Name, nv.Var, idx1d, rCtr, nv.Di) } } else { if idx1d >= len(ridx) { raw, hasval = 0, false } else { idx1d = ridx[idx1d] - raw, hasval = nv.Data.UnitValRaster(lay.Name(), nv.Var, idx1d, rCtr, nv.Di) + raw, hasval = nv.Data.UnitValRaster(lb.Name, nv.Var, idx1d, rCtr, nv.Di) } } scaled, clr = nv.UnitValColor(lay, idx1d, raw, hasval) @@ -696,7 +698,7 @@ func (nv *NetView) UnitValColor(lay emer.Layer, idx1d int, raw float32, hasval b } if !hasval { scaled = 0 - if lay.Name() == nv.Data.PathLay && idx1d == nv.Data.PathUnIndex { + if lay.StyleName() == nv.Data.PathLay && idx1d == nv.Data.PathUnIndex { clr = color.RGBA{0x20, 0x80, 0x20, 0x80} } else { clr = NilColor From f5695615d30802bc3e35b5d66d55f08a7a92ff3e Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 10 Aug 2024 09:53:27 -0700 Subject: [PATCH 05/10] all building with new api. 
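This updates all call sites to the renamed emer API: NLayers -> NumLayers, Layer(i) -> EmerLayer(i), LayerByName -> EmerLayerByName (which now returns an error), the Name() / Shape() methods -> the Name / Shape fields on the LayerBase returned by AsEmer() (or StyleName() for params styling), and the Rep* (representative unit) methods -> the Sample* equivalents. For reference, a minimal sketch of the call-site migration under the API as of this patch; the package name, the printLayerSizes function, and the "Input" layer name are illustrative only, not part of this commit:

    package example

    import (
        "fmt"

        "github.com/emer/emergent/v2/emer"
    )

    // printLayerSizes shows the new accessor pattern: EmerLayer(i)
    // returns the interface layer, and AsEmer() exposes the LayerBase
    // Name and Shape fields directly.
    func printLayerSizes(net emer.Network) {
        for li := range net.NumLayers() {
            lb := net.EmerLayer(li).AsEmer()
            fmt.Printf("%s: %d units\n", lb.Name, lb.Shape.Len())
        }
        // Name-based lookup now returns an error instead of nil.
        // "Input" is a hypothetical layer name:
        if ly, err := net.EmerLayerByName("Input"); err == nil {
            fmt.Println("Input shape:", ly.AsEmer().Shape.Sizes)
        }
    }

The AsEmer() indirection keeps the Layer interface small while still giving generic code direct access to the shared base fields.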
--- decoder/softmax.go | 9 +++++---- ecmd/std.go | 8 ++++---- egui/netview.go | 2 +- elog/context.go | 21 +++++++++++---------- elog/stditems.go | 11 ++++++----- emer/layer.go | 7 ++----- estats/actrf.go | 16 ++++++++-------- estats/funcs.go | 19 ++++++++++--------- estats/rasters.go | 9 +++++---- 9 files changed, 52 insertions(+), 50 deletions(-) diff --git a/decoder/softmax.go b/decoder/softmax.go index 2e9ad8da..8c164bfd 100644 --- a/decoder/softmax.go +++ b/decoder/softmax.go @@ -80,7 +80,7 @@ func (sm *SoftMax) InitLayer(ncats int, layers []emer.Layer) { sm.Layers = layers nin := 0 for _, ly := range sm.Layers { - nin += ly.Shape().Len() + nin += ly.AsEmer().Shape.Len() } sm.Init(ncats, nin) } @@ -143,12 +143,13 @@ func (sm *SoftMax) ValuesTsr(name string) *tensor.Float32 { func (sm *SoftMax) Input(varNm string, di int) { off := 0 for _, ly := range sm.Layers { - tsr := sm.ValuesTsr(ly.Name()) - ly.UnitValuesTensor(tsr, varNm, di) + lb := ly.AsEmer() + tsr := sm.ValuesTsr(lb.Name) + lb.UnitValuesTensor(tsr, varNm, di) for j, v := range tsr.Values { sm.Inputs[off+j] = v } - off += ly.Shape().Len() + off += lb.Shape.Len() } } diff --git a/ecmd/std.go b/ecmd/std.go index 8dc9d550..724db092 100644 --- a/ecmd/std.go +++ b/ecmd/std.go @@ -46,7 +46,7 @@ func LogFilename(logName, netName, runName string) string { // ProcStd processes the standard args, after Parse has been called // for help, note, params, tag and wts -func (ar *Args) ProcStd(params *emer.Params) { +func (ar *Args) ProcStd(params *emer.NetParams) { if ar.Bool("help") { ar.Usage() os.Exit(0) @@ -55,8 +55,8 @@ func (ar *Args) ProcStd(params *emer.Params) { mpi.Printf("note: %s\n", note) } if pars := ar.String("params"); pars != "" { - params.ExtraSets = pars - mpi.Printf("Using ParamSet: %s\n", params.ExtraSets) + // params.ExtraSets = pars // todo: + // mpi.Printf("Using ParamSet: %s\n", params.ExtraSets) } if tag := ar.String("tag"); tag != "" { params.Tag = tag @@ -71,7 +71,7 @@ func (ar *Args) ProcStd(params *emer.Params) { // setting the log files for standard log file names using netName // and params.RunName to identify the network / sim and run params, tag, // and starting run number -func (ar *Args) ProcStdLogs(logs *elog.Logs, params *emer.Params, netName string) { +func (ar *Args) ProcStdLogs(logs *elog.Logs, params *emer.NetParams, netName string) { runName := params.RunName(ar.Int("run")) // used for naming logs, stats, etc if ar.Bool("epclog") { fnm := LogFilename("epc", netName, runName) diff --git a/egui/netview.go b/egui/netview.go index 47082f76..a9656003 100644 --- a/egui/netview.go +++ b/egui/netview.go @@ -48,6 +48,6 @@ func (gui *GUI) SaveNetData(extra string) { if gui.NetData == nil { return } - ndfn := gui.NetData.Net.Name() + "_" + extra + ".netdata.gz" + ndfn := gui.NetData.Net.AsEmer().Name + "_" + extra + ".netdata.gz" gui.NetData.SaveJSON(core.Filename(ndfn)) } diff --git a/elog/context.go b/elog/context.go index 6769c1d8..45a53d99 100644 --- a/elog/context.go +++ b/elog/context.go @@ -8,6 +8,7 @@ import ( "fmt" "log" + "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/stats/metric" "cogentcore.org/core/tensor/stats/stats" @@ -233,10 +234,10 @@ func (ctx *Context) ItemColTensorScope(scope etime.ScopeKey, itemNm string) tens /////////////////////////////////////////////////// // Network -// Layer returns layer by name as the emer.Layer interface -- -// you may then need to convert to a concrete type depending. 
+// Layer returns layer by name as the emer.Layer interface. +// You may then need to convert to a concrete type, depending on the algorithm. func (ctx *Context) Layer(layNm string) emer.Layer { - return ctx.Net.LayerByName(layNm) + return errors.Log1(ctx.Net.EmerLayerByName(layNm)) } // GetLayerTensor gets tensor of Unit values on a layer for given variable @@ -244,16 +245,16 @@ func (ctx *Context) Layer(layNm string) emer.Layer { func (ctx *Context) GetLayerTensor(layNm, unitVar string) *tensor.Float32 { ly := ctx.Layer(layNm) tsr := ctx.Stats.F32Tensor(layNm) - ly.UnitValuesTensor(tsr, unitVar, ctx.Di) + ly.AsEmer().UnitValuesTensor(tsr, unitVar, ctx.Di) return tsr } -// GetLayerRepTensor gets tensor of representative Unit values on a layer for given variable +// GetLayerSampleTensor gets tensor of representative Unit values on a layer for given variable // from current ctx.Di data parallel index. -func (ctx *Context) GetLayerRepTensor(layNm, unitVar string) *tensor.Float32 { +func (ctx *Context) GetLayerSampleTensor(layNm, unitVar string) *tensor.Float32 { ly := ctx.Layer(layNm) tsr := ctx.Stats.F32Tensor(layNm) - ly.UnitValuesRepTensor(tsr, unitVar, ctx.Di) + ly.AsEmer().UnitValuesSampleTensor(tsr, unitVar, ctx.Di) return tsr } @@ -265,10 +266,10 @@ func (ctx *Context) SetLayerTensor(layNm, unitVar string) *tensor.Float32 { return tsr } -// SetLayerRepTensor sets tensor of representative Unit values on a layer for given variable +// SetLayerSampleTensor sets tensor of representative Unit values on a layer for given variable // to current ctx.Di data parallel index. -func (ctx *Context) SetLayerRepTensor(layNm, unitVar string) *tensor.Float32 { - tsr := ctx.GetLayerRepTensor(layNm, unitVar) +func (ctx *Context) SetLayerSampleTensor(layNm, unitVar string) *tensor.Float32 { + tsr := ctx.GetLayerSampleTensor(layNm, unitVar) ctx.SetTensor(tsr) return tsr } diff --git a/elog/stditems.go b/elog/stditems.go index 0e6788a3..53fe7843 100644 --- a/elog/stditems.go +++ b/elog/stditems.go @@ -9,6 +9,7 @@ import ( "reflect" "time" + "cogentcore.org/core/base/errors" "cogentcore.org/core/math32/minmax" "cogentcore.org/core/tensor/stats/split" "cogentcore.org/core/tensor/stats/stats" @@ -292,26 +293,26 @@ func (lg *Logs) RunStats(stats ...string) { // to it so there aren't any duplicate items. // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.Modes, etm etime.Times, layClasses ...string) { - layers := net.LayersByClass(layClasses...) + layers := net.AsEmer().LayersByClass(layClasses...)
for _, lnm := range layers { clnm := lnm - cly := net.LayerByName(clnm) + cly := errors.Log1(net.EmerLayerByName(clnm)) itmNm := clnm + "_" + varNm itm, has := lg.ItemByName(itmNm) if has { itm.Write[etime.Scope(mode, etm)] = func(ctx *Context) { - ctx.SetLayerRepTensor(clnm, varNm) + ctx.SetLayerSampleTensor(clnm, varNm) } } else { lg.AddItem(&Item{ Name: itmNm, Type: reflect.Float32, - CellShape: cly.RepShape().Sizes, + CellShape: cly.AsEmer().SampleShape.Sizes, FixMin: true, Range: minmax.F32{Max: 1}, Write: WriteMap{ etime.Scope(mode, etm): func(ctx *Context) { - ctx.SetLayerRepTensor(clnm, varNm) + ctx.SetLayerSampleTensor(clnm, varNm) }}}) } } diff --git a/emer/layer.go b/emer/layer.go index 42da30f9..e41913cc 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -10,6 +10,7 @@ import ( "log" "math" + "cogentcore.org/core/base/slicesx" "cogentcore.org/core/math32" "cogentcore.org/core/tensor" "github.com/emer/emergent/v2/params" @@ -366,11 +367,7 @@ func (ly *LayerBase) NumPools() int { // Returns error on invalid var name. func (ly *LayerBase) UnitValues(vals *[]float32, varNm string, di int) error { nn := ly.NumUnits() - if *vals == nil || cap(*vals) < int(nn) { - *vals = make([]float32, nn) - } else if len(*vals) < int(nn) { - *vals = (*vals)[0:nn] - } + *vals = slicesx.SetLength(*vals, nn) vidx, err := ly.EmerLayer.UnitVarIndex(varNm) if err != nil { nan := math32.NaN() diff --git a/estats/actrf.go b/estats/actrf.go index 78fe5f9e..736594da 100644 --- a/estats/actrf.go +++ b/estats/actrf.go @@ -27,18 +27,18 @@ func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error for _, anm := range arfs { sp := strings.Split(anm, ":") lnm := sp[0] - _, err = net.LayerByNameTry(lnm) + _, err = net.EmerLayerByName(lnm) if err != nil { fmt.Printf("estats.InitActRFs: %s\n", err) continue } - lvt := st.SetLayerRepTensor(net, lnm, varnm, 0) + lvt := st.SetLayerSampleTensor(net, lnm, varnm, 0) tnm := sp[1] var tvt *tensor.Float32 - _, err = net.LayerByNameTry(tnm) + _, err = net.EmerLayerByName(tnm) if err == nil { - tvt = st.SetLayerRepTensor(net, tnm, varnm, 0) + tvt = st.SetLayerSampleTensor(net, tnm, varnm, 0) } else { ok := false tvt, ok = st.F32Tensors[tnm] @@ -65,16 +65,16 @@ func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32, di in anm := rf.Name sp := strings.Split(anm, ":") lnm := sp[0] - _, err := net.LayerByNameTry(lnm) + _, err := net.EmerLayerByName(lnm) if err != nil { continue } - lvt := st.SetLayerRepTensor(net, lnm, varnm, di) + lvt := st.SetLayerSampleTensor(net, lnm, varnm, di) tnm := sp[1] var tvt *tensor.Float32 - _, err = net.LayerByNameTry(tnm) + _, err = net.EmerLayerByName(tnm) if err == nil { - tvt = st.SetLayerRepTensor(net, tnm, varnm, di) + tvt = st.SetLayerSampleTensor(net, tnm, varnm, di) } else { // random state tvt = st.F32Tensor(tnm) } diff --git a/estats/funcs.go b/estats/funcs.go index b8a18b6a..eefab34e 100644 --- a/estats/funcs.go +++ b/estats/funcs.go @@ -5,6 +5,7 @@ package estats import ( + "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor" "cogentcore.org/core/tensor/stats/metric" "cogentcore.org/core/tensor/stats/stats" @@ -18,26 +19,26 @@ import ( // to a F32Tensor with name = layNm // di is a data parallel index di, for networks capable of processing input patterns in parallel.
func (st *Stats) SetLayerTensor(net emer.Network, layNm, unitVar string, di int) *tensor.Float32 { - ly := net.LayerByName(layNm) + ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() tsr := st.F32TensorDi(layNm, di) ly.UnitValuesTensor(tsr, unitVar, di) return tsr } -// SetLayerRepTensor sets tensor of representative Unit values on a layer +// SetLayerSampleTensor sets tensor of representative Unit values on a layer // for given variable to a F32Tensor with name = layNm // di is a data parallel index di, for networks capable of processing input patterns in parallel. -func (st *Stats) SetLayerRepTensor(net emer.Network, layNm, unitVar string, di int) *tensor.Float32 { - ly := net.LayerByName(layNm) +func (st *Stats) SetLayerSampleTensor(net emer.Network, layNm, unitVar string, di int) *tensor.Float32 { + ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() tsr := st.F32TensorDi(layNm, di) - ly.UnitValuesRepTensor(tsr, unitVar, di) + ly.UnitValuesSampleTensor(tsr, unitVar, di) return tsr } // LayerVarsCorrel returns the correlation between two variables on a given layer // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 { - ly := net.LayerByName(layNm) + ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() tsrA := st.F32TensorDi(layNm, di) // standard re-used storage tensor ly.UnitValuesTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor @@ -49,11 +50,11 @@ func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB str // Rep version uses representative units. // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 { - ly := net.LayerByName(layNm) + ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() tsrA := st.F32TensorDi(layNm, di) // standard re-used storage tensor - ly.UnitValuesRepTensor(tsrA, unitVarA, di) + ly.UnitValuesSampleTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor - ly.UnitValuesRepTensor(tsrB, unitVarB, di) + ly.UnitValuesSampleTensor(tsrB, unitVarB, di) return metric.Correlation32(tsrA.Values, tsrB.Values) } diff --git a/estats/rasters.go b/estats/rasters.go index b62c9e40..f04e5efd 100644 --- a/estats/rasters.go +++ b/estats/rasters.go @@ -5,6 +5,7 @@ package estats import ( + "cogentcore.org/core/base/errors" "cogentcore.org/core/tensor" "github.com/emer/emergent/v2/emer" ) @@ -14,11 +15,11 @@ import ( func (st *Stats) ConfigRasters(net emer.Network, maxCyc int, layers []string) { st.Rasters = layers for _, lnm := range st.Rasters { - ly := net.LayerByName(lnm) + ly := errors.Log1(net.EmerLayerByName(lnm)).AsEmer() sr := st.F32Tensor("Raster_" + lnm) - nu := len(ly.RepIndexes()) + nu := len(ly.SampleIndexes) if nu == 0 { - nu = ly.Shape().Len() + nu = ly.Shape.Len() } sr.SetShape([]int{nu, maxCyc}, "Nrn", "Cyc") } @@ -36,7 +37,7 @@ func (st *Stats) SetRasterCol(sr, tsr *tensor.Float32, col int) { // di is a data parallel index di, for networks capable of processing input patterns in parallel. 
func (st *Stats) RasterRec(net emer.Network, cyc int, varNm string, di int) { for _, lnm := range st.Rasters { - tsr := st.SetLayerRepTensor(net, lnm, varNm, di) + tsr := st.SetLayerSampleTensor(net, lnm, varNm, di) sr := st.F32Tensor("Raster_" + lnm) if sr.DimSize(1) <= cyc { continue From 81ec3718a12208fa13853907a9cc21c3167d2055 Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 10 Aug 2024 10:27:52 -0700 Subject: [PATCH 06/10] layout layers in base --- emer/layer.go | 4 ++-- emer/network.go | 59 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 2 deletions(-) diff --git a/emer/layer.go b/emer/layer.go index e41913cc..efc084fd 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -288,11 +288,11 @@ func (ly *LayerBase) PlaceAbove(other Layer) { ly.Pos.SetAbove(other.AsEmer().Name) } -// Size returns the display size of this layer for the 3D view. +// DisplaySize returns the display size of this layer for the 3D view. // see Pos field for general info. // This is multiplied by the Pos.Scale factor to rescale // layer sizes, and takes into account 2D and 4D layer structures. -func (ly *LayerBase) Size() math32.Vector2 { +func (ly *LayerBase) DisplaySize() math32.Vector2 { if ly.Pos.Scale == 0 { ly.Pos.Defaults() } diff --git a/emer/network.go b/emer/network.go index 61f54dfa..b42dd429 100644 --- a/emer/network.go +++ b/emer/network.go @@ -11,10 +11,12 @@ import ( "io" "strings" + "cogentcore.org/core/base/errors" "cogentcore.org/core/base/randx" "cogentcore.org/core/core" "cogentcore.org/core/math32" "github.com/emer/emergent/v2/params" + "github.com/emer/emergent/v2/relpos" "github.com/emer/emergent/v2/weights" ) @@ -290,3 +292,60 @@ func (nt *NetworkBase) LayersByClass(classes ...string) []string { } return layers } + +// LayoutLayers computes the 3D layout of layers based on their relative +// position settings. +func (nt *NetworkBase) LayoutLayers() { + en := nt.EmerNetwork + nlay := en.NumLayers() + for range 5 { + var lstly *LayerBase + for li := range nlay { + ly := en.EmerLayer(li).AsEmer() + var oly *LayerBase + if lstly != nil && ly.Pos.Rel == relpos.NoRel { + if ly.Pos.Pos.X != 0 || ly.Pos.Pos.Y != 0 || ly.Pos.Pos.Z != 0 { + // Position has been modified, don't mess with it. + continue + } + oly = lstly + ly.Pos = relpos.Pos{Rel: relpos.Above, Other: lstly.Name, XAlign: relpos.Middle, YAlign: relpos.Front} + } else { + if ly.Pos.Other != "" { + olyi, err := nt.EmerLayerByName(ly.Pos.Other) + if errors.Log(err) != nil { + continue + } + oly = olyi.AsEmer() + } else if lstly != nil { + oly = lstly + ly.Pos = relpos.Pos{Rel: relpos.Above, Other: lstly.Name, XAlign: relpos.Middle, YAlign: relpos.Front} + } + } + if oly != nil { + ly.Pos.SetPos(oly.Pos.Pos, oly.DisplaySize(), ly.DisplaySize()) + } + lstly = ly + } + } + nt.layoutBoundsUpdate() +} + +// layoutBoundsUpdate updates the Min / Max display bounds for 3D display. +func (nt *NetworkBase) layoutBoundsUpdate() { + en := nt.EmerNetwork + nlay := en.NumLayers() + mn := math32.Vector3Scalar(math32.Infinity) + mx := math32.Vector3{} + for li := range nlay { + ly := en.EmerLayer(li).AsEmer() + sz := ly.DisplaySize() + ru := ly.Pos.Pos + ru.X += sz.X + ru.Y += sz.Y + mn.SetMin(ly.Pos.Pos) + mx.SetMax(ru) + } + nt.MinPos = mn + nt.MaxPos = mx +} From 77099e232215faabdd66be0e0e57fda74fa422b6 Mon Sep 17 00:00:00 2001 From: "Randall C.
O'Reilly" Date: Sat, 10 Aug 2024 11:54:03 -0700 Subject: [PATCH 07/10] renames, EmerPathByName --- doc.go | 10 +++++----- emer/netparams.go | 5 +++-- emer/network.go | 33 +++++++++++++++++++++++++-------- emer/path.go | 4 ++-- emer/typegen.go | 12 ++++++------ netview/typegen.go | 6 +----- params/typegen.go | 6 ++---- relpos/typegen.go | 2 +- 8 files changed, 45 insertions(+), 33 deletions(-) diff --git a/doc.go b/doc.go index dd3cfbb6..1a6dbf4f 100644 --- a/doc.go +++ b/doc.go @@ -23,11 +23,11 @@ and easier support for making permuted random lists, etc. * netview provides the NetView interactive 3D network viewer, implemented in the Cogent Core 3D framework. -* path is a separate package for defining patterns of connectivity between layers -(i.e., the ProjectionSpecs from C++ emergent). This is done using a fully independent -structure that *only* knows about the shapes of the two layers, and it returns a fully general -bitmap representation of the pattern of connectivity between them. The leabra.Path code -then uses these patterns to do all the nitty-gritty of connecting up neurons. +* path is a separate package for defining patterns of connectivity between layers. +This is done using a fully independent structure that *only* knows about the shapes +of the two layers, and it returns a fully general bitmap representation of the pattern +of connectivity between them. The leabra.Path code then uses these patterns to do +all the nitty-gritty of connecting up neurons. This makes the pathway code *much* simpler compared to the ProjectionSpec in C++ emergent, which was involved in both creating the pattern and also all the complexity of setting up the actual connections themselves. This should be the *last* time any of those pathway patterns diff --git a/emer/netparams.go b/emer/netparams.go index 6b7585d2..de2efa6b 100644 --- a/emer/netparams.go +++ b/emer/netparams.go @@ -187,9 +187,10 @@ func NetworkHyperParams(net Network, sheet *params.Sheet) params.Flex { func SetFloatParam(net Network, name, typ, path string, val float32) error { rpath := params.PathAfterType(path) prs := fmt.Sprintf("%g", val) + en := net.AsEmer() switch typ { case "Layer": - ly, err := net.EmerLayerByName(name) + ly, err := en.EmerLayerByName(name) if err != nil { slog.Error(err.Error()) return err @@ -200,7 +201,7 @@ func SetFloatParam(net Network, name, typ, path string, val float32) error { return err } case "Path": - pj, err := net.EmerPathByName(name) + pj, err := en.EmerPathByName(name) if err != nil { slog.Error(err.Error()) return err diff --git a/emer/network.go b/emer/network.go index b42dd429..4b0c9ad5 100644 --- a/emer/network.go +++ b/emer/network.go @@ -52,14 +52,6 @@ type Network interface { // Logging supports recording each of these where appropriate. NParallelData() int - // EmerLayerByName returns layer of given name, returns nil, error if not found. - // Layer names must be unique and a map is used so this is a fast operation. - EmerLayerByName(name string) (Layer, error) - - // EmerPathByName returns path of given name, returns error if not found. - // Path names are SendToRecv, and are looked up by parsing the name. - EmerPathByName(name string) (Path, error) - // Defaults sets default parameter values for everything in the Network. Defaults() @@ -252,6 +244,31 @@ func (nt *NetworkBase) EmerLayerByName(name string) (Layer, error) { return nil, err } +// EmerPathByName returns a path by looking it up by name. +// Paths are named SendToRecv = sending layer name "To" recv layer name. 
+// returns error message if path is not found. +func (nt *NetworkBase) EmerPathByName(name string) (Path, error) { + ti := strings.Index(name, "To") + if ti < 0 { + return nil, errors.Log(fmt.Errorf("EmerPathByName: path name must contain 'To': %s", name)) + } + sendNm := name[:ti] + recvNm := name[ti+2:] + _, err := nt.EmerLayerByName(sendNm) + if errors.Log(err) != nil { + return nil, err + } + recv, err := nt.EmerLayerByName(recvNm) + if errors.Log(err) != nil { + return nil, err + } + path, err := recv.AsEmer().RecvPathBySendName(sendNm) + if errors.Log(err) != nil { + return nil, err + } + return path, nil +} + // LayersByClass returns a list of layer names by given class(es). // Lists are compiled when network Build() function called, // or now if not yet present. diff --git a/emer/path.go b/emer/path.go index 3c440fca..6501ee1c 100644 --- a/emer/path.go +++ b/emer/path.go @@ -100,11 +100,11 @@ type Path interface { // returns error if path not found or value cannot be set. SetParam(path, val string) error - // NonDefaultParams returns a listing of all parameters in the Projection that + // NonDefaultParams returns a listing of all parameters in the Pathway that // are not at their default values -- useful for setting param styles etc. NonDefaultParams() string - // AllParams returns a listing of all parameters in the Projection + // AllParams returns a listing of all parameters in the Pathway. AllParams() string // WriteWtsJSON writes the weights from this pathway diff --git a/emer/typegen.go b/emer/typegen.go index 9c7c5b9a..62b63a80 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,11 +6,9 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available\non the units in this layer.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable, and the\nvalue gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed\nrange or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of 
given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVarNum", Doc: "UnitVarNum returns the number of Neuron-level variables\nfor this layer. This is needed for extending indexes in\nderived types.", Returns: []string{"int"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable\nover the layer", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error 
if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. 
Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation 
logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayNames", IDName: "lay-names", Doc: "LayNames is a list of layer names.\nHas convenience methods for adding, validating."}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Info", Doc: "Info contains descriptive information about the layer.\nThis is displayed in a tooltip in the network view."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Off", Doc: "Off turns off the layer, removing from all computations.\nThis provides a convenient way to dynamically test for\nthe contributions of the layer, for example."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetParams", IDName: "net-params", Doc: "NetParams handles standard parameters for a Network\n(use econfig and a Config struct for other configuration params).\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSheets", Doc: "optional additional sheets of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Sheet names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Network", Doc: "the network to apply parameters to"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) @@ -18,8 +16,10 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LaySiz var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetSize", IDName: "net-size", Doc: "NetSize is a network schema for holding a params for layer sizes.\nValues can be queried for getting sizes when configuring the network.\nUses params.Flex to support flexible parameter specification"}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the basic interface for a neural network, used for managing the structural\nelements of a network, and for visualization, I/O, etc", Methods: []types.Method{{Name: "InitName", Doc: "InitName MUST be called to initialize the network's pointer to itself as an emer.Network\nwhich enables the proper interface methods to be called. 
Also sets the name.", Args: []string{"net", "name"}}, {Name: "Name", Doc: "Name() returns name of the network", Returns: []string{"string"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting the name of objects generically", Returns: []string{"string"}}, {Name: "NLayers", Doc: "NLayers returns the number of layers in the network", Returns: []string{"int"}}, {Name: "Layer", Doc: "Layer returns layer (as emer.Layer interface) at given index -- does not\ndo extra bounds checking", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "LayerByName", Doc: "LayerByName returns layer of given name, nil if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer"}}, {Name: "LayerByNameTry", Doc: "LayerByNameTry returns layer of given name, returns error if not found.\nLayer names must be unique and a map is used so this is a fast operation", Args: []string{"name"}, Returns: []string{"Layer", "error"}}, {Name: "PathByNameTry", Doc: "PathByNameTry returns path of given name, returns error if not found.\nPath names are SendToRecv, and are looked up by parsing the name", Args: []string{"name"}, Returns: []string{"Path", "error"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network"}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to layers and paths in this network.\nCalls UpdateParams on anything set to ensure derived parameters are all updated.\nIf setMsg is true, then a message is printed to confirm each parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Network that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Network", Returns: []string{"string"}}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on the units in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all layers need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of 
fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapses in this network.\nThis list determines what is shown in the NetView (and the order of vars list).\nNot all pathways need to support all variables, but must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes network weights (and any other state that adapts with learning)\nto JSON-formatted output.", Args: []string{"w"}, Returns: []string{"error"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads network weights (and any other state that adapts with learning)\nfrom JSON-formatted input. Reads into a temporary weights.Network structure that\nis then passed to SetWts to actually set the weights.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this network from weights.Network decoded values", Args: []string{"nw"}, Returns: []string{"error"}}, {Name: "SaveWtsJSON", Doc: "SaveWtsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWtsJSON", Doc: "OpenWtsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. 
If filename has .gz extension, then file is gzip uncompressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "Bounds", Doc: "Bounds returns the minimum and maximum display coordinates of the network for 3D display", Returns: []string{"min", "max"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "LayersByClass", Doc: "LayersByClass returns a list of layer names by given class(es).\nLists are compiled when network Build() function called.\nThe layer Type is always included as a Class, along with any other\nspace-separated strings specified in Class for parameter styling, etc.\nIf no classes are passed, all layer names in order are returned.", Args: []string{"classes"}, Returns: []string{"[]string"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the minimal interface for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nMost of the standard expected functionality is defined in the\nNetworkBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the network as an *emer.NetworkBase,\nto access base functionality.", Returns: []string{"NetworkBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "NumLayers", Doc: "NumLayers returns the number of layers in the network.", Returns: []string{"int"}}, {Name: "EmerLayer", Doc: "EmerLayer returns layer as emer.Layer interface at given index.\nDoes not do extra bounds checking.", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network."}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to layers\nand paths in this network.\nCalls UpdateParams on anything set to ensure derived parameters\nare all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: 
[]string{"bool", "error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Network that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Network", Returns: []string{"string"}}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on\nthe units in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all layers need to support all variables,\nbut must safely return math32.NaN() for unsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables\non the synapses in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all pathways need to support all variables,\nbut must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes network weights (and any other state\nthat adapts with learning) to JSON-formatted output.", Args: []string{"w"}, Returns: []string{"error"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads network weights (and any other state\nthat adapts with learning) from JSON-formatted input.\nReads into a temporary weights.Network structure that\nis then passed to SetWts to actually set the weights.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this network from weights.Network\ndecoded values.", Args: []string{"nw"}, Returns: []string{"error"}}, {Name: "SaveWtsJSON", Doc: "SaveWtsJSON saves network 
weights (and any other state\nthat adapts with learning) to a JSON-formatted file.\nIf filename has .gz extension, then file is gzip compressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWtsJSON", Doc: "OpenWtsJSON opens network weights (and any other state that\nadapts with learning) from a JSON-formatted file.\nIf filename has .gz extension, then file is gzip uncompressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}}}) + +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetworkBase", IDName: "network-base", Doc: "NetworkBase defines the basic data for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.", Fields: []types.Field{{Name: "EmerNetwork", Doc: "EmerNetwork provides access to the emer.Network interface\nmethods for functions defined in the NetworkBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitNetwork function."}, {Name: "Name", Doc: "overall name of network, which helps discriminate if there are multiple."}, {Name: "WeightsFile", Doc: "filename of last weights file loaded or saved."}, {Name: "LayerNameMap", Doc: "map of name to layers, for EmerLayerByName methods"}, {Name: "LayerClassMap", Doc: "map from class name to layer names."}, {Name: "MinPos", Doc: "minimum display position in network"}, {Name: "MaxPos", Doc: "maximum display position in network"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}, {Name: "Rand", Doc: "random number generator for the network.\nall random calls must use this.\nSet seed here for weight initialization values."}, {Name: "RandSeed", Doc: "Random seed to be set at the start of configuring\nthe network and initializing the weights.\nSet this to get a different set of weights."}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. 
The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties, with the key being the\nname of the variable, and the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nIt always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Projection that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Projection", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nWe build in the 
indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON,\nwhich reads into a separate structure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. 
This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse,\nusing the natural ordering of the synapses (sender based for Axon),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nIt always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Pathway that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Pathway.", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON,\nwhich reads into a separate structure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nName is set automatically to:\nNothing algorithm-specific is implemented here.", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitPath function."}, 
{Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}}}) diff --git a/netview/typegen.go b/netview/typegen.go index ce0ae112..b25910ea 100644 --- a/netview/typegen.go +++ b/netview/typegen.go @@ -3,11 +3,7 @@ package netview import ( - "sync" - - "cogentcore.org/core/colors/colormap" - "cogentcore.org/core/core" - "cogentcore.org/core/tree" + "cogentcore.org/core/base/ordmap" "cogentcore.org/core/types" ) diff --git a/params/typegen.go b/params/typegen.go index c30bd5f8..c696c715 100644 --- a/params/typegen.go +++ b/params/typegen.go @@ -24,14 +24,12 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sel" var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sheet", IDName: "sheet", Doc: "Sheet is a CSS-like style-sheet of params.Sel values, each of which represents\na different set of specific parameter values applied according to the Sel selector:\n.Class #Name or Type.\n\nThe order of elements in the Sheet list is critical, as they are applied\nin the order given by the list (slice), and thus later Sel's can override\nthose applied earlier. Thus, you generally want to have more general Type-level\nparameters listed first, and then subsequently more specific ones (.Class and #Name)\n\nThis is the highest level of params that has an Apply method -- above this level\napplication must be done under explicit program control."}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sheets", IDName: "sheets", Doc: "Sheets is a map of named sheets -- used in the Set"}) - var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sets", IDName: "sets", Doc: "Sets is a collection of Sheets that can be chosen among\ndepending on different desired configurations etc. Thus, each Set\nrepresents a collection of different possible specific configurations,\nand different such configurations can be chosen by name to apply as desired."}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.SearchValues", IDName: "search-values", Doc: "SearchValues is a list of parameter values to search for one parameter\non a given object (specified by Name), for float-valued params.", Fields: []types.Field{{Name: "Name", Doc: "name of object with the parameter"}, {Name: "Type", Doc: "type of object with the parameter. 
This is a Base type name (e.g., Layer, Path),\nthat is at the start of the path in Network params."}, {Name: "Path", Doc: "path to the parameter within the object"}, {Name: "Start", Doc: "starting value, e.g., for restoring after searching\nbefore moving on to another parameter, for grid search."}, {Name: "Values", Doc: "values of the parameter to search"}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Styler", IDName: "styler", Doc: "The params.Styler interface exposes TypeName, Class, and Name methods\nthat allow the params.Sel CSS-style selection specifier to determine\nwhether a given parameter applies.\nAdding Set versions of Name and Class methods is a good idea but not\nneeded for this interface, so they are not included here.", Methods: []types.Method{{Name: "TypeName", Doc: "TypeName returns the name of this type. CSS Sel selector with no prefix\noperates on type name. This type is used *in addition* to the actual\nGo type name of the object, and is a kind of type-category (e.g., Layer\nor Path in emergent network objects)", Returns: []string{"string"}}, {Name: "Class", Doc: "Class returns the space-separated list of class selectors (tags).\nParameters with a . prefix target class tags.\nDo NOT include the. in the Class tags on Styler objects however\n-- those are only in the Sel selector on the params.Sel.", Returns: []string{"string"}}, {Name: "Name", Doc: "Name returns the name of this object.\nParameters with a # prefix target object names, which are typically\nunique. Note, do not include the # prefix in the Styler name.", Returns: []string{"string"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Styler", IDName: "styler", Doc: "The params.Styler interface exposes TypeName, Class, and Name methods\nthat allow the params.Sel CSS-style selection specifier to determine\nwhether a given parameter applies.\nAdding Set versions of Name and Class methods is a good idea but not\nneeded for this interface, so they are not included here.", Methods: []types.Method{{Name: "StyleType", Doc: "StyleType returns the name of this type for CSS-style matching.\nThis is used for CSS Sel selector with no prefix.\nThis type is used *in addition* to the actual Go type name\nof the object, and is a kind of type-category (e.g., Layer\nor Path in emergent network objects).", Returns: []string{"string"}}, {Name: "StyleClass", Doc: "StyleClass returns the space-separated list of class selectors (tags).\nParameters with a . prefix target class tags.\nDo NOT include the . in the Class tags on Styler objects;\nThe . is only used in the Sel selector on the params.Sel.", Returns: []string{"string"}}, {Name: "StyleName", Doc: "StyleName returns the name of this object.\nParameters with a # prefix target object names, which are typically\nunique. 
Note, do not include the # prefix in the actual object name,\nonly in the Sel selector on params.Sel.", Returns: []string{"string"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObject", IDName: "styler-obj", Doc: "The params.StylerObject interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "Object", Doc: "Object returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObject", IDName: "styler-object", Doc: "The params.StylerObject interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "Object", Doc: "Object returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Tweaks", IDName: "tweaks", Doc: "Tweaks holds parameter tweak values associated with one parameter selector.\nHas all the object values affected for a given parameter within one\nselector, that has a tweak hyperparameter set.", Fields: []types.Field{{Name: "Param", Doc: "the parameter path for this param"}, {Name: "Sel", Doc: "the param selector that set the specific value upon which tweak is based"}, {Name: "Search", Doc: "the search values for all objects covered by this selector"}}}) diff --git a/relpos/typegen.go b/relpos/typegen.go index 6fa3acb0..f55b0ae3 100644 --- a/relpos/typegen.go +++ b/relpos/typegen.go @@ -6,7 +6,7 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/relpos.Rel", IDName: "rel", Doc: "Rel defines a position relationship among layers, in terms of X,Y width and height of layer\nand associated position within a given X-Y plane,\nand Z vertical stacking of layers above and below each other.", Directives: []types.Directive{{Tool: "git", Directive: "add"}}, Fields: []types.Field{{Name: "Rel", Doc: "spatial relationship between this layer and the other layer"}, {Name: "XAlign", Doc: "] horizontal (x-axis) alignment relative to other"}, {Name: "YAlign", Doc: "] vertical (y-axis) alignment relative to other"}, {Name: "Other", Doc: "name of the other layer we are in relationship to"}, {Name: "Scale", Doc: "scaling factor applied to layer size for displaying"}, {Name: "Space", Doc: "number of unit-spaces between us"}, {Name: "XOffset", Doc: "for vertical (y-axis) alignment, amount we are offset relative to perfect alignment"}, {Name: "YOffset", Doc: "for horizontial (x-axis) alignment, amount we are offset relative to perfect alignment"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/relpos.Pos", IDName: "pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis.", Directives: []types.Directive{{Tool: "git", Directive: "add"}}, Fields: []types.Field{{Name: "Rel", Doc: "spatial relationship between this layer and the other layer"}, {Name: "XAlign", Doc: "horizontal (x-axis) alignment relative to other"}, {Name: "YAlign", Doc: "vertical (y-axis) alignment relative to other"}, {Name: "Other", Doc: "name of the other layer we are in relationship to"}, {Name: "Scale", Doc: "scaling factor applied to layer size for displaying"}, {Name: "Space", Doc: "number of unit-spaces between us"}, {Name: "XOffset", Doc: "for vertical (y-axis) alignment, amount we are offset relative to perfect alignment"}, {Name: "YOffset", Doc: "for horizontal (x-axis) alignment, amount we are offset relative to perfect alignment"}, {Name: "Pos", Doc: "Pos is the computed position of lower-left-hand corner of layer\nin 3D space, computed from the relation to other layer."}}})

 var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/relpos.Relations", IDName: "relations", Doc: "Relations are different spatial relationships (of layers)"})

From d1c3c6b3cd7f858480e753153ab3ab254229c4ed Mon Sep 17 00:00:00 2001
From: "Randall C. O'Reilly"
Date: Sat, 10 Aug 2024 13:00:52 -0700
Subject: [PATCH 08/10] moving network infrastructure from axon to emer

---
 emer/layer.go     |  19 +++--
 emer/netparams.go |   2 +-
 emer/network.go   | 195 ++++++++++++++++++++++++++++++++++------------
 3 files changed, 159 insertions(+), 57 deletions(-)

diff --git a/emer/layer.go b/emer/layer.go
index efc084fd..48445160 100644
--- a/emer/layer.go
+++ b/emer/layer.go
@@ -63,6 +63,9 @@ type Layer interface {
 	// so it is the only one that needs to be updated for derived layer types.
 	UnitVal1D(varIndex int, idx, di int) float32
 
+	// VarRange returns the min / max values for given variable
+	VarRange(varNm string) (min, max float32, err error)
+
 	// NumRecvPaths returns the number of receiving pathways.
 	NumRecvPaths() int
 
@@ -130,23 +133,23 @@ type Layer interface {
 	// AllParams returns a listing of all parameters in the Layer
 	AllParams() string
 
-	// WriteWtsJSON writes the weights from this layer from the
+	// WriteWeightsJSON writes the weights from this layer from the
 	// receiver-side perspective in a JSON text format.
 	// We build in the indentation logic to make it much faster and
 	// more efficient.
-	WriteWtsJSON(w io.Writer, depth int)
+	WriteWeightsJSON(w io.Writer, depth int)
 
-	// ReadWtsJSON reads the weights from this layer from the
+	// ReadWeightsJSON reads the weights from this layer from the
 	// receiver-side perspective in a JSON text format.
 	// This is for a set of weights that were saved
 	// *for one layer only* and is not used for the
-	// network-level ReadWtsJSON, which reads into a separate
-	// structure -- see SetWts method.
-	ReadWtsJSON(r io.Reader) error
+	// network-level ReadWeightsJSON, which reads into a separate
+	// structure -- see SetWeights method.
+	ReadWeightsJSON(r io.Reader) error
 
-	// SetWts sets the weights for this layer from weights.Layer
+	// SetWeights sets the weights for this layer from weights.Layer
 	// decoded values
-	SetWts(lw *weights.Layer) error
+	SetWeights(lw *weights.Layer) error
 }
 
 // LayerBase defines the basic shared data for neural network layers,
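With the `Wts` -> `Weights` renaming above, per-layer weight I/O reads as in this minimal sketch (assuming `ly` is any concrete `emer.Layer` implementation and the usual imports; error handling elided):

	var buf bytes.Buffer
	ly.WriteWeightsJSON(&buf, 0) // depth 0 = top-level indentation
	// later, restore the same single-layer weights:
	err := ly.ReadWeightsJSON(&buf)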
diff --git a/emer/netparams.go b/emer/netparams.go
index de2efa6b..2bd2031c 100644
--- a/emer/netparams.go
+++ b/emer/netparams.go
@@ -139,7 +139,7 @@ func (pr *NetParams) SetNetworkMap(net Network, vals map[string]any) error {
 
 // SetNetworkSheet applies params from given sheet
 func (pr *NetParams) SetNetworkSheet(net Network, sh *params.Sheet, setName string) {
-	net.ApplyParams(sh, pr.SetMsg)
+	net.AsEmer().ApplyParams(sh, pr.SetMsg)
 	hypers := NetworkHyperParams(net, sh)
 	if setName == "Base" {
 		pr.NetHypers = hypers
diff --git a/emer/network.go b/emer/network.go
index 4b0c9ad5..03b006b0 100644
--- a/emer/network.go
+++ b/emer/network.go
@@ -8,7 +8,8 @@ package emer
 
 import (
 	"fmt"
-	"io"
+	"log"
+	"os"
 	"strings"
 
 	"cogentcore.org/core/base/errors"
@@ -17,7 +18,6 @@ import (
 	"cogentcore.org/core/math32"
 	"github.com/emer/emergent/v2/params"
 	"github.com/emer/emergent/v2/relpos"
-	"github.com/emer/emergent/v2/weights"
 )
 
 // Network defines the minimal interface for a neural network,
@@ -59,23 +59,6 @@ type Network interface {
 	// based on any other params that might have changed.
 	UpdateParams()
 
-	// ApplyParams applies given parameter style Sheet to layers
-	// and paths in this network.
-	// Calls UpdateParams on anything set to ensure derived parameters
-	// are all updated.
-	// If setMsg is true, then a message is printed to confirm each
-	// parameter that is set.
-	// it always prints a message if a parameter fails to be set.
-	// returns true if any params were set, and error if there were any errors.
-	ApplyParams(pars *params.Sheet, setMsg bool) (bool, error)
-
-	// NonDefaultParams returns a listing of all parameters in the Network that
-	// are not at their default values -- useful for setting param styles etc.
-	NonDefaultParams() string
-
-	// AllParams returns a listing of all parameters in the Network
-	AllParams() string
-
 	// KeyLayerParams returns a listing for all layers in the network,
 	// of the most important layer-level params (specific to each algorithm).
 	KeyLayerParams() string
@@ -127,33 +110,6 @@ type Network interface {
 	// zeroctr:"+" or "-" = control whether zero-centering is used
 	// Note: this is typically a global list so do not modify!
 	SynVarProps() map[string]string
-
-	// WriteWtsJSON writes network weights (and any other state
-	// that adapts with learning) to JSON-formatted output.
-	WriteWtsJSON(w io.Writer) error
-
-	// ReadWtsJSON reads network weights (and any other state
-	// that adapts with learning) from JSON-formatted input.
-	// Reads into a temporary weights.Network structure that
-	// is then passed to SetWts to actually set the weights.
-	ReadWtsJSON(r io.Reader) error
-
-	// SetWts sets the weights for this network from weights.Network
-	// decoded values.
-	SetWts(nw *weights.Network) error
-
-	// SaveWtsJSON saves network weights (and any other state
-	// that adapts with learning) to a JSON-formatted file.
-	// If filename has .gz extension, then file is gzip compressed.
-	SaveWtsJSON(filename core.Filename) error
-
-	// OpenWtsJSON opens network weights (and any other state that
-	// adapts with learning) from a JSON-formatted file.
-	OpenWtsJSON(filename core.Filename) error
-
-	// VarRange returns the min / max values for given variable
-	VarRange(varNm string) (min, max float32, err error)
 }
 
 // NetworkBase defines the basic data for a neural network,
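With `ApplyParams`, `NonDefaultParams`, and `AllParams` moving off the `Network` interface and onto `NetworkBase` (below), callers reach them through `AsEmer()`, as in this sketch (assuming `net` is any `emer.Network` and `sheet` is a `*params.Sheet`):

	applied, err := net.AsEmer().ApplyParams(sheet, true) // true: print each param as set
	if err != nil {
		log.Println(err)
	}
	_ = applied // true if any parameter matched and was set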
@@ -212,11 +168,20 @@ func (nt *NetworkBase) AsEmer() *NetworkBase { return nt }
 
 func (nt *NetworkBase) Label() string { return nt.Name }
 
-// UpdateLayerMaps updates the LayerNameMap and LayerClassMap.
+// MakeLayerMaps creates new LayerNameMap and LayerClassMap.
 // Call this when the network is built.
-func (nt *NetworkBase) UpdateLayerMaps() {
+func (nt *NetworkBase) MakeLayerMaps() {
 	nt.LayerNameMap = make(map[string]Layer)
 	nt.LayerClassMap = make(map[string][]string)
+	nt.UpdateLayerMaps()
+}
+
+// UpdateLayerMaps updates the LayerNameMap and LayerClassMap.
+func (nt *NetworkBase) UpdateLayerMaps() {
+	if nt.LayerNameMap == nil {
+		nt.MakeLayerMaps()
+		return
+	}
 	nl := nt.EmerNetwork.NumLayers()
 	for li := range nl {
 		ly := nt.EmerNetwork.EmerLayer(li)
@@ -366,3 +331,137 @@ func (nt *NetworkBase) layoutBoundsUpdate() {
 	nt.MinPos = mn
 	nt.MaxPos = mx
 }
+
+// VerticalLayerLayout arranges layers in a standard vertical (z axis stack)
+// layout, by setting the Pos settings.
+func (nt *NetworkBase) VerticalLayerLayout() {
+	lstnm := ""
+	en := nt.EmerNetwork
+	nlay := en.NumLayers()
+	for li := range nlay {
+		ly := en.EmerLayer(li).AsEmer()
+		if li == 0 {
+			ly.Pos = relpos.Pos{Rel: relpos.NoRel}
+			lstnm = ly.Name
+		} else {
+			ly.Pos = relpos.Pos{Rel: relpos.Above, Other: lstnm, XAlign: relpos.Middle, YAlign: relpos.Front}
+		}
+	}
+}
+
+// VarRange returns the min / max values for given variable.
+// Returns an error if the variable name is not found.
+func (nt *NetworkBase) VarRange(varNm string) (min, max float32, err error) {
+	first := true
+	en := nt.EmerNetwork
+	nlay := en.NumLayers()
+	for li := range nlay {
+		ly := en.EmerLayer(li)
+		lmin, lmax, lerr := ly.VarRange(varNm)
+		if lerr != nil {
+			err = lerr
+			return
+		}
+		if first {
+			min = lmin
+			max = lmax
+			first = false
+			continue
+		}
+		if lmin < min {
+			min = lmin
+		}
+		if lmax > max {
+			max = lmax
+		}
+	}
+	return
+}
+
+///////////////////////////////////////////////////////////////////////
+// Params
+
+// ApplyParams applies given parameter style Sheet to layers and paths in this network.
+// Calls UpdateParams to ensure derived parameters are all updated.
+// If setMsg is true, then a message is printed to confirm each parameter that is set.
+// It always prints a message if a parameter fails to be set.
+// Returns true if any params were set, and an error if there were any errors.
+func (nt *NetworkBase) ApplyParams(pars *params.Sheet, setMsg bool) (bool, error) {
+	applied := false
+	var rerr error
+	en := nt.EmerNetwork
+	nlay := en.NumLayers()
+	for li := range nlay {
+		ly := en.EmerLayer(li)
+		app, err := ly.ApplyParams(pars, setMsg)
+		if app {
+			applied = true
+		}
+		if err != nil {
+			rerr = err
+		}
+	}
+	return applied, rerr
+}
+
+// NonDefaultParams returns a listing of all parameters in the Network that
+// are not at their default values -- useful for setting param styles etc.
+func (nt *NetworkBase) NonDefaultParams() string {
+	nds := ""
+	en := nt.EmerNetwork
+	nlay := en.NumLayers()
+	for li := range nlay {
+		ly := en.EmerLayer(li)
+		nd := ly.NonDefaultParams()
+		nds += nd
+	}
+	return nds
+}
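A usage sketch for the base-level `VarRange` above (`"Act"` is a hypothetical unit variable name that a given algorithm would define):

	min, max, err := net.AsEmer().VarRange("Act")
	if err == nil {
		fmt.Printf("Act range: [%g, %g]\n", min, max)
	}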
+func (nt *NetworkBase) AllParams() string { + nds := "" + en := nt.EmerNetwork + nlay := en.NumLayers() + for li := range nlay { + ly := en.EmerLayer(li) + nd := ly.AllParams() + nds += nd + } + return nds +} + +// SaveAllParams saves list of all parameters in Network to given file. +func (nt *NetworkBase) SaveAllParams(filename core.Filename) error { + str := nt.AllParams() + err := os.WriteFile(string(filename), []byte(str), 0666) + if err != nil { + log.Println(err) + } + return err +} + +// SaveNonDefaultParams saves list of all non-default parameters in Network to given file. +func (nt *NetworkBase) SaveNonDefaultParams(filename core.Filename) error { + str := nt.NonDefaultParams() + err := os.WriteFile(string(filename), []byte(str), 0666) + if err != nil { + log.Println(err) + } + return err +} + +// SetRandSeed sets random seed and calls ResetRandSeed +func (nt *NetworkBase) SetRandSeed(seed int64) { + nt.RandSeed = seed + nt.ResetRandSeed() +} + +// ResetRandSeed sets random seed to saved RandSeed, ensuring that the +// network-specific random seed generator has been created. +func (nt *NetworkBase) ResetRandSeed() { + if nt.Rand.Rand == nil { + nt.Rand.NewRand(nt.RandSeed) + } else { + nt.Rand.Seed(nt.RandSeed) + } +} From d439ab16b36d7723a5e76229a5102d7305aa966e Mon Sep 17 00:00:00 2001 From: "Randall C. O'Reilly" Date: Sat, 10 Aug 2024 22:36:32 -0700 Subject: [PATCH 09/10] refactor axon building --- elog/context.go | 2 +- elog/stditems.go | 5 +- emer/layer.go | 8 +- emer/netparams.go | 2 +- emer/path.go | 42 +++++--- emer/weights.go | 154 ++++++++++++++++++++++++++ estats/actrf.go | 10 +- estats/funcs.go | 8 +- estats/rasters.go | 2 +- netparams/README.md | 61 ----------- netparams/diff.go | 72 ------------- netparams/doc.go | 73 ------------- netparams/io.go | 182 ------------------------------- netparams/netparams.go | 68 ------------ netparams/netparams_test.go | 210 ------------------------------------ netparams/typegen.go | 9 -- netview/events.go | 2 +- netview/netdata.go | 4 +- netview/netview.go | 8 +- netview/typegen.go | 6 +- params/apply.go | 6 +- params/flex.go | 2 +- params/styler.go | 4 +- params/typegen.go | 2 +- paths/circle.go | 4 +- paths/pooltile.go | 34 +++--- paths/pooltilesub.go | 34 +++--- paths/prjn_test.go | 4 +- paths/typegen.go | 2 +- 29 files changed, 257 insertions(+), 763 deletions(-) create mode 100644 emer/weights.go delete mode 100644 netparams/README.md delete mode 100644 netparams/diff.go delete mode 100644 netparams/doc.go delete mode 100644 netparams/io.go delete mode 100644 netparams/netparams.go delete mode 100644 netparams/netparams_test.go delete mode 100644 netparams/typegen.go diff --git a/elog/context.go b/elog/context.go index 45a53d99..439b7b63 100644 --- a/elog/context.go +++ b/elog/context.go @@ -237,7 +237,7 @@ func (ctx *Context) ItemColTensorScope(scope etime.ScopeKey, itemNm string) tens // Layer returns layer by name as the emer.Layer interface. // May then need to convert to a concrete type depending. func (ctx *Context) Layer(layNm string) emer.Layer { - return errors.Log1(ctx.Net.EmerLayerByName(layNm)) + return errors.Log1(ctx.Net.AsEmer().EmerLayerByName(layNm)) } // GetLayerTensor gets tensor of Unit values on a layer for given variable diff --git a/elog/stditems.go b/elog/stditems.go index 53fe7843..e85461ac 100644 --- a/elog/stditems.go +++ b/elog/stditems.go @@ -293,10 +293,11 @@ func (lg *Logs) RunStats(stats ...string) { // to it so there aren't any duplicate items. 
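// For example, a hypothetical call logging the ActM variable for all layers
// in class Hidden (names are illustrative only):
//
//	lg.AddLayerTensorItems(net, "ActM", etime.Test, etime.Trial, "Hidden")
//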
// di is a data parallel index di, for networks capable of processing input patterns in parallel. func (lg *Logs) AddLayerTensorItems(net emer.Network, varNm string, mode etime.Modes, etm etime.Times, layClasses ...string) { - layers := net.AsEmer().LayersByClass(layClasses...) + en := net.AsEmer() + layers := en.LayersByClass(layClasses...) for _, lnm := range layers { clnm := lnm - cly := errors.Log1(net.EmerLayerByName(clnm)) + cly := errors.Log1(en.EmerLayerByName(clnm)) itmNm := clnm + "_" + varNm itm, has := lg.ItemByName(itmNm) if has { diff --git a/emer/layer.go b/emer/layer.go index 48445160..3c84e93a 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -167,15 +167,15 @@ type LayerBase struct { // Layers are typically accessed directly by name, via a map. Name string - // Info contains descriptive information about the layer. - // This is displayed in a tooltip in the network view. - Info string - // Class is for applying parameter styles across multiple layers // that all get the same parameters. This can be space separated // with multple classes. Class string + // Info contains descriptive information about the layer. + // This is displayed in a tooltip in the network view. + Info string + // Off turns off the layer, removing from all computations. // This provides a convenient way to dynamically test for // the contributions of the layer, for example. diff --git a/emer/netparams.go b/emer/netparams.go index 2bd2031c..f61445a7 100644 --- a/emer/netparams.go +++ b/emer/netparams.go @@ -76,7 +76,7 @@ func (pr *NetParams) Name() string { // RunName returns standard name simulation run based on params Name() // and starting run number. func (pr *NetParams) RunName(startRun int) string { - return fmt.Sprintf("%s_%03d", pr.Name(), startRun) + return fmt.Sprintf("%s_%03d", pr.Name, startRun) } // Validate checks that the Network has been set diff --git a/emer/path.go b/emer/path.go index 6501ee1c..a9e2b0a7 100644 --- a/emer/path.go +++ b/emer/path.go @@ -107,29 +107,29 @@ type Path interface { // AllParams returns a listing of all parameters in the Pathway. AllParams() string - // WriteWtsJSON writes the weights from this pathway + // WriteWeightsJSON writes the weights from this pathway // from the receiver-side perspective in a JSON text format. // We build in the indentation logic to make it much faster and // more efficient. - WriteWtsJSON(w io.Writer, depth int) + WriteWeightsJSON(w io.Writer, depth int) - // ReadWtsJSON reads the weights from this pathway + // ReadWeightsJSON reads the weights from this pathway // from the receiver-side perspective in a JSON text format. // This is for a set of weights that were saved *for one path only* - // and is not used for the network-level ReadWtsJSON, - // which reads into a separate structure -- see SetWts method. - ReadWtsJSON(r io.Reader) error + // and is not used for the network-level ReadWeightsJSON, + // which reads into a separate structure -- see SetWeights method. + ReadWeightsJSON(r io.Reader) error - // SetWts sets the weights for this pathway from weights.Path + // SetWeights sets the weights for this pathway from weights.Path // decoded values - SetWts(pw *weights.Path) error + SetWeights(pw *weights.Path) error } // PathBase defines the basic shared data for a pathway // which connects two layers, using a specific Pattern // of connectivity, and with its own set of parameters. -// Name is set automatically to: -// Nothing algorithm-specific is implemented here. 
+// The same struct token is added to the Recv and Send
+// layer path lists.
 type PathBase struct {
	// EmerPath provides access to the emer.Path interface
	// methods for functions defined in the PathBase type.
@@ -147,17 +147,26 @@ type PathBase struct {
 	// with multple classes.
 	Class string
 
+	// Info contains descriptive information about the pathway.
+	// This is displayed in a tooltip in the network view.
+	Info string
+
+	// Notes can be used to record any notes about this pathway.
+	Notes string
+
 	// Pattern specifies the pattern of connectivity
 	// for interconnecting the sending and receiving layers.
 	Pattern paths.Pattern
+
+	// Off inactivates this pathway, allowing for easy experimentation.
+	Off bool
 }
 
 // InitPath initializes the path, setting the EmerPath interface
-// to provide access to it for PathBase methods, along with the name.
-func InitPath(l Path, name string) {
-	lb := l.AsEmer()
-	lb.EmerPath = l
-	lb.Name = name
+// to provide access to it for PathBase methods.
+func InitPath(pt Path) {
+	pb := pt.AsEmer()
+	pb.EmerPath = pt
 }
 
 func (pt *PathBase) AsEmer() *PathBase { return pt }
@@ -166,6 +175,7 @@ func (pt *PathBase) AsEmer() *PathBase { return pt }
 func (pt *PathBase) StyleType() string  { return "Path" }
 func (pt *PathBase) StyleClass() string { return pt.EmerPath.TypeName() + " " + pt.Class }
 func (pt *PathBase) StyleName() string  { return pt.Name }
+func (pt *PathBase) Label() string      { return pt.Name }
 
 // AddClass adds a CSS-style class name(s) for this path,
 // ensuring that it is not a duplicate, and properly space separated.
@@ -175,8 +185,6 @@ func (pt *PathBase) AddClass(cls ...string) *PathBase {
 	return pt
 }
 
-func (pt *PathBase) Label() string { return pt.Name }
-
 // SynValue returns value of given variable name on the synapse
 // between given send, recv unit indexes (1D, flat indexes).
 // Returns math32.NaN() for access errors.
diff --git a/emer/weights.go b/emer/weights.go
new file mode 100644
index 00000000..2f9a3414
--- /dev/null
+++ b/emer/weights.go
@@ -0,0 +1,154 @@
+// Copyright (c) 2019, The Emergent Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package emer
+
+import (
+	"bufio"
+	"compress/gzip"
+	"errors"
+	"fmt"
+	"io"
+	"log"
+	"os"
+	"path/filepath"
+
+	"cogentcore.org/core/base/indent"
+	"cogentcore.org/core/core"
+	"github.com/emer/emergent/v2/weights"
+)
+
+// SaveWeightsJSON saves network weights (and any other state that adapts with learning)
+// to a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.
+func (nt *NetworkBase) SaveWeightsJSON(filename core.Filename) error { //types:add
+	fp, err := os.Create(string(filename))
+	defer fp.Close()
+	if err != nil {
+		log.Println(err)
+		return err
+	}
+	ext := filepath.Ext(string(filename))
+	if ext == ".gz" {
+		gzr := gzip.NewWriter(fp)
+		err = nt.WriteWeightsJSON(gzr)
+		gzr.Close()
+	} else {
+		bw := bufio.NewWriter(fp)
+		err = nt.WriteWeightsJSON(bw)
+		bw.Flush()
+	}
+	return err
+}
+
+// OpenWeightsJSON opens network weights (and any other state that adapts with learning)
+// from a JSON-formatted file. If filename has .gz extension, then file is gzip uncompressed.
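+//
+// A minimal save / load sketch (hypothetical filename; assumes a type embedding
+// NetworkBase, here net):
+//
+//	err := net.SaveWeightsJSON("trained.wts.json.gz")
+//	err = net.OpenWeightsJSON("trained.wts.json.gz")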
+func (nt *NetworkBase) OpenWeightsJSON(filename core.Filename) error { //types:add
+	fp, err := os.Open(string(filename))
+	defer fp.Close()
+	if err != nil {
+		log.Println(err)
+		return err
+	}
+	ext := filepath.Ext(string(filename))
+	if ext == ".gz" {
+		gzr, err := gzip.NewReader(fp)
+		if err != nil {
+			log.Println(err)
+			return err
+		}
+		defer gzr.Close()
+		return nt.ReadWeightsJSON(gzr)
+	} else {
+		return nt.ReadWeightsJSON(bufio.NewReader(fp))
+	}
+}
+
+// todo: proper error handling here!
+
+// WriteWeightsJSON writes the weights from this network from the receiver-side perspective
+// in a JSON text format. We build in the indentation logic to make it much faster and
+// more efficient.
+func (nt *NetworkBase) WriteWeightsJSON(w io.Writer) error {
+	en := nt.EmerNetwork
+	nlay := en.NumLayers()
+
+	depth := 0
+	w.Write(indent.TabBytes(depth))
+	w.Write([]byte("{\n"))
+	depth++
+	w.Write(indent.TabBytes(depth))
+	w.Write([]byte(fmt.Sprintf("\"Network\": %q,\n", nt.Name))) // note: can't use \n in `` so need "
+	w.Write(indent.TabBytes(depth))
+	onls := make([]Layer, 0, nlay)
+	for li := range nlay {
+		ly := en.EmerLayer(li)
+		if !ly.AsEmer().Off {
+			onls = append(onls, ly)
+		}
+	}
+	nl := len(onls)
+	if nl == 0 {
+		w.Write([]byte("\"Layers\": null\n"))
+	} else {
+		w.Write([]byte("\"Layers\": [\n"))
+		depth++
+		for li, ly := range onls {
+			ly.WriteWeightsJSON(w, depth)
+			if li == nl-1 {
+				w.Write([]byte("\n"))
+			} else {
+				w.Write([]byte(",\n"))
+			}
+		}
+		depth--
+		w.Write(indent.TabBytes(depth))
+		w.Write([]byte("]\n"))
+	}
+	depth--
+	w.Write(indent.TabBytes(depth))
+	_, err := w.Write([]byte("}\n"))
+	return err
+}
+
+// ReadWeightsJSON reads network weights from the receiver-side perspective
+// in a JSON text format. Reads the entire input into a temporary weights.Network
+// structure that is then passed to the layers etc. using the SetWeights method.
+func (nt *NetworkBase) ReadWeightsJSON(r io.Reader) error {
+	nw, err := weights.NetReadJSON(r)
+	if err != nil {
+		return err // note: already logged
+	}
+	err = nt.SetWeights(nw)
+	if err != nil {
+		log.Println(err)
+	}
+	return err
+}
+
+// SetWeights sets the weights for this network from weights.Network decoded values.
+func (nt *NetworkBase) SetWeights(nw *weights.Network) error {
+	var errs []error
+	if nw.Network != "" {
+		nt.Name = nw.Network
+	}
+	if nw.MetaData != nil {
+		if nt.MetaData == nil {
+			nt.MetaData = nw.MetaData
+		} else {
+			for mk, mv := range nw.MetaData {
+				nt.MetaData[mk] = mv
+			}
+		}
+	}
+	for li := range nw.Layers {
+		lw := &nw.Layers[li]
+		ly, err := nt.EmerLayerByName(lw.Layer)
+		if err != nil {
+			errs = append(errs, err)
+			continue
+		}
+		ly.SetWeights(lw)
+	}
+	return errors.Join(errs...)
+}
diff --git a/estats/actrf.go b/estats/actrf.go
index 736594da..75e2f6fd 100644
--- a/estats/actrf.go
+++ b/estats/actrf.go
@@ -24,10 +24,11 @@ import (
 // If Source is not a layer, it must be populated prior to these calls.
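//
// A hypothetical example, computing activation-based RFs for two layers
// over the Image source, using the ActM variable (names are illustrative only):
//
//	err := st.InitActRFs(net, []string{"V4:Image", "IT:Image"}, "ActM")
//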
func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error { var err error + en := net.AsEmer() for _, anm := range arfs { sp := strings.Split(anm, ":") lnm := sp[0] - _, err = net.EmerLayerByName(lnm) + _, err = en.EmerLayerByName(lnm) if err != nil { fmt.Printf("estats.InitActRFs: %s\n", err) continue @@ -36,7 +37,7 @@ func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error lvt := st.SetLayerSampleTensor(net, lnm, varnm, 0) tnm := sp[1] var tvt *tensor.Float32 - _, err = net.EmerLayerByName(tnm) + _, err = en.EmerLayerByName(tnm) if err == nil { tvt = st.SetLayerSampleTensor(net, tnm, varnm, 0) } else { @@ -61,18 +62,19 @@ func (st *Stats) InitActRFs(net emer.Network, arfs []string, varnm string) error // varnm, and given threshold (0.01 recommended) // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) UpdateActRFs(net emer.Network, varnm string, thr float32, di int) { + en := net.AsEmer() for _, rf := range st.ActRFs.RFs { anm := rf.Name sp := strings.Split(anm, ":") lnm := sp[0] - _, err := net.EmerLayerByName(lnm) + _, err := en.EmerLayerByName(lnm) if err != nil { continue } lvt := st.SetLayerSampleTensor(net, lnm, varnm, di) tnm := sp[1] var tvt *tensor.Float32 - _, err = net.EmerLayerByName(tnm) + _, err = en.EmerLayerByName(tnm) if err == nil { tvt = st.SetLayerSampleTensor(net, tnm, varnm, di) } else { // random state diff --git a/estats/funcs.go b/estats/funcs.go index eefab34e..a66f8867 100644 --- a/estats/funcs.go +++ b/estats/funcs.go @@ -19,7 +19,7 @@ import ( // to a F32Tensor with name = layNm // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) SetLayerTensor(net emer.Network, layNm, unitVar string, di int) *tensor.Float32 { - ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() + ly := errors.Log1(net.AsEmer().EmerLayerByName(layNm)).AsEmer() tsr := st.F32TensorDi(layNm, di) ly.UnitValuesTensor(tsr, unitVar, di) return tsr @@ -29,7 +29,7 @@ func (st *Stats) SetLayerTensor(net emer.Network, layNm, unitVar string, di int) // for given variable to a F32Tensor with name = layNm // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) SetLayerSampleTensor(net emer.Network, layNm, unitVar string, di int) *tensor.Float32 { - ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() + ly := errors.Log1(net.AsEmer().EmerLayerByName(layNm)).AsEmer() tsr := st.F32TensorDi(layNm, di) ly.UnitValuesSampleTensor(tsr, unitVar, di) return tsr @@ -38,7 +38,7 @@ func (st *Stats) SetLayerSampleTensor(net emer.Network, layNm, unitVar string, d // LayerVarsCorrel returns the correlation between two variables on a given layer // di is a data parallel index di, for networks capable of processing input patterns in parallel. func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 { - ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() + ly := errors.Log1(net.AsEmer().EmerLayerByName(layNm)).AsEmer() tsrA := st.F32TensorDi(layNm, di) // standard re-used storage tensor ly.UnitValuesTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor @@ -50,7 +50,7 @@ func (st *Stats) LayerVarsCorrel(net emer.Network, layNm, unitVarA, unitVarB str // Rep version uses representative units. // di is a data parallel index di, for networks capable of processing input patterns in parallel. 
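//
// For example (hypothetical layer and variable names):
//
//	r := st.LayerVarsCorrelRep(net, "Hidden", "ActM", "ActP", 0)
//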
func (st *Stats) LayerVarsCorrelRep(net emer.Network, layNm, unitVarA, unitVarB string, di int) float32 { - ly := errors.Log1(net.EmerLayerByName(layNm)).AsEmer() + ly := errors.Log1(net.AsEmer().EmerLayerByName(layNm)).AsEmer() tsrA := st.F32TensorDi(layNm, di) // standard re-used storage tensor ly.UnitValuesSampleTensor(tsrA, unitVarA, di) tsrB := st.F32TensorDi(layNm+"_alt", di) // alternative storage tensor diff --git a/estats/rasters.go b/estats/rasters.go index f04e5efd..94549b75 100644 --- a/estats/rasters.go +++ b/estats/rasters.go @@ -15,7 +15,7 @@ import ( func (st *Stats) ConfigRasters(net emer.Network, maxCyc int, layers []string) { st.Rasters = layers for _, lnm := range st.Rasters { - ly := errors.Log1(net.EmerLayerByName(lnm)).AsEmer() + ly := errors.Log1(net.AsEmer().EmerLayerByName(lnm)).AsEmer() sr := st.F32Tensor("Raster_" + lnm) nu := len(ly.SampleIndexes) if nu == 0 { diff --git a/netparams/README.md b/netparams/README.md deleted file mode 100644 index 48716aa8..00000000 --- a/netparams/README.md +++ /dev/null @@ -1,61 +0,0 @@ -Docs: [GoDoc](https://pkg.go.dev/github.com/emer/emergent/netparams) - -See [Wiki Params](https://github.com/emer/emergent/wiki/Params) page for detailed docs. - -Package `netparams` provides general-purpose parameter management functionality for organizing multiple sets of parameters efficiently, and basic IO for saving / loading from JSON files and generating Go code to embed into applications, and a basic GUI for viewing and editing. - -`netparams` is a simplification of `params` focused only on setting `Network` params -- eliminating the `params.Set` and `params.Sheets` levels that support setting params on arbitrary non-Network objects. It is better to use [econfig](../econfig) to configure standard struct Config objects outside of the Network -- the styling flexibility is really only needed for Network level params. - -The `netparams.Sets` contains one or more `params.Sheet`s (akin to CSS style sheets) that constitute a coherent set of parameters. Here's the structure: - -``` -Sets { - "Base": { - Sel: "Layer" { - Params: { - "Layer.Inhib.Layer.Gi": "1.1", - ... - } - }, - Sel: ".Back" { - Params: { - "Path.PathScale.Rel": "0.2", - ... - } - } - }, -} -``` - - -The default supported organization is to have a "Base" Set that has all the best parameters so far, and then other sets can modify specific params relative to that one. Order of application is critical, as subsequent params applications overwrite earlier ones, and the typical order is: - -* `Defaults()` method called that establishes the hard-coded default parameters. -* Then apply "Base" `params.Sheet` for any changes relative to those. -* Then optionally apply one or more additional `params.Sheet`s with current experimental parameters or for other special use-cases. - -Critically, all of this is entirely up to the particular model program(s) to determine and control -- this package just provides the basic data structures for holding all of the parameters, and the IO / and Apply infrastructure. - -Each `params.Sheet` consists of a collection of params.Sel elements which finally contain the parameters. The `Sel` field specifies a CSS-style selector determining over what scope the parameters should be applied: - -* `Type` (no prefix) = name of a type -- anything having this type name will get these params. - -* `.Class` = anything with a given class label (each object can have multiple Class labels and thus receive multiple parameter settings, but again, order matters!) 
- -* `#Name` = a specific named object. - -The order of application within a given Sheet is also critical -- typically put the most general Type params first, then `.Class`, then the most specific `#Name` cases, to achieve within a given Sheet the same logic of establishing Base params for all types and then more specific overrides for special cases (e.g., an overall learning rate that appplies across all pathways, but maybe a faster or slower one for a .Class or specific #Name'd pathway). - -There is a params.Styler interface with methods that any Go type can implement to provide these different labels. The emer.Network, .Layer, and .Path interfaces each implement this interface. - -Parameter values are stored as strings, which can represent any value. - -Finally, there are methods to show where params.Sheet sets the same parameter differently, and to compare with the default settings on a given object type using go struct field tags of the form def:"val1[,val2...]". - -# Providing direct access to specific params - -The best way to provide the user direct access to specific parameter values through the Params mechanisms is to put the relevant params in the `Sim` object, where they will be editable fields, and then call `SetFloat` or `SetString` as appropriate with the path to the parameter in question, followed by a call to apply the params. - -The current value can be obtained by the `ParamVal` methods. - - diff --git a/netparams/diff.go b/netparams/diff.go deleted file mode 100644 index 213eff06..00000000 --- a/netparams/diff.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package netparams - -import ( - "sort" - - "golang.org/x/exp/maps" -) - -// DiffsAll reports all the cases where the same param path is being set -// to different values across different sets -func (ps *Sets) DiffsAll() string { - pd := "" - sz := len(*ps) - keys := maps.Keys(*ps) - sort.Strings(keys) - for i, sNm := range keys { - sheet := (*ps)[sNm] - for j := i + 1; j < sz; j++ { - osNm := keys[j] - osheet := (*ps)[osNm] - spd := sheet.Diffs(osheet, sNm, osNm) - if spd != "" { - pd += "//////////////////////////////////////\n" - pd += spd - } - } - } - return pd -} - -// DiffsFirst reports all the cases where the same param path is being set -// to different values between the "Base" sheet and all other sheets. -// Only works if there is a sheet named "Base". -func (ps *Sets) DiffsFirst() string { - pd := "" - sz := len(*ps) - if sz < 2 { - return "" - } - sheet, ok := (*ps)["Base"] - if !ok { - return "params.DiffsFirst: Sheet named 'Base' not found\n" - } - keys := maps.Keys(*ps) - sort.Strings(keys) - for _, sNm := range keys { - if sNm == "Base" { - continue - } - osheet := (*ps)[sNm] - spd := sheet.Diffs(osheet, "Base", sNm) - if spd != "" { - pd += "//////////////////////////////////////\n" - pd += spd - } - } - return pd -} - -// DiffsWithin reports all the cases where the same param path is being set -// to different values within different sheets in given sheet -func (ps *Sets) DiffsWithin(sheetName string) string { - sheet, err := ps.SheetByNameTry(sheetName) - if err != nil { - return err.Error() - } - return sheet.DiffsWithin(sheetName) -} diff --git a/netparams/doc.go b/netparams/doc.go deleted file mode 100644 index 56f2add8..00000000 --- a/netparams/doc.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package params provides general-purpose parameter management functionality -for organizing multiple sets of parameters efficiently, and basic IO for -saving / loading from JSON files and generating Go code to embed into -applications, and a basic GUI for viewing and editing. - -The main overall unit that is generally operated upon at run-time is the -params.Set, which is a collection of params.Sheet's (akin to CSS style -sheets) that constitute a coherent set of parameters. - -A good strategy is to have a "Base" Set that has all the best parameters so far, -and then other sets can modify specific params relative to that one. -Order of application is critical, as subsequent params applications overwrite -earlier ones, and the typical order is: - - - Defaults() method called that establishes the hard-coded default parameters. - - Then apply "Base" params.Set for any changes relative to those. - - Then optionally apply one or more additional params.Set's with current - experimental parameters. - -Critically, all of this is entirely up to the particular model program(s) to -determine and control -- this package just provides the basic data structures -for holding all of the parameters, and the IO / and Apply infrastructure. - -Within a params.Set, multiple different params.Sheet's can be organized, -with each CSS-style sheet achieving a relatively complete parameter styling -of a given element of the overal model, e.g., "Network", "Sim", "Env". -Or Network could be further broken down into "Learn" vs. "Act" etc, -or according to different brain areas ("Hippo", "PFC", "BG", etc). -Again, this is entirely at the discretion of the modeler and must be -performed under explict program control, especially because order is so critical. - -Each params.Sheet consists of a collection of params.Sel elements which actually -finally contain the parameters. The Sel field specifies a CSS-style selector -determining over what scope the parameters should be applied: - -* Type = name of a type -- anything having this type name will get these params. - -* .Class = anything with a given class label (each object can have multiple Class -labels and thus receive multiple parameter settings, but again, order matters!) - -* #Name = a specific named object. - -The order of application within a given Sheet is also critical -- typically -put the most general Type params first, then .Class, then the most specific #Name -cases, to achieve within a given Sheet the same logic of establishing Base params -for all types and then more specific overrides for special cases (e.g., an overall -learning rate that appplies across all pathways, but maybe a faster or slower -one for a .Class or specific #Name'd pathway). - -There is a params.Styler interface with methods that any Go type can implement -to provide these different labels. The emer.Network, .Layer, and .Path interfaces -each implement this interface. - -Otherwise, the Apply method will just directly apply params to a given struct -type if it does not implement the Styler interface. - -Parameter values are limited to float64 values *only*. 
These can be specified -using "enum" style const integer values, and can be applied to any numeric -type (they will be automatically converted), but internally this is the only -parameter value type, which greatly simplifies the overall interface, and handles -the vast majority of use-cases (especially because named options are just integers -and can be set as such). - -Finally, there are methods to show where params.Set's set the same parameter -differently, and to compare with the default settings on a given object type -using go struct field tags of the form default:"val1[,val2...]". -*/ -package netparams diff --git a/netparams/io.go b/netparams/io.go deleted file mode 100644 index eeaf2dde..00000000 --- a/netparams/io.go +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package netparams - -import ( - "bytes" - "encoding/json" - "io" - "io/ioutil" - "log" - "os" - - "cogentcore.org/core/base/indent" - "cogentcore.org/core/base/iox" - "cogentcore.org/core/base/iox/jsonx" - "cogentcore.org/core/base/iox/tomlx" - "cogentcore.org/core/core" - "github.com/BurntSushi/toml" - "github.com/emer/emergent/v2/params" -) - -// WriteGoPrelude writes the start of a go file in package main that starts a -// variable assignment to given variable -- for start of SaveGoCode methods. -func WriteGoPrelude(w io.Writer, varNm string) { - w.Write([]byte("// File generated by netparams.SaveGoCode\n\n")) - w.Write([]byte("package main\n\n")) - w.Write([]byte(`import "github.com/emer/emergent/v2/params"`)) - w.Write([]byte(`import "github.com/emer/emergent/v2/netparams"`)) - w.Write([]byte("\n\nvar " + varNm + " = ")) -} - -// OpenJSON opens params from a JSON-formatted file. -func (pr *Sets) OpenJSON(filename core.Filename) error { - *pr = make(Sets) // reset - b, err := ioutil.ReadFile(string(filename)) - if err != nil { - log.Println(err) - return err - } - return json.Unmarshal(b, pr) -} - -// SaveJSON saves params to a JSON-formatted file. -func (pr *Sets) SaveJSON(filename core.Filename) error { - return jsonx.Save(pr, string(filename)) -} - -// OpenTOML opens params from a TOML-formatted file. -func (pr *Sets) OpenTOML(filename core.Filename) error { - *pr = make(Sets) // reset - return tomlx.Open(pr, string(filename)) -} - -// SaveTOML saves params to a TOML-formatted file. -func (pr *Sets) SaveTOML(filename core.Filename) error { - // return tomlx.Save(pr, string(filename)) // pelletier/go-toml produces bad output on maps - return iox.Save(pr, string(filename), func(w io.Writer) iox.Encoder { - return toml.NewEncoder(w) - }) -} - -// WriteGoCode writes params to corresponding Go initializer code. -func (pr *Sets) WriteGoCode(w io.Writer, depth int) { - w.Write([]byte("netparams.Sets{\n")) - depth++ - for nm, st := range *pr { - w.Write(indent.TabBytes(depth)) - w.Write([]byte(`"` + nm + `": `)) - st.WriteGoCode(w, depth) - } - depth-- - w.Write(indent.TabBytes(depth)) - w.Write([]byte("}\n")) -} - -// StringGoCode returns Go initializer code as a byte string. -func (pr *Sets) StringGoCode() []byte { - var buf bytes.Buffer - pr.WriteGoCode(&buf, 0) - return buf.Bytes() -} - -// SaveGoCode saves params to corresponding Go initializer code. 
-func (pr *Sets) SaveGoCode(filename core.Filename) error { - fp, err := os.Create(string(filename)) - defer fp.Close() - if err != nil { - log.Println(err) - return err - } - params.WriteGoPrelude(fp, "SavedParamsSets") - pr.WriteGoCode(fp, 0) - return nil -} - -/* -var SetsProps = tree.Props{ - "ToolBar": tree.PropSlice{ - {"Save", tree.PropSlice{ - {"SaveTOML", tree.Props{ - "label": "Save As TOML...", - "desc": "save to TOML formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"SaveJSON", tree.Props{ - "label": "Save As JSON...", - "desc": "save to JSON formatted file", - "icon": "file-save", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - {"SaveGoCode", tree.Props{ - "label": "Save Code As...", - "desc": "save to Go-formatted initializer code in file", - "icon": "go", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".go", - }}, - }, - }}, - }}, - {"Open", tree.PropSlice{ - {"OpenTOML", tree.Props{ - "label": "Open...", - "desc": "open from TOML formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".toml", - }}, - }, - }}, - {"OpenJSON", tree.Props{ - "label": "Open...", - "desc": "open from JSON formatted file", - "icon": "file-open", - "Args": tree.PropSlice{ - {"File Name", tree.Props{ - "ext": ".json", - }}, - }, - }}, - }}, - {"StringGoCode", tree.Props{ - "label": "Show Code", - "desc": "shows the Go-formatted initializer code, can be copy / pasted into program", - "icon": "go", - "show-return": true, - }}, - {"sep-diffs", tree.BlankProp{}}, - {"DiffsAll", tree.Props{ - "desc": "between all sets, reports where the same param path is being set to different values", - "icon": "search", - "show-return": true, - }}, - {"DiffsFirst", tree.Props{ - "desc": "between first set (e.g., the Base set) and rest of sets, reports where the same param path is being set to different values", - "icon": "search", - "show-return": true, - }}, - {"DiffsWithin", tree.Props{ - "desc": "reports all the cases where the same param path is being set to different values within different sheets in given set", - "icon": "search", - "show-return": true, - "Args": tree.PropSlice{ - {"Set Name", tree.Props{}}, - }, - }}, - }, -} -*/ diff --git a/netparams/netparams.go b/netparams/netparams.go deleted file mode 100644 index dab47276..00000000 --- a/netparams/netparams.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package netparams - -//go:generate core generate -add-types - -import ( - "fmt" - "log" - - "github.com/emer/emergent/v2/params" -) - -// Sets is a collection of Sheets that can be chosen among -// depending on different desired configurations etc. Thus, each Set -// represents a collection of different possible specific configurations, -// and different such configurations can be chosen by name to apply as desired. 
-type Sets map[string]*params.Sheet //git:add - -// SheetByNameTry tries to find given set by name, and returns error -// if not found (also logs the error) -func (ps *Sets) SheetByNameTry(name string) (*params.Sheet, error) { - st, ok := (*ps)[name] - if ok { - return st, nil - } - err := fmt.Errorf("params.Sets: Param Sheet named %s not found", name) - log.Println(err) - return nil, err -} - -// SheetByName returns given sheet by name -- for use when confident -// that it exists, as a nil will return if not found with no error -func (ps *Sets) SheetByName(name string) *params.Sheet { - return (*ps)[name] -} - -// SetFloat sets the value of given parameter, in selection sel, -// in sheet and set. -func (ps *Sets) SetFloat(sheet, sel, param string, val float64) error { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return err - } - return sp.SetFloat(sel, param, val) -} - -// SetString sets the value of given parameter, in selection sel, -// in sheet and set. Returns error if anything is not found. -func (ps *Sets) SetString(sheet, sel, param string, val string) error { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return err - } - return sp.SetString(sel, param, val) -} - -// ParamVal returns the value of given parameter, in selection sel, -// in sheet and set. Returns error if anything is not found. -func (ps *Sets) ParamValue(sheet, sel, param string) (string, error) { - sp, err := ps.SheetByNameTry(sheet) - if err != nil { - return "", err - } - return sp.ParamValue(sel, param) -} diff --git a/netparams/netparams_test.go b/netparams/netparams_test.go deleted file mode 100644 index 60021273..00000000 --- a/netparams/netparams_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright (c) 2019, The Emergent Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package netparams - -import ( - "bytes" - "testing" - - "github.com/andreyvit/diff" - "github.com/emer/emergent/v2/params" - // "github.com/andreyvit/diff" -) - -var paramSets = Sets{ - "Base": { - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: params.Params{ - "Path.Learn.Norm.On": "true", - "Path.Learn.Momentum.On": "true", - "Path.Learn.WtBal.On": "false", - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: params.Hypers{ - "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, - }, - }, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: params.Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "DefaultInhib": { - {Sel: "#Output", Desc: "go back to default", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }}, - }, - "NoMomentum": { - {Sel: "Path", Desc: "no norm or momentum", - Params: params.Params{ - "Path.Learn.Norm.On": "false", - "Path.Learn.Momentum.On": "false", - }}, - }, - "WtBalOn": { - {Sel: "Path", Desc: "weight bal on", - Params: params.Params{ - "Path.Learn.WtBal.On": "true", - }}, - }, -} - -var trgCode = `netparams.Sets{ - "Base": { - {Sel: "Path", Desc: "norm and momentum on works better, but wt bal is not better for smaller nets", - Params: params.Params{ - "Path.Learn.Norm.On": "true", - "Path.Learn.Momentum.On": "true", - "Path.Learn.WtBal.On": "false", - }}, - {Sel: "Layer", Desc: "using default 1.8 inhib for all of network -- can explore", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }, - Hypers: params.Hypers{ - "Layer.Inhib.Layer.Gi": {"Min": "0.5", "StdDev": "0.1"}, - }, - }, - {Sel: "#Output", Desc: "output definitely needs lower inhib -- true for smaller layers in general", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.4", - }}, - {Sel: ".Back", Desc: "top-down back-pathways MUST have lower relative weight scale, otherwise network hallucinates", - Params: params.Params{ - "Path.WtScale.Rel": "0.2", - }}, - }, - "DefaultInhib": { - {Sel: "#Output", Desc: "go back to default", - Params: params.Params{ - "Layer.Inhib.Layer.Gi": "1.8", - }}, - }, - "NoMomentum": { - {Sel: "Path", Desc: "no norm or momentum", - Params: params.Params{ - "Path.Learn.Norm.On": "false", - "Path.Learn.Momentum.On": "false", - }}, - }, - "WtBalOn": { - {Sel: "Path", Desc: "weight bal on", - Params: params.Params{ - "Path.Learn.WtBal.On": "true", - }}, - }, -} -` - -func TestParamSetsWriteGo(t *testing.T) { - t.Skip("todo: need to sort the map for this to work now") - var buf bytes.Buffer - paramSets.WriteGoCode(&buf, 0) - dfb := buf.Bytes() - dfs := string(dfb) - // fmt.Printf("%v", dfs) - if dfs != trgCode { - t.Errorf("ParamStyle output incorrect at: %v!\n", diff.LineDiff(dfs, trgCode)) - // t.Errorf("ParamStyle output incorrect!\n%v\n", dfs) - } -} - -func TestParamSetsSet(t *testing.T) { - cval, err := paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - if err != nil { - t.Error(err) - } - // fmt.Printf("current value: %s\n", cval) - if cval != "false" { - t.Errorf("value should have been false: %s\n", cval) - } - err = paramSets.SetString("Base", "Path", "Path.Learn.WtBal.On", "true") - if err != nil { - t.Error(err) - } - 
cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - // fmt.Printf("new value: %s\n", cval) - if cval != "true" { - t.Errorf("value should have been true: %s\n", cval) - } - err = paramSets.SetFloat("Base", "Path", "Path.Learn.WtBal.On", 5.1) - if err != nil { - t.Error(err) - } - cval, err = paramSets.ParamValue("Base", "Path", "Path.Learn.WtBal.On") - // fmt.Printf("new value: %s\n", cval) - if cval != "5.1" { - t.Errorf("value should have been 5.1: %s\n", cval) - } - cval, err = paramSets.ParamValue("Basre", "Path", "Path.Learn.WtBal.On") - if err == nil { - t.Errorf("Should have had an error") - } - // fmt.Printf("error: %s\n", err) - cval, err = paramSets.ParamValue("Base", "Paths", "Path.Learn.WtBal.On") - if err == nil { - t.Errorf("Should have had an error") - } - // fmt.Printf("error: %s\n", err) -} - -var trgHypers = `{ - "Hidden1": { - "Nm": "Hidden1", - "Type": "Layer", - "Cls": "Hidden", - "Obj": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - } - }, - "Hidden2": { - "Nm": "Hidden2", - "Type": "Layer", - "Cls": "Hidden", - "Obj": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - } - }, - "Input": { - "Nm": "Input", - "Type": "Layer", - "Cls": "Input", - "Obj": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.8" - } - } - }, - "Output": { - "Nm": "Output", - "Type": "Layer", - "Cls": "Target", - "Obj": { - "Layer.Inhib.Layer.Gi": { - "Min": "0.5", - "StdDev": "0.1", - "Val": "1.4" - } - } - } -}` diff --git a/netparams/typegen.go b/netparams/typegen.go deleted file mode 100644 index a35b64f8..00000000 --- a/netparams/typegen.go +++ /dev/null @@ -1,9 +0,0 @@ -// Code generated by "core generate -add-types"; DO NOT EDIT. - -package netparams - -import ( - "cogentcore.org/core/types" -) - -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/netparams.Sets", IDName: "sets", Doc: "Sets is a collection of Sheets that can be chosen among\ndepending on different desired configurations etc. 
Thus, each Set\nrepresents a collection of different possible specific configurations,\nand different such configurations can be chosen by name to apply as desired."}) diff --git a/netview/events.go b/netview/events.go index 6aadb3c1..aba9ee32 100644 --- a/netview/events.go +++ b/netview/events.go @@ -48,7 +48,7 @@ func (sw *Scene) MouseDownEvent(e events.Event) { for _, n := range ns { ln, ok := n.(*LayName) if ok { - lay, _ := ln.NetView.Net.EmerLayerByName(ln.Text) + lay, _ := ln.NetView.Net.AsEmer().EmerLayerByName(ln.Text) if lay != nil { FormDialog(sw, lay, "Layer: "+lay.StyleName()) } diff --git a/netview/netdata.go b/netview/netdata.go index a4ad4352..eeb1c26a 100644 --- a/netview/netdata.go +++ b/netview/netdata.go @@ -444,7 +444,7 @@ func (nd *NetData) RecvUnitValue(laynm string, vnm string, uidx1d int) (float32, if nd.NoSynData || !ok || nd.PathLay == "" { return 0, false } - recvLay := errors.Ignore1(nd.Net.EmerLayerByName(nd.PathLay)).AsEmer() + recvLay := errors.Ignore1(nd.Net.AsEmer().EmerLayerByName(nd.PathLay)).AsEmer() if recvLay == nil { return 0, false } @@ -492,7 +492,7 @@ func (nd *NetData) SendUnitValue(laynm string, vnm string, uidx1d int) (float32, if nd.NoSynData || !ok || nd.PathLay == "" { return 0, false } - sendLay := errors.Ignore1(nd.Net.EmerLayerByName(nd.PathLay)).AsEmer() + sendLay := errors.Ignore1(nd.Net.AsEmer().EmerLayerByName(nd.PathLay)).AsEmer() if sendLay == nil { return 0, false } diff --git a/netview/netview.go b/netview/netview.go index 8df86a3d..812f791b 100644 --- a/netview/netview.go +++ b/netview/netview.go @@ -1227,25 +1227,25 @@ func (nv *NetView) MakeViewbar(p *tree.Plan) { // SaveWeights saves the network weights. func (nv *NetView) SaveWeights(filename core.Filename) { //types:add - nv.Net.SaveWtsJSON(filename) + nv.Net.AsEmer().SaveWeightsJSON(filename) } // OpenWeights opens the network weights. func (nv *NetView) OpenWeights(filename core.Filename) { //types:add - nv.Net.OpenWtsJSON(filename) + nv.Net.AsEmer().OpenWeightsJSON(filename) } // ShowNonDefaultParams shows a dialog of all the parameters that // are not at their default values in the network. Useful for setting params. func (nv *NetView) ShowNonDefaultParams() string { //types:add - nds := nv.Net.NonDefaultParams() + nds := nv.Net.AsEmer().NonDefaultParams() texteditor.TextDialog(nv, "Non Default Params: "+nv.Name, nds) return nds } // ShowAllParams shows a dialog of all the parameters in the network. 
func (nv *NetView) ShowAllParams() string { //types:add - nds := nv.Net.AllParams() + nds := nv.Net.AsEmer().AllParams() texteditor.TextDialog(nv, "All Params: "+nv.Name, nds) return nds } diff --git a/netview/typegen.go b/netview/typegen.go index b25910ea..ce0ae112 100644 --- a/netview/typegen.go +++ b/netview/typegen.go @@ -3,7 +3,11 @@ package netview import ( - "cogentcore.org/core/base/ordmap" + "sync" + + "cogentcore.org/core/colors/colormap" + "cogentcore.org/core/core" + "cogentcore.org/core/tree" "cogentcore.org/core/types" ) diff --git a/params/apply.go b/params/apply.go index e9f002c3..deb59f63 100644 --- a/params/apply.go +++ b/params/apply.go @@ -49,7 +49,7 @@ func (pr *Params) Apply(obj any, setMsg bool) error { if styler, has := obj.(Styler); has { objNm = styler.StyleName() if styob, has := obj.(StylerObject); has { - obj = styob.Object() + obj = styob.StyleObject() } } else if lblr, has := obj.(labels.Labeler); has { objNm = lblr.Label() @@ -107,7 +107,7 @@ func (pr *Hypers) Apply(obj any, setMsg bool) error { if styler, has := obj.(Styler); has { objNm = styler.StyleName() if styob, has := obj.(StylerObject); has { - obj = styob.Object() + obj = styob.StyleObject() } } else if lblr, has := obj.(labels.Labeler); has { objNm = lblr.Label() @@ -186,7 +186,7 @@ func (ps *Sel) SelMatch(obj any) bool { return true // default match if no styler.. } if styob, has := obj.(StylerObject); has { - obj = styob.Object() + obj = styob.StyleObject() } gotyp := reflectx.NonPointerType(reflect.TypeOf(obj)).Name() return SelMatch(ps.Sel, styler.StyleName(), styler.StyleClass(), styler.StyleType(), gotyp) diff --git a/params/flex.go b/params/flex.go index 9cc7a2ea..6281c271 100644 --- a/params/flex.go +++ b/params/flex.go @@ -46,7 +46,7 @@ func (fv *FlexVal) Name() string { return fv.Nm } -func (fv *FlexVal) Object() any { +func (fv *FlexVal) StyleObject() any { return fv.Obj } diff --git a/params/styler.go b/params/styler.go index ed50d63f..56f52196 100644 --- a/params/styler.go +++ b/params/styler.go @@ -37,9 +37,9 @@ type Styler interface { type StylerObject interface { Styler - // Object returns the object that will have its field values set by + // StyleObject returns the object that will have its field values set by // the params specifications. - Object() any + StyleObject() any } // AddClass adds given class(es) to current class string, diff --git a/params/typegen.go b/params/typegen.go index c696c715..6a67701c 100644 --- a/params/typegen.go +++ b/params/typegen.go @@ -30,6 +30,6 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Sear var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Styler", IDName: "styler", Doc: "The params.Styler interface exposes TypeName, Class, and Name methods\nthat allow the params.Sel CSS-style selection specifier to determine\nwhether a given parameter applies.\nAdding Set versions of Name and Class methods is a good idea but not\nneeded for this interface, so they are not included here.", Methods: []types.Method{{Name: "StyleType", Doc: "StyleType returns the name of this type for CSS-style matching.\nThis is used for CSS Sel selector with no prefix.\nThis type is used *in addition* to the actual Go type name\nof the object, and is a kind of type-category (e.g., Layer\nor Path in emergent network objects).", Returns: []string{"string"}}, {Name: "StyleClass", Doc: "StyleClass returns the space-separated list of class selectors (tags).\nParameters with a . prefix target class tags.\nDo NOT include the . 
in the Class tags on Styler objects;\nThe . is only used in the Sel selector on the params.Sel.", Returns: []string{"string"}}, {Name: "StyleName", Doc: "StyleName returns the name of this object.\nParameters with a # prefix target object names, which are typically\nunique. Note, do not include the # prefix in the actual object name,\nonly in the Sel selector on params.Sel.", Returns: []string{"string"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObject", IDName: "styler-object", Doc: "The params.StylerObject interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "Object", Doc: "Object returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.StylerObject", IDName: "styler-object", Doc: "The params.StylerObject interface extends Styler to include an arbitary\nfunction to access the underlying object type.", Methods: []types.Method{{Name: "StyleObject", Doc: "StyleObject returns the object that will have its field values set by\nthe params specifications.", Returns: []string{"any"}}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/params.Tweaks", IDName: "tweaks", Doc: "Tweaks holds parameter tweak values associated with one parameter selector.\nHas all the object values affected for a given parameter within one\nselector, that has a tweak hyperparameter set.", Fields: []types.Field{{Name: "Param", Doc: "the parameter path for this param"}, {Name: "Sel", Doc: "the param selector that set the specific value upon which tweak is based"}, {Name: "Search", Doc: "the search values for all objects covered by this selector"}}}) diff --git a/paths/circle.go b/paths/circle.go index cc1fb9ca..c337af17 100644 --- a/paths/circle.go +++ b/paths/circle.go @@ -15,7 +15,7 @@ import ( // Circle implements a circular pattern of connectivity between two layers // where the center moves in proportion to receiver position with offset // and multiplier factors, and a given radius is used (with wrap-around -// optionally). A corresponding Gaussian bump of TopoWts is available as well. +// optionally). A corresponding Gaussian bump of TopoWeights is available as well. // Makes for a good center-surround connectivity pattern. // 4D layers are automatically flattened to 2D for this connection. type Circle struct { @@ -36,7 +36,7 @@ type Circle struct { Wrap bool // if true, this path should set gaussian topographic weights, according to following parameters - TopoWts bool + TopoWeights bool // gaussian sigma (width) as a proportion of the radius of the circle Sigma float32 diff --git a/paths/pooltile.go b/paths/pooltile.go index 70cf6b93..7a0bc712 100644 --- a/paths/pooltile.go +++ b/paths/pooltile.go @@ -220,32 +220,32 @@ func (pt *PoolTile) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, re return } -// HasTopoWts returns true if some form of topographic weight patterns are set -func (pt *PoolTile) HasTopoWts() bool { +// HasTopoWeights returns true if some form of topographic weight patterns are set +func (pt *PoolTile) HasTopoWeights() bool { return pt.GaussFull.On || pt.GaussInPool.On || pt.SigFull.On || pt.SigInPool.On } -// TopoWts sets values in given 4D or 6D tensor according to *Topo settings. +// TopoWeights sets values in given 4D or 6D tensor according to *Topo settings. 
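+//
+// A minimal calling sketch, echoing the commented-out usage in paths/prjn_test.go
+// (shapes are hypothetical; the wts layout is described next):
+//
+//	send := tensor.NewShape([]int{4, 4, 3, 3})
+//	recv := tensor.NewShape([]int{2, 2, 2, 2})
+//	wts := &tensor.Float32{}
+//	err := pt.TopoWeights(send, recv, wts)
+//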
// wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool // of recv layer (these are units over which topography is defined) // and remaing 2D or 4D is for receptive field Size by units within pool size for // sending layer. -func (pt *PoolTile) TopoWts(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTile) TopoWeights(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.GaussFull.On || pt.GaussInPool.On { if send.NumDims() == 2 { - return pt.TopoWtsGauss2D(send, recv, wts) + return pt.TopoWeightsGauss2D(send, recv, wts) } else { - return pt.TopoWtsGauss4D(send, recv, wts) + return pt.TopoWeightsGauss4D(send, recv, wts) } } if pt.SigFull.On || pt.SigInPool.On { if send.NumDims() == 2 { - return pt.TopoWtsSigmoid2D(send, recv, wts) + return pt.TopoWeightsSigmoid2D(send, recv, wts) } else { - return pt.TopoWtsSigmoid4D(send, recv, wts) + return pt.TopoWeightsSigmoid4D(send, recv, wts) } } - err := fmt.Errorf("PoolTile:TopoWts no Gauss or Sig params turned on") + err := fmt.Errorf("PoolTile:TopoWeights no Gauss or Sig params turned on") log.Println(err) return err } @@ -302,11 +302,11 @@ func (pt *PoolTile) GaussOff() { pt.GaussInPool.On = false } -// TopoWtsGauss2D sets values in given 4D tensor according to *Topo settings. +// TopoWeightsGauss2D sets values in given 4D tensor according to *Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool // of recv layer (these are units over which topography is defined) // and remaing 2D is for sending layer size (2D = sender) -func (pt *PoolTile) TopoWtsGauss2D(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTile) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.GaussFull.Sigma == 0 { pt.GaussFull.Defaults() } @@ -390,12 +390,12 @@ func (pt *PoolTile) TopoWtsGauss2D(send, recv *tensor.Shape, wts *tensor.Float32 return nil } -// TopoWtsGauss4D sets values in given 6D tensor according to *Topo settings. +// TopoWeightsGauss4D sets values in given 6D tensor according to *Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool // of recv layer (these are units over which topography is defined) // and remaing 4D is for receptive field Size by units within pool size for // sending layer. -func (pt *PoolTile) TopoWtsGauss4D(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTile) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.GaussFull.Sigma == 0 { pt.GaussFull.Defaults() } @@ -512,11 +512,11 @@ func (gt *SigmoidTopo) ShouldDisplay(field string) bool { } } -// TopoWtsSigmoid2D sets values in given 4D tensor according to Topo settings. +// TopoWeightsSigmoid2D sets values in given 4D tensor according to Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within pool // of recv layer (these are units over which topography is defined) // and remaing 2D is for sending layer (2D = sender). -func (pt *PoolTile) TopoWtsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTile) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.SigFull.Gain == 0 { pt.SigFull.Defaults() } @@ -602,12 +602,12 @@ func (pt *PoolTile) TopoWtsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Float return nil } -// TopoWtsSigmoid4D sets values in given 6D tensor according to Topo settings. 
+// TopoWeightsSigmoid4D sets values in given 6D tensor according to Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within pool // of recv layer (these are units over which topography is defined) // and remaing 2D is for receptive field Size by units within pool size for // sending layer. -func (pt *PoolTile) TopoWtsSigmoid4D(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTile) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.SigFull.Gain == 0 { pt.SigFull.Defaults() } diff --git a/paths/pooltilesub.go b/paths/pooltilesub.go index 60a3e91c..dcbe54a6 100644 --- a/paths/pooltilesub.go +++ b/paths/pooltilesub.go @@ -249,32 +249,32 @@ func (pt *PoolTileSub) ConnectRecip(send, recv *tensor.Shape, same bool) (sendn, return } -// HasTopoWts returns true if some form of topographic weight patterns are set -func (pt *PoolTileSub) HasTopoWts() bool { +// HasTopoWeights returns true if some form of topographic weight patterns are set +func (pt *PoolTileSub) HasTopoWeights() bool { return pt.GaussFull.On || pt.GaussInPool.On || pt.SigFull.On || pt.SigInPool.On } -// TopoWts sets values in given 4D or 6D tensor according to *Topo settings. +// TopoWeights sets values in given 4D or 6D tensor according to *Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool // of recv layer (these are units over which topography is defined) // and remaing 2D or 4D is for receptive field Size by units within pool size for // sending layer. -func (pt *PoolTileSub) TopoWts(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTileSub) TopoWeights(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.GaussFull.On || pt.GaussInPool.On { if send.NumDims() == 2 { - return pt.TopoWtsGauss2D(send, recv, wts) + return pt.TopoWeightsGauss2D(send, recv, wts) } else { - return pt.TopoWtsGauss4D(send, recv, wts) + return pt.TopoWeightsGauss4D(send, recv, wts) } } if pt.SigFull.On || pt.SigInPool.On { if send.NumDims() == 2 { - return pt.TopoWtsSigmoid2D(send, recv, wts) + return pt.TopoWeightsSigmoid2D(send, recv, wts) } else { - return pt.TopoWtsSigmoid4D(send, recv, wts) + return pt.TopoWeightsSigmoid4D(send, recv, wts) } } - err := fmt.Errorf("PoolTileSub:TopoWts no Gauss or Sig params turned on") + err := fmt.Errorf("PoolTileSub:TopoWeights no Gauss or Sig params turned on") log.Println(err) return err } @@ -285,11 +285,11 @@ func (pt *PoolTileSub) GaussOff() { pt.GaussInPool.On = false } -// TopoWtsGauss2D sets values in given 4D tensor according to *Topo settings. +// TopoWeightsGauss2D sets values in given 4D tensor according to *Topo settings. // wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool // of recv layer (these are units over which topography is defined) // and remaing 2D is for sending layer size (2D = sender) -func (pt *PoolTileSub) TopoWtsGauss2D(send, recv *tensor.Shape, wts *tensor.Float32) error { +func (pt *PoolTileSub) TopoWeightsGauss2D(send, recv *tensor.Shape, wts *tensor.Float32) error { if pt.GaussFull.Sigma == 0 { pt.GaussFull.Defaults() } @@ -373,12 +373,12 @@ func (pt *PoolTileSub) TopoWtsGauss2D(send, recv *tensor.Shape, wts *tensor.Floa return nil } -// TopoWtsGauss4D sets values in given 6D tensor according to *Topo settings. +// TopoWeightsGauss4D sets values in given 6D tensor according to *Topo settings. 
// wts is shaped with first 2 outer-most dims as Y, X of units within layer / pool
// of recv layer (these are units over which topography is defined)
// and remaining 4D is for receptive field Size by units within pool size for
// sending layer.
-func (pt *PoolTileSub) TopoWtsGauss4D(send, recv *tensor.Shape, wts *tensor.Float32) error {
+func (pt *PoolTileSub) TopoWeightsGauss4D(send, recv *tensor.Shape, wts *tensor.Float32) error {
if pt.GaussFull.Sigma == 0 {
pt.GaussFull.Defaults()
}
@@ -468,11 +468,11 @@ func (pt *PoolTileSub) TopoWtsGauss4D(send, recv *tensor.Shape, wts *tensor.Floa
return nil
}
/////////////////////////////////////////////////////
// SigmoidTopo Wts
-// TopoWtsSigmoid2D sets values in given 4D tensor according to Topo settings.
+// TopoWeightsSigmoid2D sets values in given 4D tensor according to Topo settings.
// wts is shaped with first 2 outer-most dims as Y, X of units within pool
// of recv layer (these are units over which topography is defined)
// and remaining 2D is for sending layer (2D = sender).
-func (pt *PoolTileSub) TopoWtsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Float32) error {
+func (pt *PoolTileSub) TopoWeightsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Float32) error {
if pt.SigFull.Gain == 0 {
pt.SigFull.Defaults()
}
@@ -558,12 +558,12 @@ func (pt *PoolTileSub) TopoWtsSigmoid2D(send, recv *tensor.Shape, wts *tensor.Fl
return nil
}
-// TopoWtsSigmoid4D sets values in given 6D tensor according to Topo settings.
+// TopoWeightsSigmoid4D sets values in given 6D tensor according to Topo settings.
// wts is shaped with first 2 outer-most dims as Y, X of units within pool
// of recv layer (these are units over which topography is defined)
// and remaining 4D is for receptive field Size by units within pool size for
// sending layer.
-func (pt *PoolTileSub) TopoWtsSigmoid4D(send, recv *tensor.Shape, wts *tensor.Float32) error {
+func (pt *PoolTileSub) TopoWeightsSigmoid4D(send, recv *tensor.Shape, wts *tensor.Float32) error {
if pt.SigFull.Gain == 0 {
pt.SigFull.Defaults()
}
diff --git a/paths/prjn_test.go b/paths/prjn_test.go
index dd8aa688..f504c0af 100644
--- a/paths/prjn_test.go
+++ b/paths/prjn_test.go
@@ -219,7 +219,7 @@ func TestPoolTile(t *testing.T) {
// send = tensor.NewShape([]int{4, 4, 3, 3})
// recv = tensor.NewShape([]int{2, 2, 2, 2})
// wts := &tensor.Float32{}
- // pj.TopoWts(send, recv, wts)
+ // pj.TopoWeights(send, recv, wts)
// fmt.Printf("topo wts\n%v\n", wts)
}
@@ -280,7 +280,7 @@ func TestPoolTileRecip(t *testing.T) {
// send = tensor.NewShape([]int{4, 4, 3, 3})
// recv = tensor.NewShape([]int{2, 2, 2, 2})
// wts := &tensor.Float32{}
- // pj.TopoWts(send, recv, wts)
+ // pj.TopoWeights(send, recv, wts)
// fmt.Printf("topo wts\n%v\n", wts)
}
diff --git a/paths/typegen.go b/paths/typegen.go
index bbfd794d..874792c0 100644
--- a/paths/typegen.go
+++ b/paths/typegen.go
@@ -6,7 +6,7 @@ import (
"cogentcore.org/core/types"
)
-var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/paths.Circle", IDName: "circle", Doc: "Circle implements a circular pattern of connectivity between two layers\nwhere the center moves in proportion to receiver position with offset\nand multiplier factors, and a given radius is used (with wrap-around\noptionally).
A corresponding Gaussian bump of TopoWts is available as well.\nMakes for a good center-surround connectivity pattern.\n4D layers are automatically flattened to 2D for this connection.", Fields: []types.Field{{Name: "Radius", Doc: "radius of the circle, in units from center in sending layer"}, {Name: "Start", Doc: "starting offset in sending layer, for computing the corresponding sending center relative to given recv unit position"}, {Name: "Scale", Doc: "scaling to apply to receiving unit position to compute sending center as function of recv unit position"}, {Name: "AutoScale", Doc: "auto-scale sending center positions as function of relative sizes of send and recv layers -- if Start is positive then assumes it is a border, subtracted from sending size"}, {Name: "Wrap", Doc: "if true, connectivity wraps around edges"}, {Name: "TopoWts", Doc: "if true, this path should set gaussian topographic weights, according to following parameters"}, {Name: "Sigma", Doc: "gaussian sigma (width) as a proportion of the radius of the circle"}, {Name: "MaxWt", Doc: "maximum weight value for GaussWts function -- multiplies values"}, {Name: "SelfCon", Doc: "if true, and connecting layer to itself (self pathway), then make a self-connection from unit to itself"}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/paths.Circle", IDName: "circle", Doc: "Circle implements a circular pattern of connectivity between two layers\nwhere the center moves in proportion to receiver position with offset\nand multiplier factors, and a given radius is used (with wrap-around\noptionally). A corresponding Gaussian bump of TopoWeights is available as well.\nMakes for a good center-surround connectivity pattern.\n4D layers are automatically flattened to 2D for this connection.", Fields: []types.Field{{Name: "Radius", Doc: "radius of the circle, in units from center in sending layer"}, {Name: "Start", Doc: "starting offset in sending layer, for computing the corresponding sending center relative to given recv unit position"}, {Name: "Scale", Doc: "scaling to apply to receiving unit position to compute sending center as function of recv unit position"}, {Name: "AutoScale", Doc: "auto-scale sending center positions as function of relative sizes of send and recv layers -- if Start is positive then assumes it is a border, subtracted from sending size"}, {Name: "Wrap", Doc: "if true, connectivity wraps around edges"}, {Name: "TopoWeights", Doc: "if true, this path should set gaussian topographic weights, according to following parameters"}, {Name: "Sigma", Doc: "gaussian sigma (width) as a proportion of the radius of the circle"}, {Name: "MaxWt", Doc: "maximum weight value for GaussWts function -- multiplies values"}, {Name: "SelfCon", Doc: "if true, and connecting layer to itself (self pathway), then make a self-connection from unit to itself"}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/paths.Full", IDName: "full", Doc: "Full implements full all-to-all pattern of connectivity between two layers", Fields: []types.Field{{Name: "SelfCon", Doc: "if true, and connecting layer to itself (self pathway), then make a self-connection from unit to itself"}}}) From 07982411cebd4ad1636a95a3b30f6b05ba218695 Mon Sep 17 00:00:00 2001 From: "Randall C. 
O'Reilly" Date: Sat, 10 Aug 2024 23:19:47 -0700 Subject: [PATCH 10/10] refactor working in axon now -- moving to main branch --- emer/layer.go | 8 ++++---- emer/network.go | 2 +- emer/typegen.go | 12 ++++++------ 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/emer/layer.go b/emer/layer.go index 3c84e93a..b8fce303 100644 --- a/emer/layer.go +++ b/emer/layer.go @@ -202,7 +202,7 @@ type LayerBase struct { // Index is a 0..n-1 index of the position of the layer within // the list of layers in the network. - Index int `display:"-" inactive:"-"` + Index int `display:"-" edit:"-"` // SampleIndexes are the current set of "sample" unit indexes, // which are a smaller subset of units that represent the behavior @@ -211,7 +211,7 @@ type LayerBase struct { // If none have been set, then all units are used. // See utility function CenterPoolIndexes that returns indexes of // units in the central pools of a 4D layer. - SampleIndexes []int + SampleIndexes []int `table:"-"` // SampleShape is the shape to use for the subset of sample // unit indexes, in terms of an array of dimensions. @@ -220,7 +220,7 @@ type LayerBase struct { // otherwise a 1D array of len SampleIndexes will be used. // See utility function CenterPoolShape that returns shape of // units in the central pools of a 4D layer. - SampleShape tensor.Shape + SampleShape tensor.Shape `table:"-"` } // InitLayer initializes the layer, setting the EmerLayer interface @@ -370,7 +370,7 @@ func (ly *LayerBase) NumPools() int { // Returns error on invalid var name. func (ly *LayerBase) UnitValues(vals *[]float32, varNm string, di int) error { nn := ly.NumUnits() - slicesx.SetLength(*vals, nn) + *vals = slicesx.SetLength(*vals, nn) vidx, err := ly.EmerLayer.UnitVarIndex(varNm) if err != nil { nan := math32.NaN() diff --git a/emer/network.go b/emer/network.go index 03b006b0..a3620ebd 100644 --- a/emer/network.go +++ b/emer/network.go @@ -120,7 +120,7 @@ type NetworkBase struct { // methods for functions defined in the NetworkBase type. // Must set this with a pointer to the actual instance // when created, using InitNetwork function. - EmerNetwork Network + EmerNetwork Network `display:"-"` // overall name of network, which helps discriminate if there are multiple. 
Name string diff --git a/emer/typegen.go b/emer/typegen.go index 62b63a80..6393d4b8 100644 --- a/emer/typegen.go +++ b/emer/typegen.go @@ -6,9 +6,9 @@ import ( "cogentcore.org/core/types" ) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not 
connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWtsJSON, which reads into a separate\nstructure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Layer", IDName: "layer", Doc: "Layer defines the minimal interface for neural network layers,\nnecessary to support the visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nLayerBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the layer as an *emer.LayerBase,\nto access base functionality.", Returns: []string{"LayerBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of layer, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "UnitVarIndex", Doc: "UnitVarIndex returns the index of given variable within\nthe Neuron, according to *this layer's* UnitVarNames() list\n(using a map to lookup index), or -1 and error message if\nnot found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "UnitVal1D", Doc: "UnitVal1D returns value of given variable index on given unit,\nusing 
1-dimensional index, and a data parallel index di,\nfor networks capable of processing multiple input patterns\nin parallel. Returns NaN on invalid index.\nThis is the core unit var access method used by other methods,\nso it is the only one that needs to be updated for derived layer types.", Args: []string{"varIndex", "idx", "di"}, Returns: []string{"float32"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}, {Name: "NumRecvPaths", Doc: "NumRecvPaths returns the number of receiving pathways.", Returns: []string{"int"}}, {Name: "RecvPath", Doc: "RecvPath returns a specific receiving pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "NumSendPaths", Doc: "NumSendPaths returns the number of sending pathways.", Returns: []string{"int"}}, {Name: "SendPath", Doc: "SendPath returns a specific sending pathway.", Args: []string{"idx"}, Returns: []string{"Path"}}, {Name: "RecvPathValues", Doc: "RecvPathValues fills in values of given synapse variable name,\nfor pathway from given sending layer and neuron 1D index,\nfor all receiving neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type;\nused if non-empty, useful when there are multiple pathways\nbetween two layers.\nReturns error on invalid var name.\nIf the receiving neuron is not connected to the given sending\nlayer or neuron then the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path\n(vals always set to nan on path err).", Args: []string{"vals", "varNm", "sendLay", "sendIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "SendPathValues", Doc: "SendPathValues fills in values of given synapse variable name,\nfor pathway into given receiving layer and neuron 1D index,\nfor all sending neurons in this layer,\ninto given float32 slice (only resized if not big enough).\npathType is the string representation of the path type -- used if non-empty,\nuseful when there are multiple pathways between two layers.\nReturns error on invalid var name.\nIf the sending neuron is not connected to the given receiving layer or neuron\nthen the value is set to math32.NaN().\nReturns error on invalid var name or lack of recv path (vals always set to nan on path err).", Args: []string{"vals", "varNm", "recvLay", "recvIndex1D", "pathType"}, Returns: []string{"error"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Layer\nand recv pathway parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this\nlayer and its recv pathways.\nCalls UpdateParams on anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm\neach parameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if\nthere were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Layer that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: 
"AllParams", Doc: "AllParams returns a listing of all parameters in the Layer", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWeightsJSON", Doc: "ReadWeightsJSON reads the weights from this layer from the\nreceiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved\n*for one layer only* and is not used for the\nnetwork-level ReadWeightsJSON, which reads into a separate\nstructure -- see SetWeights method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this layer from weights.Layer\ndecoded values", Args: []string{"lw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Info", Doc: "Info contains descriptive information about the layer.\nThis is displayed in a tooltip in the network view."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Off", Doc: "Off turns off the layer, removing from all computations.\nThis provides a convenient way to dynamically test for\nthe contributions of the layer, for example."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresopnds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc. 
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}})
+var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LayerBase", IDName: "layer-base", Doc: "LayerBase defines the basic shared data for neural network layers,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nNothing algorithm-specific is implemented here", Fields: []types.Field{{Name: "EmerLayer", Doc: "EmerLayer provides access to the emer.Layer interface\nmethods for functions defined in the LayerBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitLayer function."}, {Name: "Name", Doc: "Name of the layer, which must be unique within the network.\nLayers are typically accessed directly by name, via a map."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple layers\nthat all get the same parameters. This can be space separated\nwith multiple classes."}, {Name: "Info", Doc: "Info contains descriptive information about the layer.\nThis is displayed in a tooltip in the network view."}, {Name: "Off", Doc: "Off turns off the layer, removing from all computations.\nThis provides a convenient way to dynamically test for\nthe contributions of the layer, for example."}, {Name: "Shape", Doc: "Shape of the layer, either 2D or 4D. Although spatial topology\nis not relevant to all algorithms, the 2D shape is important for\nefficiently visualizing large numbers of units / neurons.\n4D layers have 2D Pools of units embedded within a larger 2D\norganization of such pools. This is used for max-pooling or\npooled inhibition at a finer-grained level, and biologically\ncorresponds to hypercolumns in the cortex for example.\nOrder is outer-to-inner (row major), so Y then X for 2D;\n4D: Y-X unit pools then Y-X neurons within pools."}, {Name: "Pos", Doc: "Pos specifies the relative spatial relationship to another\nlayer, which determines positioning. Every layer except one\n\"anchor\" layer should be positioned relative to another,\ne.g., RightOf, Above, etc.
This provides robust positioning\nin the face of layer size changes etc.\nLayers are arranged in X-Y planes, stacked vertically along the Z axis."}, {Name: "Index", Doc: "Index is a 0..n-1 index of the position of the layer within\nthe list of layers in the network."}, {Name: "SampleIndexes", Doc: "SampleIndexes are the current set of \"sample\" unit indexes,\nwhich are a smaller subset of units that represent the behavior\nof the layer, for computationally intensive statistics and displays\n(e.g., PCA, ActRF, NetView rasters), when the layer is large.\nIf none have been set, then all units are used.\nSee utility function CenterPoolIndexes that returns indexes of\nunits in the central pools of a 4D layer."}, {Name: "SampleShape", Doc: "SampleShape is the shape to use for the subset of sample\nunit indexes, in terms of an array of dimensions.\nSee Shape for more info.\nLayers that set SampleIndexes should also set this,\notherwise a 1D array of len SampleIndexes will be used.\nSee utility function CenterPoolShape that returns shape of\nunits in the central pools of a 4D layer."}}}) var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetParams", IDName: "net-params", Doc: "NetParams handles standard parameters for a Network\n(use econfig and a Config struct for other configuration params).\nAssumes a Set named \"Base\" has the base-level parameters, which are\nalways applied first, followed optionally by additional Set(s)\nthat can have different parameters to try.", Fields: []types.Field{{Name: "Params", Doc: "full collection of param sets to use"}, {Name: "ExtraSheets", Doc: "optional additional sheets of parameters to apply after Base -- can use multiple names separated by spaces (don't put spaces in Sheet names!)"}, {Name: "Tag", Doc: "optional additional tag to add to file names, logs to identify params / run config"}, {Name: "Network", Doc: "the network to apply parameters to"}, {Name: "NetHypers", Doc: "list of hyper parameters compiled from the network parameters, using the layers and pathways from the network, so that the same styling logic as for regular parameters can be used"}, {Name: "SetMsg", Doc: "print out messages for each parameter that is set"}}}) @@ -16,10 +16,10 @@ var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.LaySiz var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetSize", IDName: "net-size", Doc: "NetSize is a network schema for holding a params for layer sizes.\nValues can be queried for getting sizes when configuring the network.\nUses params.Flex to support flexible parameter specification"}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the minimal interface for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nMost of the standard expected functionality is defined in the\nNetworkBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the network as an *emer.NetworkBase,\nto access base functionality.", Returns: []string{"NetworkBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "NumLayers", Doc: "NumLayers returns the number of layers in the network.", Returns: []string{"int"}}, {Name: "EmerLayer", Doc: 
"EmerLayer returns layer as emer.Layer interface at given index.\nDoes not do extra bounds checking.", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything in the Network."}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to layers\nand paths in this network.\nCalls UpdateParams on anything set to ensure derived parameters\nare all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nit always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Network that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Network", Returns: []string{"string"}}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on\nthe units in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all layers need to support all variables,\nbut must safely return math32.NaN() for unsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables\non the synapses in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all pathways need to support all variables,\nbut must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, 
{Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes network weights (and any other state\nthat adapts with learning) to JSON-formatted output.", Args: []string{"w"}, Returns: []string{"error"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads network weights (and any other state\nthat adapts with learning) from JSON-formatted input.\nReads into a temporary weights.Network structure that\nis then passed to SetWts to actually set the weights.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this network from weights.Network\ndecoded values.", Args: []string{"nw"}, Returns: []string{"error"}}, {Name: "SaveWtsJSON", Doc: "SaveWtsJSON saves network weights (and any other state\nthat adapts with learning) to a JSON-formatted file.\nIf filename has .gz extension, then file is gzip compressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWtsJSON", Doc: "OpenWtsJSON opens network weights (and any other state that\nadapts with learning) from a JSON-formatted file.\nIf filename has .gz extension, then file is gzip uncompressed.", Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "VarRange", Doc: "VarRange returns the min / max values for given variable", Args: []string{"varNm"}, Returns: []string{"min", "max", "err"}}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Network", IDName: "network", Doc: "Network defines the minimal interface for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.\nMost of the standard expected functionality is defined in the\nNetworkBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the network as an *emer.NetworkBase,\nto access base functionality.", Returns: []string{"NetworkBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "NumLayers", Doc: "NumLayers returns the number of layers in the network.", Returns: []string{"int"}}, {Name: "EmerLayer", Doc: "EmerLayer returns layer as emer.Layer interface at given index.\nDoes not do extra bounds checking.", Args: []string{"idx"}, Returns: []string{"Layer"}}, {Name: "MaxParallelData", Doc: "MaxParallelData returns the maximum number of data inputs that can be\nprocessed in parallel by the network.\nThe NetView supports display of up to this many data elements.", Returns: []string{"int"}}, {Name: "NParallelData", Doc: "NParallelData returns the current number of data inputs currently being\nprocessed in parallel by the network.\nLogging supports recording each of these where appropriate.", Returns: []string{"int"}}, {Name: "Defaults", Doc: "Defaults sets default parameter values for everything 
in the Network."}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Network parameters,\nbased on any other params that might have changed."}, {Name: "KeyLayerParams", Doc: "KeyLayerParams returns a listing for all layers in the network,\nof the most important layer-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "KeyPathParams", Doc: "KeyPathParams returns a listing for all Recv pathways in the network,\nof the most important pathway-level params (specific to each algorithm).", Returns: []string{"string"}}, {Name: "UnitVarNames", Doc: "UnitVarNames returns a list of variable names available on\nthe units in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all layers need to support all variables,\nbut must safely return math32.NaN() for unsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "UnitVarProps", Doc: "UnitVarProps returns a map of unit variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\ndesc:\"txt\" tooltip description of the variable\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables\non the synapses in this network.\nThis list determines what is shown in the NetView\n(and the order of vars list).\nNot all pathways need to support all variables,\nbut must safely return math32.NaN() for\nunsupported ones.\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarProps", Doc: "SynVarProps returns a map of synapse variable properties,\nwith the key being the name of the variable,\nand the value gives a space-separated list of\ngo-tag-style properties for that variable.\nThe NetView recognizes the following properties:\nrange:\"##\" = +- range around 0 for default display scaling\nmin:\"##\" max:\"##\" = min, max display range\nauto-scale:\"+\" or \"-\" = use automatic scaling instead of fixed range or not.\nzeroctr:\"+\" or \"-\" = control whether zero-centering is used\nNote: this is typically a global list so do not modify!", Returns: []string{"map[string]string"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetworkBase", IDName: "network-base", Doc: "NetworkBase defines the basic data for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.", Fields: []types.Field{{Name: "EmerNetwork", Doc: "EmerNetwork provides access to the emer.Network interface\nmethods for functions defined in the NetworkBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitNetwork function."}, {Name: "Name", Doc: "overall name of network, which helps discriminate if there are multiple."}, {Name: "WeightsFile", Doc: "filename of last weights file loaded or saved."}, {Name: "LayerNameMap", Doc: "map of name to layers, for EmerLayerByName methods"}, {Name: "LayerClassMap", Doc: "map from class name to layer names."}, {Name: "MinPos", Doc: "minimum 
display position in network"}, {Name: "MaxPos", Doc: "maximum display position in network"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}, {Name: "Rand", Doc: "random number generator for the network.\nall random calls must use this.\nSet seed here for weight initialization values."}, {Name: "RandSeed", Doc: "Random seed to be set at the start of configuring\nthe network and initializing the weights.\nSet this to get a different set of weights."}}}) +var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.NetworkBase", IDName: "network-base", Doc: "NetworkBase defines the basic data for a neural network,\nused for managing the structural elements of a network,\nand for visualization, I/O, etc.", Methods: []types.Method{{Name: "SaveWeightsJSON", Doc: "SaveWeightsJSON saves network weights (and any other state that adapts with learning)\nto a JSON-formatted file. If filename has .gz extension, then file is gzip compressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}, {Name: "OpenWeightsJSON", Doc: "OpenWeightsJSON opens network weights (and any other state that adapts with learning)\nfrom a JSON-formatted file. If filename has .gz extension, then file is gzip uncompressed.", Directives: []types.Directive{{Tool: "types", Directive: "add"}}, Args: []string{"filename"}, Returns: []string{"error"}}}, Fields: []types.Field{{Name: "EmerNetwork", Doc: "EmerNetwork provides access to the emer.Network interface\nmethods for functions defined in the NetworkBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitNetwork function."}, {Name: "Name", Doc: "overall name of network, which helps discriminate if there are multiple."}, {Name: "WeightsFile", Doc: "filename of last weights file loaded or saved."}, {Name: "LayerNameMap", Doc: "map of name to layers, for EmerLayerByName methods"}, {Name: "LayerClassMap", Doc: "map from class name to layer names."}, {Name: "MinPos", Doc: "minimum display position in network"}, {Name: "MaxPos", Doc: "maximum display position in network"}, {Name: "MetaData", Doc: "optional metadata that is saved in network weights files,\ne.g., can indicate number of epochs that were trained,\nor any other information about this network that would be useful to save."}, {Name: "Rand", Doc: "random number generator for the network.\nall random calls must use this.\nSet seed here for weight initialization values."}, {Name: "RandSeed", Doc: "Random seed to be set at the start of configuring\nthe network and initializing the weights.\nSet this to get a different set of weights."}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation,", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, 
{Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. The actual Path implmenetation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this paths. This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse,\nusing the natural ordering of the synapses (sender based for Axon),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nIt always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Pathway that\nare not at their default values -- useful for setting 
param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Pathway.", Returns: []string{"string"}}, {Name: "WriteWtsJSON", Doc: "WriteWtsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWtsJSON", Doc: "ReadWtsJSON reads the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWtsJSON,\nwhich reads into a separate structure -- see SetWts method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWts", Doc: "SetWts sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}})
+var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.Path", IDName: "path", Doc: "Path defines the minimal interface for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThis supports visualization (NetView), I/O,\nand parameter setting functionality provided by emergent.\nMost of the standard expected functionality is defined in the\nPathBase struct, and this interface only has methods that must be\nimplemented specifically for a given algorithmic implementation.", Methods: []types.Method{{Name: "AsEmer", Doc: "AsEmer returns the path as an *emer.PathBase,\nto access base functionality.", Returns: []string{"PathBase"}}, {Name: "Label", Doc: "Label satisfies the core.Labeler interface for getting\nthe name of objects generically.", Returns: []string{"string"}}, {Name: "TypeName", Doc: "TypeName is the type or category of path, defined\nby the algorithm (and usually set by an enum).", Returns: []string{"string"}}, {Name: "SendLayer", Doc: "SendLayer returns the sending layer for this pathway,\nas an emer.Layer interface. The actual Path implementation\ncan use a Send field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "RecvLayer", Doc: "RecvLayer returns the receiving layer for this pathway,\nas an emer.Layer interface. The actual Path implementation\ncan use a Recv field with the actual Layer struct type.", Returns: []string{"Layer"}}, {Name: "NumSyns", Doc: "NumSyns returns the number of synapses for this path.\nThis is the max idx for SynValue1D and the number\nof vals set by SynValues.", Returns: []string{"int"}}, {Name: "SynIndex", Doc: "SynIndex returns the index of the synapse between given send, recv unit indexes\n(1D, flat indexes). Returns -1 if synapse not found between these two neurons.\nThis requires searching within connections for receiving unit (a bit slow).", Args: []string{"sidx", "ridx"}, Returns: []string{"int"}}, {Name: "SynVarNames", Doc: "SynVarNames returns the names of all the variables on the synapse\nThis is typically a global list so do not modify!", Returns: []string{"[]string"}}, {Name: "SynVarNum", Doc: "SynVarNum returns the number of synapse-level variables\nfor this path.
This is needed for extending indexes in derived types.", Returns: []string{"int"}}, {Name: "SynVarIndex", Doc: "SynVarIndex returns the index of given variable within the synapse,\naccording to *this path's* SynVarNames() list (using a map to lookup index),\nor -1 and error message if not found.", Args: []string{"varNm"}, Returns: []string{"int", "error"}}, {Name: "SynValues", Doc: "SynValues sets values of given variable name for each synapse,\nusing the natural ordering of the synapses (sender based for Axon),\ninto given float32 slice (only resized if not big enough).\nReturns error on invalid var name.", Args: []string{"vals", "varNm"}, Returns: []string{"error"}}, {Name: "SynValue1D", Doc: "SynValue1D returns value of given variable index\n(from SynVarIndex) on given SynIndex.\nReturns NaN on invalid index.\nThis is the core synapse var access method used by other methods,\nso it is the only one that needs to be updated for derived types.", Args: []string{"varIndex", "synIndex"}, Returns: []string{"float32"}}, {Name: "UpdateParams", Doc: "UpdateParams() updates parameter values for all Path parameters,\nbased on any other params that might have changed."}, {Name: "ApplyParams", Doc: "ApplyParams applies given parameter style Sheet to this pathway.\nCalls UpdateParams if anything set to ensure derived\nparameters are all updated.\nIf setMsg is true, then a message is printed to confirm each\nparameter that is set.\nIt always prints a message if a parameter fails to be set.\nreturns true if any params were set, and error if there were any errors.", Args: []string{"pars", "setMsg"}, Returns: []string{"bool", "error"}}, {Name: "SetParam", Doc: "SetParam sets parameter at given path to given value.\nreturns error if path not found or value cannot be set.", Args: []string{"path", "val"}, Returns: []string{"error"}}, {Name: "NonDefaultParams", Doc: "NonDefaultParams returns a listing of all parameters in the Pathway that\nare not at their default values -- useful for setting param styles etc.", Returns: []string{"string"}}, {Name: "AllParams", Doc: "AllParams returns a listing of all parameters in the Pathway.", Returns: []string{"string"}}, {Name: "WriteWeightsJSON", Doc: "WriteWeightsJSON writes the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nWe build in the indentation logic to make it much faster and\nmore efficient.", Args: []string{"w", "depth"}}, {Name: "ReadWeightsJSON", Doc: "ReadWeightsJSON reads the weights from this pathway\nfrom the receiver-side perspective in a JSON text format.\nThis is for a set of weights that were saved *for one path only*\nand is not used for the network-level ReadWeightsJSON,\nwhich reads into a separate structure -- see SetWeights method.", Args: []string{"r"}, Returns: []string{"error"}}, {Name: "SetWeights", Doc: "SetWeights sets the weights for this pathway from weights.Path\ndecoded values", Args: []string{"pw"}, Returns: []string{"error"}}}}) -var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nName is set automatically to:\nNothing algorithm-specific is implemented here.", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, 
using InitPath function."}, {Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. This can be space separated\nwith multple classes."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}}})
+var _ = types.AddType(&types.Type{Name: "github.com/emer/emergent/v2/emer.PathBase", IDName: "path-base", Doc: "PathBase defines the basic shared data for a pathway\nwhich connects two layers, using a specific Pattern\nof connectivity, and with its own set of parameters.\nThe same struct token is added to the Recv and Send\nlayer path lists.", Fields: []types.Field{{Name: "EmerPath", Doc: "EmerPath provides access to the emer.Path interface\nmethods for functions defined in the PathBase type.\nMust set this with a pointer to the actual instance\nwhen created, using InitPath function."}, {Name: "Name", Doc: "Name of the path, which can be automatically set to\nSendLayer().Name + \"To\" + RecvLayer().Name via\nSetStandardName method."}, {Name: "Class", Doc: "Class is for applying parameter styles across multiple paths\nthat all get the same parameters. This can be space separated\nwith multiple classes."}, {Name: "Info", Doc: "Info contains descriptive information about the pathway.\nThis is displayed in a tooltip in the network view."}, {Name: "Notes", Doc: "can record notes about this pathway here."}, {Name: "Pattern", Doc: "Pattern specifies the pattern of connectivity\nfor interconnecting the sending and receiving layers."}, {Name: "Off", Doc: "Off inactivates this pathway, allowing for easy experimentation."}}})
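
TopoWeights dispatches on send.NumDims() and on which of the Gauss/Sig options is enabled, and returns an error when none of them is on. A minimal usage sketch in Go, mirroring the shapes in the commented-out prjn_test.go checks above; the NewPoolTile constructor, and it enabling the Gaussian options by default, are assumptions not confirmed by this patch:

package main

import (
	"fmt"
	"log"

	"cogentcore.org/core/tensor"
	"github.com/emer/emergent/v2/paths"
)

func main() {
	pt := paths.NewPoolTile() // assumed constructor; assumed to enable Gaussian topography by default
	send := tensor.NewShape([]int{4, 4, 3, 3}) // 4D sender: Y-X pools, then Y-X units per pool
	recv := tensor.NewShape([]int{2, 2, 2, 2}) // 4D receiver
	wts := &tensor.Float32{}
	// send is 4D, so this dispatches to TopoWeightsGauss4D and fills a 6D
	// wts tensor: recv unit Y-X outermost, then the 4D receptive field.
	if err := pt.TopoWeights(send, recv, wts); err != nil {
		log.Println(err) // reported when no Gauss or Sig option is on
	}
	fmt.Printf("topo wts\n%v\n", wts)
}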
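
The one-line UnitValues change above (*vals = slicesx.SetLength(*vals, nn)) fixes a classic Go slice bug: SetLength, like append, returns a possibly reallocated slice, so discarding the result leaves the caller's slice at its old length. A small self-contained illustration; the cogentcore.org/core/base/slicesx import path is taken from layer.go's usage:

package main

import (
	"fmt"

	"cogentcore.org/core/base/slicesx"
)

func main() {
	vals := make([]float32, 2)
	slicesx.SetLength(vals, 5) // buggy pattern: returned slice is discarded
	fmt.Println(len(vals))     // still 2
	vals = slicesx.SetLength(vals, 5) // fixed pattern, as in the patch
	fmt.Println(len(vals))     // 5
}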
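
The Layer interface also gains VarRange (min / max values for a named variable). A sketch of one way such a method could be written purely in terms of the interface's UnitVarIndex and UnitVal1D accessors, skipping the NaN values that UnitVal1D returns for invalid entries; this is illustrative, not the actual emer implementation:

package main

import (
	"cogentcore.org/core/math32"
	"github.com/emer/emergent/v2/emer"
)

// varRange computes the min / max of varNm over all units of ly,
// using data-parallel index 0 and skipping NaN values.
func varRange(ly emer.Layer, varNm string) (min, max float32, err error) {
	vidx, err := ly.UnitVarIndex(varNm)
	if err != nil {
		return
	}
	nn := ly.AsEmer().NumUnits()
	first := true
	for i := 0; i < nn; i++ {
		v := ly.UnitVal1D(vidx, i, 0)
		if math32.IsNaN(v) {
			continue // unsupported or invalid entries are reported as NaN
		}
		if first {
			min, max = v, v
			first = false
		} else {
			min = math32.Min(min, v)
			max = math32.Max(max, v)
		}
	}
	return
}

func main() {}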
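
SaveWeightsJSON and OpenWeightsJSON now live on NetworkBase (via types:add directives) rather than on the Network interface. A hedged sketch of the gzip-by-extension behavior their Doc strings describe; the core.Filename argument type is an assumption carried over from the earlier SaveWtsJSON API and may differ:

package main

import (
	"log"

	"cogentcore.org/core/core"
	"github.com/emer/emergent/v2/emer"
)

// checkpoint saves and reloads weights; a ".gz" suffix on the filename
// selects gzip compression on save and gzip decompression on open.
func checkpoint(nb *emer.NetworkBase) {
	if err := nb.SaveWeightsJSON(core.Filename("net.wts.json.gz")); err != nil {
		log.Println(err)
	}
	if err := nb.OpenWeightsJSON(core.Filename("net.wts.json.gz")); err != nil {
		log.Println(err)
	}
}

func main() {} // nb would come from a real network's AsEmer() in practice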