package goevo
import (
"encoding/json"
"fmt"
"math"
"slices"
"strings"
"gonum.org/v1/gonum/mat"
)
// Activation is an enum representing the different activation functions that can be used in a neural network.
type Activation int
const (
Relu Activation = iota
Linear
Sigmoid
Tanh
Sin
Cos
Binary
Relum
Reln
Sawtooth
Abs
Softmax
)
// AllSingleActivations is a list of all activations that can be applied to a single value,
// i.e. they can be used in [Activation.ActivateValue].
var AllSingleActivations = []Activation{Relu, Linear, Sigmoid, Tanh, Sin, Cos, Binary, Reln, Relum, Sawtooth, Abs}
// AllVectorActivations is a list of all activations that can be applied to a vector,
// i.e. they can be used in [Activation.ActivateVector].
// This is a superset of [AllSingleActivations].
var AllVectorActivations = append([]Activation{Softmax}, AllSingleActivations...)
// String returns the string representation of the activation.
func (a Activation) String() string {
switch a {
case Relu:
return "relu"
case Linear:
return "linear"
case Sigmoid:
return "sigmoid"
case Tanh:
return "tanh"
case Sin:
return "sin"
case Cos:
return "cos"
case Binary:
return "binary"
case Reln:
return "reln"
case Relum:
return "relum"
case Sawtooth:
return "sawtooth"
case Abs:
return "abs"
case Softmax:
return "softmax"
}
panic("unknown activation")
}
// CanSingleApply reports whether the activation can be applied to a single value,
// i.e. with [Activation.ActivateValue].
func (a Activation) CanSingleApply() bool {
return slices.Contains(AllSingleActivations, a)
}
// ActivateValue applies the activation function to the given value.
// Not all activation functions (for example softmax) can be applied with this function; you can check with [Activation.CanSingleApply].
func (a Activation) ActivateValue(x float64) float64 {
switch a {
case Relu:
if x < 0 {
return 0
}
return x
case Linear:
return x
case Sigmoid:
return 1 / (1 + math.Exp(-x))
case Tanh:
return math.Tanh(x)
case Sin:
return math.Sin(x)
case Cos:
return math.Cos(x)
case Binary:
if x > 0 {
return 1
} else {
return 0
}
case Relum:
if x < 0 {
return 0
} else if x > 1 {
return 1
}
return x
case Reln:
if x < 0 {
return 0
}
return math.Log(x + 1)
case Sawtooth:
xr := math.Round(x)
if xr > x {
xr--
}
return x - xr
case Abs:
return math.Abs(x)
case Softmax:
panic(fmt.Sprintf("the activation '%s' is not supported for activating a single node", a))
}
panic("unknown activation")
}
// ActivateVector applies the activation function to the vector in place.
// For most activation functions, this falls back to element-wise [Activation.ActivateValue].
func (a Activation) ActivateVector(x *mat.VecDense) {
switch a {
case Softmax:
total := 0.0
for i := range x.Len() {
total += math.Exp(x.AtVec(i))
}
if total == 0 {
for i := range x.Len() {
x.SetVec(i, 0)
}
} else {
for i := range x.Len() {
x.SetVec(i, math.Exp(x.AtVec(i))/total)
}
}
default:
for i := range x.Len() {
x.SetVec(i, a.ActivateValue(x.AtVec(i)))
}
}
}
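// Illustrative usage sketch (not part of the original API surface): element-wise activations
// go through ActivateValue, while vector-only activations such as Softmax must go through
// ActivateVector, which modifies the vector in place so that (for Softmax) it sums to 1.
func exampleActivationUsage() (float64, *mat.VecDense) {
single := Tanh.ActivateValue(0.5)
v := mat.NewVecDense(3, []float64{1, 2, 3})
Softmax.ActivateVector(v)
return single, v
}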
// Implementations
var _ json.Marshaler = Relu
var dummy = Relu
var _ json.Unmarshaler = &dummy
// UnmarshalJSON implements [json.Unmarshaler].
func (a *Activation) UnmarshalJSON(bs []byte) error {
s := strings.TrimPrefix(strings.TrimSuffix(string(bs), "\""), "\"")
switch s {
case Relu.String():
*a = Relu
case Linear.String():
*a = Linear
case Sigmoid.String():
*a = Sigmoid
case Tanh.String():
*a = Tanh
case Sin.String():
*a = Sin
case Cos.String():
*a = Cos
case Binary.String():
*a = Binary
case Reln.String():
*a = Reln
case Relum.String():
*a = Relum
case Sawtooth.String():
*a = Sawtooth
case Abs.String():
*a = Abs
case Softmax.String():
*a = Softmax
default:
return fmt.Errorf("invalid activation: '%s'", s)
}
return nil
}
// MarshalJSON implements [json.Marshaler].
func (a Activation) MarshalJSON() ([]byte, error) {
return []byte("\"" + a.String() + "\""), nil
}
package goevo
// Agent is a container for a genotype and its fitness.
// The genotype can be of any type.
type Agent[T any] struct {
Genotype T
Fitness float64
}
// NewAgent creates a new agent with the given genotype.
func NewAgent[T any](gt T) *Agent[T] {
return &Agent[T]{Genotype: gt}
}
package goevo
import (
"sync/atomic"
)
// Counter is a simple counter that can be used to generate unique IDs.
type Counter struct {
n int64
}
// NewCounter creates a new counter, starting at 0.
func NewCounter() *Counter {
return &Counter{0}
}
// Next returns the next value of the counter. It is safe for concurrent use.
func (c *Counter) Next() int {
return int(atomic.AddInt64(&c.n, 1))
}
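// Illustrative sketch: NewCounter starts at 0, so the first call to Next returns 1.
// Sharing one Counter across a run keeps all generated IDs unique.
func exampleCounterUsage() (int, int) {
c := NewCounter()
return c.Next(), c.Next() // 1, 2
}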
package goevo
import "math/rand/v2"
// Generator is a source of values of type T, used for example to create and mutate genotypes.
type Generator[T any] interface {
// Next returns the next generated value.
Next() T
}
type generatorNormal[T floatType] struct {
mean float64
std float64
}
// NewGeneratorNormal creates a new [Generator] that generates floating point numbers
// within a normal distribution.
func NewGeneratorNormal[T floatType](mean, std T) Generator[T] {
if std < 0 {
panic("cannot have std < 0")
}
return &generatorNormal[T]{
mean: float64(mean),
std: float64(std),
}
}
func (s *generatorNormal[T]) Next() T {
v := rand.NormFloat64()*s.std + s.mean
return T(v)
}
type generatorChoice[T any] struct {
choices []T
}
// NewGeneratorChoices creates a new [Generator] that chooses values from
// the given choices slice.
func NewGeneratorChoices[T any](choices []T) Generator[T] {
if len(choices) == 0 {
panic("cannot have no choices")
}
return &generatorChoice[T]{
choices: choices,
}
}
func (c *generatorChoice[T]) Next() T {
return c.choices[rand.IntN(len(c.choices))]
}
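// Illustrative sketch (the parameters are arbitrary): generators are the source of values for
// genotype construction and mutation. One draws floats from a normal distribution, the other
// picks uniformly from a fixed set of choices.
func exampleGeneratorUsage() (float64, rune) {
weights := NewGeneratorNormal(0.0, 0.1)
letters := NewGeneratorChoices([]rune("abc"))
return weights.Next(), letters.Next()
}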
package goevo
// Cloneable is an interface that must be implemented by any object that wants to be cloned.
// The Clone method must return a new object that is a deep copy of the original object.
// Because Clone is typed as returning any, use the generic [Clone] function to clone while preserving the concrete type.
type Cloneable interface {
Clone() any
}
// Clone clones an object that implements the [Cloneable] interface.
// It also casts the child object to the type of the parent object.
func Clone[T Cloneable](obj T) T {
return obj.Clone().(T)
}
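// Illustrative sketch: Clone preserves the concrete type, so cloning an *ArrayGenotype[int]
// yields another *ArrayGenotype[int] without a manual type assertion.
func exampleCloneUsage(g *ArrayGenotype[int]) *ArrayGenotype[int] {
return Clone(g)
}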
package goevo
// Population is an interface for a population with genotypes with type T.
// It stores its genotypes wrapped in the [Agent] struct, to keep track of fitness.
// The population may also store a reference to a [Reproduction] and a [Selection]
// to be used in the [NextGeneration] method.
type Population[T any] interface {
// NextGeneration returns the population resulting from agents selected using this population's selection strategy
// reproducing using this population's reproduction strategy.
NextGeneration() Population[T]
// All returns every [Agent] in the population.
// This may have no particular order.
All() []*Agent[T]
}
// NextGeneration advances the population by one generation, preserving its concrete type.
func NextGeneration[T any, U Population[T]](pop U) U {
return pop.NextGeneration().(U)
}
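// Illustrative sketch: the generic NextGeneration helper keeps the concrete population type
// (here the genotype type is given explicitly), avoiding a type assertion at the call site.
func exampleNextGenerationUsage(pop *SimplePopulation[int]) *SimplePopulation[int] {
return NextGeneration[int](pop)
}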
package goevo
// Selection is a strategy for selecting an [Agent] from a slice.
// It acts on agents of type T.
type Selection[T any] interface {
// SetAgents caches the [Agent]s which this selection will use until it is called again.
// This is called once per generation. You may wish to perform slow operations here such as sorting by fitness.
SetAgents(agents []*Agent[T])
// Select returns an [Agent] selected from the cached pool set by [Selection.SetAgents].
Select() *Agent[T]
}
// SelectN selects n [Agent]s from the given selection strategy, returning them in a slice.
func SelectN[T any](selection Selection[T], n int) []*Agent[T] {
agents := make([]*Agent[T], n)
for i := range agents {
agents[i] = selection.Select()
}
return agents
}
// SelectNGenotypes selects n genotypes from the given selection strategy, returning them in a slice.
func SelectNGenotypes[T any](selection Selection[T], n int) []T {
gts := make([]T, n)
for i := range gts {
gts[i] = selection.Select().Genotype
}
return gts
}
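// Illustrative sketch: a Selection must be primed with SetAgents once per generation before
// Select (or the SelectN and SelectNGenotypes helpers) can be used.
func exampleSelectionUsage(sel Selection[int], agents []*Agent[int]) []int {
sel.SetAgents(agents)
return SelectNGenotypes(sel, 3)
}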
package goevo
// ================================== Utilities ==================================
// Generators
var _ Generator[float64] = NewGeneratorNormal(0.0, 0.0)
var _ Generator[rune] = NewGeneratorChoices([]rune("abcdefg"))
// Reproductions
var _ Reproduction[any] = &twoPhaseReproduction[any]{}
// ================================== Genotypes ==================================
// Array genotypes
var _ Cloneable = &ArrayGenotype[int]{}
var _ Crossover[*ArrayGenotype[any]] = NewArrayCrossoverUniform[any]()
var _ Crossover[*ArrayGenotype[any]] = NewArrayCrossoverAsexual[any]()
var _ Crossover[*ArrayGenotype[any]] = NewArrayCrossoverKPoint[any](0)
var _ Mutation[*ArrayGenotype[float64]] = NewArrayMutationGeneratorAdd(NewGeneratorNormal(0.0, 0.0), 0.0)
var _ Mutation[*ArrayGenotype[bool]] = NewArrayMutationGeneratorReplace(NewGeneratorChoices([]bool{true, false}), 0.0)
var _ Mutation[*ArrayGenotype[bool]] = NewArrayMutationGenerator(NewGeneratorChoices([]bool{true, false}), func(old, new bool) bool { return old && new }, 0.0)
// Dense genotypes
var _ Cloneable = &DenseGenotype{}
var _ Forwarder = &DenseGenotype{}
var _ Crossover[*DenseGenotype] = &denseCrossoverUniform{}
var _ Mutation[*DenseGenotype] = &denseMutationUniform{}
// NEAT genotypes + phenotypes
var _ Cloneable = &NeatGenotype{}
var _ Buildable = &NeatGenotype{}
var _ Forwarder = &NeatPhenotype{}
var _ Crossover[*NeatGenotype] = &neatCrossoverSimple{}
var _ Crossover[*NeatGenotype] = &neatCrossoverAsexual{}
var _ Mutation[*NeatGenotype] = &neatMutationStd{}
// ================================== Selections ==================================
// Elite selection
var _ Selection[any] = &eliteSelection[any]{}
// Tournament selection
var _ Selection[any] = NewTournamentSelection[any](3)
// ================================== Populations ==================================
// Simple population
var _ Population[any] = &SimplePopulation[any]{}
// Speciated population
var _ Population[any] = &SpeciatedPopulation[any]{}
// Hill climber population
var _ Population[any] = &HillClimberPopulation[any]{}
package goevo
import (
"sort"
"math/rand/v2"
)
// ArrayGenotype is a genotype that is a slice of values.
type ArrayGenotype[T any] struct {
values []T
}
// NewArrayGenotype creates a new [ArrayGenotype] of the given length, generating each value with gen.
func NewArrayGenotype[T any](length int, gen Generator[T]) *ArrayGenotype[T] {
if length <= 0 {
panic("must have length > 1")
}
if gen == nil {
panic("must have non-nil generator")
}
vals := make([]T, length)
for i := range vals {
vals[i] = gen.Next()
}
return &ArrayGenotype[T]{
values: vals,
}
}
func (g *ArrayGenotype[T]) Len() int {
return len(g.values)
}
func (g *ArrayGenotype[T]) At(i int) T {
return g.values[i]
}
func (g *ArrayGenotype[T]) Set(i int, v T) {
g.values[i] = v
}
// Clone returns a new genotype that is a copy of this genotype.
func (g *ArrayGenotype[T]) Clone() any {
clone := make([]T, len(g.values))
copy(clone, g.values)
return &ArrayGenotype[T]{values: clone}
}
// arrayCrossoverUniform is a crossover strategy that selects each gene from one of the parents with equal probability.
// The location of a gene has no effect on the probability of it being selected from either parent.
// It requires two parents.
type arrayCrossoverUniform[T any] struct{}
// NewArrayCrossoverUniform creates a [Crossover] that picks each gene from one of the two parents with equal probability.
func NewArrayCrossoverUniform[T any]() Crossover[*ArrayGenotype[T]] {
return &arrayCrossoverUniform[T]{}
}
// Crossover implements CrossoverStrategy.
func (p *arrayCrossoverUniform[T]) Crossover(gs []*ArrayGenotype[T]) *ArrayGenotype[T] {
if len(gs) != 2 {
panic("PointCrossoverStrategy requires exactly 2 parents")
}
pa, pb := gs[0], gs[1]
if len(pa.values) != len(pb.values) {
panic("genotypes must have the same length for PointCrossoverStrategy")
}
child := make([]T, len(pa.values))
for i := range child {
if rand.Float64() < 0.5 {
child[i] = pa.values[i]
} else {
child[i] = pb.values[i]
}
}
return &ArrayGenotype[T]{values: child}
}
// NumParents implements CrossoverStrategy.
func (p *arrayCrossoverUniform[T]) NumParents() int {
return 2
}
// arrayCrossoverAsexual is a crossover strategy that clones the parent.
// It only requires one parent.
type arrayCrossoverAsexual[T any] struct{}
// NewArrayCrossoverAsexual creates a [Crossover] that simply clones its single parent.
func NewArrayCrossoverAsexual[T any]() Crossover[*ArrayGenotype[T]] {
return &arrayCrossoverAsexual[T]{}
}
// Crossover implements CrossoverStrategy.
func (p *arrayCrossoverAsexual[T]) Crossover(gs []*ArrayGenotype[T]) *ArrayGenotype[T] {
if len(gs) != 1 {
panic("AsexualCrossoverStrategy requires exactly 1 parent")
}
return Clone(gs[0])
}
// NumParents implements CrossoverStrategy.
func (p *arrayCrossoverAsexual[T]) NumParents() int {
return 1
}
// arrayCrossoverKPoint is a crossover strategy that selects K locations in the genome to switch parents.
// It requires two parents.
type arrayCrossoverKPoint[T any] struct {
k int
}
// NewArrayCrossoverKPoint creates a [Crossover] that switches between the two parents at k randomly chosen points.
func NewArrayCrossoverKPoint[T any](k int) Crossover[*ArrayGenotype[T]] {
if k < 0 {
panic("k must be >= 0")
}
return &arrayCrossoverKPoint[T]{
k: k,
}
}
func (p *arrayCrossoverKPoint[T]) Crossover(gs []*ArrayGenotype[T]) *ArrayGenotype[T] {
if len(gs) != 2 {
panic("KPointCrossoverStrategy requires exactly 2 parents")
}
pa, pb := gs[0], gs[1]
if len(pa.values) != len(pb.values) {
panic("genotypes must have the same length for KPointCrossoverStrategy")
}
crossoverPoints := make([]int, p.k)
for i := 0; i < p.k; i++ {
crossoverPoints[i] = rand.N(len(pa.values))
}
sort.Ints(crossoverPoints)
child := make([]T, len(pa.values))
fromParentA := rand.Float64() < 0.5
currentCrossoverPoint := 0
for i := range child {
if currentCrossoverPoint < len(crossoverPoints) && crossoverPoints[currentCrossoverPoint] == i {
fromParentA = !fromParentA
currentCrossoverPoint++
}
if fromParentA {
child[i] = pa.values[i]
} else {
child[i] = pb.values[i]
}
}
return &ArrayGenotype[T]{values: child}
}
func (p *arrayCrossoverKPoint[T]) NumParents() int {
return 2
}
// arrayMutationGenerator is a mutation strategy that, with probability chance per value,
// combines the existing value with a newly generated one.
type arrayMutationGenerator[T any] struct {
combine func(old, new T) T
gen Generator[T]
chance float64
}
// NewArrayMutationGeneratorAdd creates a [Mutation] that, with probability chance per value, adds a value drawn from gen to the existing value.
func NewArrayMutationGeneratorAdd[T numberType](gen Generator[T], chance float64) Mutation[*ArrayGenotype[T]] {
combine := func(old, new T) T {
return old + new
}
return NewArrayMutationGenerator(gen, combine, chance)
}
// NewArrayMutationGeneratorReplace creates a [Mutation] that, with probability chance per value, replaces the existing value with one drawn from gen.
func NewArrayMutationGeneratorReplace[T any](gen Generator[T], chance float64) Mutation[*ArrayGenotype[T]] {
combine := func(_, new T) T {
return new
}
return NewArrayMutationGenerator(gen, combine, chance)
}
// NewArrayMutationGenerator creates a [Mutation] that, with probability chance per value, combines the existing value with one drawn from gen using combine.
func NewArrayMutationGenerator[T any](gen Generator[T], combine func(old, new T) T, chance float64) Mutation[*ArrayGenotype[T]] {
if gen == nil {
panic("cannot have nil generator")
}
if combine == nil {
panic("cannot have nil combine")
}
if chance < 0 {
panic("cannot have chance < 0")
}
return &arrayMutationGenerator[T]{
combine: combine,
gen: gen,
chance: chance,
}
}
// Mutate implements Mutation.
func (m *arrayMutationGenerator[T]) Mutate(g *ArrayGenotype[T]) {
for i := range g.Len() {
if rand.Float64() < m.chance {
g.values[i] = m.combine(g.values[i], m.gen.Next())
}
}
}
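// Illustrative sketch (lengths, standard deviations, and the mutation chance are arbitrary):
// wiring an ArrayGenotype into a reproduction pipeline with uniform crossover and additive
// Gaussian mutation, then breeding a single child from two parents.
func exampleArrayGenotypePipeline() *ArrayGenotype[float64] {
gen := NewGeneratorNormal(0.0, 1.0)
parentA := NewArrayGenotype(8, gen)
parentB := NewArrayGenotype(8, gen)
crossover := NewArrayCrossoverUniform[float64]()
mutation := NewArrayMutationGeneratorAdd(NewGeneratorNormal(0.0, 0.1), 0.2)
reproduction := NewTwoPhaseReproduction(crossover, mutation)
return reproduction.Reproduce([]*ArrayGenotype[float64]{parentA, parentB})
}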
package goevo
import "gonum.org/v1/gonum/mat"
// DenseGenotype is a type of genotype/phenotype that is a dense feed-forward neural network.
type DenseGenotype struct {
weights []*mat.Dense
biases []*mat.VecDense
buffers []*mat.VecDense
inputActivation Activation
hiddenActivation Activation
outputActivation Activation
}
// NewDenseGenotype creates a new dense genotype with the given layer shape and activations,
// pulling initial weights and biases from the given generators.
func NewDenseGenotype(shape []int, input, hidden, output Activation, weights, biases Generator[float64]) *DenseGenotype {
if len(shape) < 2 {
panic("cannot have fewer than two layers")
}
numWeights := len(shape) - 1
g := &DenseGenotype{
weights: make([]*mat.Dense, numWeights),
biases: make([]*mat.VecDense, numWeights+1),
buffers: make([]*mat.VecDense, numWeights+1),
inputActivation: input,
hiddenActivation: hidden,
outputActivation: output,
}
for wi := range numWeights {
r, c := shape[wi+1], shape[wi]
data := make([]float64, r*c)
for i := range data {
data[i] = weights.Next()
}
g.weights[wi] = mat.NewDense(r, c, data)
}
for bi := range numWeights + 1 {
r := shape[bi]
data := make([]float64, r)
for i := range data {
data[i] = biases.Next()
}
g.biases[bi] = mat.NewVecDense(r, data)
g.buffers[bi] = mat.NewVecDense(r, nil)
}
return g
}
// Forward implements Forwarder.
func (d *DenseGenotype) Forward(input []float64) []float64 {
if len(input) != d.buffers[0].Len() {
panic("incorrect input length")
}
// Copy input in
copy(d.buffers[0].RawVector().Data, input)
// Perform forward pass
for bi := range len(d.buffers) {
// Add bias
d.buffers[bi].AddVec(d.buffers[bi], d.biases[bi])
// Activate
var ac Activation
if bi == 0 {
ac = d.inputActivation
} else if bi == len(d.buffers)-1 {
ac = d.outputActivation
} else {
ac = d.hiddenActivation
}
ac.ActivateVector(d.buffers[bi])
// Multiply by weights if not last
if bi < len(d.buffers)-1 {
d.buffers[bi+1].MulVec(d.weights[bi], d.buffers[bi])
}
}
// Copy result out
lastBuf := d.buffers[len(d.buffers)-1]
result := make([]float64, lastBuf.Len())
copy(result, lastBuf.RawVector().Data)
return result
}
// Clone implements Cloneable.
func (d *DenseGenotype) Clone() any {
gn := &DenseGenotype{
weights: make([]*mat.Dense, len(d.weights)),
biases: make([]*mat.VecDense, len(d.biases)),
buffers: make([]*mat.VecDense, len(d.buffers)),
inputActivation: d.inputActivation,
hiddenActivation: d.hiddenActivation,
outputActivation: d.outputActivation,
}
for bi := range d.buffers {
gn.biases[bi] = mat.VecDenseCopyOf(d.biases[bi])
gn.buffers[bi] = mat.VecDenseCopyOf(d.buffers[bi])
}
for wi := range d.weights {
gn.weights[wi] = mat.DenseCopyOf(d.weights[wi])
}
return gn
}
// denseMutationUniform is a type of mutation for dense genotypes.
// With a certain chance per weight and per bias, it combines the existing value
// with a value drawn from the corresponding generator.
type denseMutationUniform struct {
genWeights Generator[float64]
combineWeights func(old, new float64) float64
genBiases Generator[float64]
combineBiases func(old, new float64) float64
weightsChance float64
biasesChance float64
}
// NewDenseMutationUniform creates a [Mutation] for [DenseGenotype]s that, with the given chances,
// combines each weight and bias with a value drawn from the corresponding generator.
func NewDenseMutationUniform(
genWeights Generator[float64],
combineWeights func(old, new float64) float64,
weightsChance float64,
genBiases Generator[float64],
combineBiases func(old, new float64) float64,
biasesChance float64,
) Mutation[*DenseGenotype] {
if genWeights == nil || genBiases == nil {
panic("cannot have nil generator")
}
if combineWeights == nil || combineBiases == nil {
panic("cannot have nil combine func")
}
if weightsChance < 0 || weightsChance > 1 || biasesChance < 0 || biasesChance > 1 {
panic("cannot have chance out of range 0-1")
}
return &denseMutationUniform{
genWeights: genWeights,
genBiases: genBiases,
combineWeights: combineWeights,
combineBiases: combineBiases,
weightsChance: weightsChance,
biasesChance: biasesChance,
}
}
// Mutate implements Mutation.
func (m *denseMutationUniform) Mutate(g *DenseGenotype) {
for _, w := range g.weights {
rs, cs := w.Dims()
for r := range rs {
for c := range cs {
if rand.Float64() < m.weightsChance {
w.Set(r, c, m.combineWeights(w.At(r, c), m.genWeights.Next()))
}
}
}
}
for _, b := range g.biases {
for r := range b.Len() {
if rand.Float64() < m.biasesChance {
b.SetVec(r, m.combineBiases(b.AtVec(r), m.genBiases.Next()))
}
}
}
}
// denseCrossoverUniform is a type of crossover for dense genotypes.
// For each weight and bias, it chooses randomly from one of its parents.
// The number of parents is a parameter.
type denseCrossoverUniform struct {
parents int
}
// NewDenseCrossoverUniform creates a [Crossover] for [DenseGenotype]s that picks each weight and bias
// from one of the given number of parents, chosen uniformly at random.
func NewDenseCrossoverUniform(parents int) Crossover[*DenseGenotype] {
if parents <= 0 {
panic("must have at least one parent")
}
return &denseCrossoverUniform{
parents: parents,
}
}
// Crossover implements Crossover.
func (c *denseCrossoverUniform) Crossover(parents []*DenseGenotype) *DenseGenotype {
if len(parents) != c.parents {
panic("incorrect number of parents")
}
if c.parents <= 0 {
panic("must have at least one parent")
}
g := Clone(parents[0])
for _, p := range parents {
if len(p.weights) != len(g.weights) {
panic("inconsistent parent num layers")
}
}
for wi, w := range g.weights {
wr, wc := w.Dims()
pws := make([]mutMat, len(parents))
for pi, p := range parents {
pr, pc := p.weights[wi].Dims()
if wr != pr || wc != pc {
panic("incorrect weight sizes")
}
pws[pi] = p.weights[wi]
}
randomChoiceMatrix(w, pws)
}
for bi, b := range g.biases {
br := b.Len()
pbs := make([]mutMat, len(parents))
for pi, p := range parents {
pr := p.biases[bi].Len()
if br != pr {
panic("incorrect bias sizes")
}
pbs[pi] = &mutVecWrapper{p.biases[bi]}
}
randomChoiceMatrix(&mutVecWrapper{b}, pbs)
}
return g
}
// NumParents implements Crossover.
func (c *denseCrossoverUniform) NumParents() int {
return c.parents
}
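// Illustrative sketch (the shape and generator parameters are arbitrary): a 2-4-1 dense network
// with linear inputs, tanh hidden units and a sigmoid output, evaluated on a single input vector.
func exampleDenseGenotypeUsage() []float64 {
g := NewDenseGenotype([]int{2, 4, 1}, Linear, Tanh, Sigmoid, NewGeneratorNormal(0.0, 0.5), NewGeneratorNormal(0.0, 0.1))
return g.Forward([]float64{0.5, -0.25})
}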
package goevo
import "math"
// eliteSelection is a selection strategy that always selects the agent with the highest fitness.
type eliteSelection[T any] struct {
lastBest *Agent[T]
}
// NewEliteSelection creates a new elite [Selection], which always selects the single best agent.
func NewEliteSelection[T any]() Selection[T] {
return &eliteSelection[T]{}
}
func (s *eliteSelection[T]) SetAgents(agents []*Agent[T]) {
bestFitness := math.Inf(-1)
var bestAgent *Agent[T]
for _, agent := range agents {
if agent.Fitness > bestFitness {
bestFitness = agent.Fitness
bestAgent = agent
}
}
s.lastBest = bestAgent
}
func (s *eliteSelection[T]) Select() *Agent[T] {
if s.lastBest == nil {
panic("must call SetAgents before selecting (also ensure at least one agent has fitness greater than -inf)")
}
return s.lastBest
}
package goevo
// HillClimberPopulation is a [Population] with exactly two agents: a parent and its offspring.
// Each generation, one of the two is selected to become the new parent, and the other slot is filled with its offspring.
type HillClimberPopulation[T any] struct {
a *Agent[T]
b *Agent[T]
selection Selection[T]
reproduction Reproduction[T]
}
// NewHillClimberPopulation creates a new hill climber population from two initial genotypes.
// The reproduction strategy must use exactly one parent.
func NewHillClimberPopulation[T any](initialA, initialB T, selection Selection[T], reproduction Reproduction[T]) *HillClimberPopulation[T] {
if selection == nil {
panic("cannot have nil selection")
}
if reproduction == nil {
panic("cannot have nil reproduction")
}
return &HillClimberPopulation[T]{
a: NewAgent(initialA),
b: NewAgent(initialB),
selection: selection,
reproduction: reproduction,
}
}
func (p *HillClimberPopulation[T]) NextGeneration() Population[T] {
if p.reproduction.NumParents() != 1 {
panic("Hillclimber only supports reproduction with 1 parent")
}
p.selection.SetAgents(p.All())
parent := p.selection.Select()
a := NewAgent(parent.Genotype)
b := NewAgent(p.reproduction.Reproduce([]T{parent.Genotype}))
return &HillClimberPopulation[T]{a: a, b: b, selection: p.selection, reproduction: p.reproduction}
}
func (p *HillClimberPopulation[T]) All() []*Agent[T] {
return []*Agent[T]{p.a, p.b}
}
func (p *HillClimberPopulation[T]) Both() (*Agent[T], *Agent[T]) {
return p.a, p.b
}
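// Illustrative sketch (the mutation parameters are arbitrary; a negative max leaves hidden-neuron
// growth uncapped): a hill climber needs a single-parent reproduction, here an asexual NEAT
// crossover followed by the standard NEAT mutation.
func exampleHillClimberUsage() *HillClimberPopulation[*NeatGenotype] {
counter := NewCounter()
reproduction := NewTwoPhaseReproduction(
NewNeatCrossoverAsexual(),
NewNeatMutationStd(counter, []Activation{Relu, Tanh}, 1, 0, 0.5, 1, 0.2, 0.2, 0.5, 0.3, -1),
)
initial := NewNeatGenotype(counter, 2, 1, Sigmoid)
return NewHillClimberPopulation(initial, Clone(initial), NewEliteSelection[*NeatGenotype](), reproduction)
}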
package goevo
import (
"encoding/json"
"fmt"
"image"
"maps"
"math"
"math/rand"
"slices"
"github.com/goccy/go-graphviz"
)
// NeatNeuronID is the unique identifier for a neuron in a NEATGenotype
type NeatNeuronID int
// NeatSynapseID is the unique identifier for a synapse in a NEATGenotype
type NeatSynapseID int
// NeatSynapseEP is the endpoints of a synapse in a NEATGenotype
type NeatSynapseEP struct {
From NeatNeuronID
To NeatNeuronID
}
// NeatGenotype is a genotype for a neural network using the NEAT algorithm.
// It is conceptually similar to the DNA of an organism: it encodes how to build a neural network, but is not the neural network itself.
// This means if you want to actually run the neural network, you need to use the [NeatGenotype.Build] method to create a [NeatPhenotype].
type NeatGenotype struct {
maxSynapseValue float64
numInputs int
numOutputs int
neuronOrder []NeatNeuronID
inverseNeuronOrder map[NeatNeuronID]int
activations map[NeatNeuronID]Activation
weights map[NeatSynapseID]float64
synapseEndpointLookup map[NeatSynapseID]NeatSynapseEP
endpointSynapseLookup map[NeatSynapseEP]NeatSynapseID
forwardSynapses []NeatSynapseID // With these three we just track which synapses are of what type
backwardSynapses []NeatSynapseID // A synapse can NEVER change type
selfSynapses []NeatSynapseID
}
// NewNeatGenotype creates a new NEATGenotype with the given number of inputs and outputs, and the given output activation function.
// All output neurons will have the same activation function, and all input neurons will have the linear activation function.
// The genotype will have no synapses.
func NewNeatGenotype(counter *Counter, inputs, outputs int, outputActivation Activation) *NeatGenotype {
if inputs <= 0 || outputs <= 0 {
panic("must have at least one input and one output")
}
neuronOrder := make([]NeatNeuronID, 0)
inverseNeuronOrder := make(map[NeatNeuronID]int)
activations := make(map[NeatNeuronID]Activation)
weights := make(map[NeatSynapseID]float64)
synapseEndpointLookup := make(map[NeatSynapseID]NeatSynapseEP)
endpointSynapseLookup := make(map[NeatSynapseEP]NeatSynapseID)
forwardSynapses := make([]NeatSynapseID, 0)
backwardSynapses := make([]NeatSynapseID, 0)
selfSynapses := make([]NeatSynapseID, 0)
for i := 0; i < inputs; i++ {
id := NeatNeuronID(counter.Next())
neuronOrder = append(neuronOrder, id)
inverseNeuronOrder[id] = len(neuronOrder) - 1
activations[id] = Linear
}
for i := 0; i < outputs; i++ {
id := NeatNeuronID(counter.Next())
neuronOrder = append(neuronOrder, id)
inverseNeuronOrder[id] = len(neuronOrder) - 1
activations[id] = outputActivation
}
return &NeatGenotype{
maxSynapseValue: 3,
numInputs: inputs,
numOutputs: outputs,
neuronOrder: neuronOrder,
inverseNeuronOrder: inverseNeuronOrder,
activations: activations,
weights: weights,
synapseEndpointLookup: synapseEndpointLookup,
endpointSynapseLookup: endpointSynapseLookup,
forwardSynapses: forwardSynapses,
backwardSynapses: backwardSynapses,
selfSynapses: selfSynapses,
}
}
// Clone returns a new genotype that is an exact copy of this genotype.
func (g *NeatGenotype) Clone() any {
gc := &NeatGenotype{
g.maxSynapseValue,
g.numInputs,
g.numOutputs,
slices.Clone(g.neuronOrder),
maps.Clone(g.inverseNeuronOrder),
maps.Clone(g.activations),
maps.Clone(g.weights),
maps.Clone(g.synapseEndpointLookup),
maps.Clone(g.endpointSynapseLookup),
slices.Clone(g.forwardSynapses),
slices.Clone(g.backwardSynapses),
slices.Clone(g.selfSynapses),
}
return gc
}
func (g *NeatGenotype) isInputOrder(order int) bool {
return order < g.numInputs
}
func (g *NeatGenotype) isOutputOrder(order int) bool {
return order >= len(g.neuronOrder)-g.numOutputs
}
// NumInputNeurons returns the number of input neurons in the genotype.
func (g *NeatGenotype) NumInputNeurons() int {
return g.numInputs
}
// NumOutputNeurons returns the number of output neurons in the genotype.
func (g *NeatGenotype) NumOutputNeurons() int {
return g.numOutputs
}
// NumHiddenNeurons returns the number of hidden neurons in the genotype.
func (g *NeatGenotype) NumHiddenNeurons() int {
return len(g.activations) - g.numInputs - g.numOutputs
}
// NumNeurons returns the total number of neurons in the genotype.
func (g *NeatGenotype) NumNeurons() int {
return len(g.activations)
}
// NumSynapses returns the total number of synapses in the genotype.
func (g *NeatGenotype) NumSynapses() int {
return len(g.weights)
}
// Validate runs as many checks as possible to check the genotype is valid.
// It is really only designed to be used as part of a test suite to catch errors with the package.
// This should never return an error, but if it does, either there is a bug in the package or the user has somehow invalidated the genotype.
func (g *NeatGenotype) Validate() error {
// Check there are enough inputs and outputs
if g.numInputs <= 0 {
return fmt.Errorf("not enough inputs: %v", g.numInputs)
}
if g.numOutputs <= 0 {
return fmt.Errorf("not enough outputs: %v", g.numOutputs)
}
// Check there are at least enough input and output nodes
if len(g.neuronOrder) < g.numInputs+g.numOutputs {
return fmt.Errorf("than number of inputs (%v) and outputs (%v) is not possible with the number of nodes loaded (%v)", g.numInputs, g.numOutputs, len(g.neuronOrder))
}
// Check max synapse value is valid
if g.maxSynapseValue <= 0 {
return fmt.Errorf("invalid maximum synapse value: %v", g.maxSynapseValue)
}
// Ensure that all node indexes have same length
if len(g.neuronOrder) != len(g.inverseNeuronOrder) {
return fmt.Errorf("inverse neuron order has length %v but neuron order has length %v", len(g.inverseNeuronOrder), len(g.neuronOrder))
}
if len(g.neuronOrder) != len(g.activations) {
return fmt.Errorf("activations has length %v but neuron order has length %v", len(g.activations), len(g.neuronOrder))
}
// Ensure that all weight indexes have same length
if len(g.weights) != len(g.synapseEndpointLookup) {
return fmt.Errorf("synapse endpoint lookup has length %v but weights has length %v", len(g.synapseEndpointLookup), len(g.weights))
}
if len(g.weights) != len(g.endpointSynapseLookup) {
return fmt.Errorf("endpoint synapse lookup has length %v but weights has length %v", len(g.endpointSynapseLookup), len(g.weights))
}
if len(g.weights) != len(g.forwardSynapses)+len(g.backwardSynapses)+len(g.selfSynapses) {
return fmt.Errorf("forward, backward, and self synapses have combined length %v but weights has length %v", len(g.forwardSynapses)+len(g.backwardSynapses)+len(g.selfSynapses), len(g.weights))
}
// Ensure that there are no ids that are the same between the neurons and the synapses
foundIDs := make(map[int]bool)
for id := range g.activations {
if _, ok := foundIDs[int(id)]; ok {
return fmt.Errorf("repeated id: %v", id)
}
foundIDs[int(id)] = true
}
for id := range g.weights {
if _, ok := foundIDs[int(id)]; ok {
return fmt.Errorf("repeated id: %v", id)
}
foundIDs[int(id)] = true
}
// Check that synapseEPLookup and EPSynapseLookup are synced.
// Only need to do this one way because we have already checked that they have same length
for id, ep := range g.synapseEndpointLookup {
if id2, ok := g.endpointSynapseLookup[ep]; !ok {
return fmt.Errorf("missing id that exists in synapse endpoint lookup but not in endpoint synapse lookup: %v", id)
} else if id != id2 {
return fmt.Errorf("synapse endpoint lookup and endpoint synapse lookup are not symmetrical with id: %v (there and back becomes %v)", id, id2)
}
}
// Check that weights and synapseEPLookup are synced.
// Again, they already have same length.
for id := range g.synapseEndpointLookup {
if _, ok := g.weights[id]; !ok {
return fmt.Errorf("missing id that exists in synapse endpoint lookup but not in weights: %v", id)
}
}
// Check that neuron order and inverse neuron order are synced
for i := range g.neuronOrder {
if g.inverseNeuronOrder[g.neuronOrder[i]] != i {
return fmt.Errorf("order %v is not symmetrical in neuron order and inverse neuron order", i)
}
}
// Check that all classes of synapse are correctly categorised
for _, sid := range g.forwardSynapses {
ep := g.synapseEndpointLookup[sid]
of, ot := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
if ot <= of {
return fmt.Errorf("synapse with id %v is incorrectly categorised as forward", sid)
}
}
for _, sid := range g.backwardSynapses {
ep := g.synapseEndpointLookup[sid]
of, ot := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
if ot >= of {
return fmt.Errorf("synapse with id %v is incorrectly categorised as backward", sid)
}
}
for _, sid := range g.selfSynapses {
ep := g.synapseEndpointLookup[sid]
of, ot := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
if ot != of {
return fmt.Errorf("synapse with id %v is incorrectly categorised as self", sid)
}
}
return nil
}
// AddRandomNeuron adds a new neuron to the genotype on a random forward synapse.
// It will return false if there are no forward synapses to add to.
// The new neuron will have a random activation function from the given list of activations.
func (g *NeatGenotype) AddRandomNeuron(counter *Counter, activations ...Activation) bool {
if len(g.forwardSynapses) == 0 {
return false
}
// We only ever want to add nodes on forward synapses
sid := g.forwardSynapses[rand.Intn(len(g.forwardSynapses))]
ep := g.synapseEndpointLookup[sid]
newSid := NeatSynapseID(counter.Next())
newNid := NeatNeuronID(counter.Next())
epa := NeatSynapseEP{ep.From, newNid}
epb := NeatSynapseEP{newNid, ep.To}
// Swap the old connection for a, which will also retain the original weight
delete(g.endpointSynapseLookup, ep)
g.endpointSynapseLookup[epa] = sid
g.synapseEndpointLookup[sid] = epa
// Don't need to modify weights because weights[sid] is already there
// Create a new connection for b, with a weight of 1 to minimise the effect on behaviour
g.endpointSynapseLookup[epb] = newSid
g.synapseEndpointLookup[newSid] = epb
g.weights[newSid] = 1
// Find the two original endpoints orders, and also which was first and which was second
ao, bo := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
// Add b to the index of its synapse class
if ao < bo {
g.forwardSynapses = append(g.forwardSynapses, newSid)
} else if ao > bo {
g.backwardSynapses = append(g.backwardSynapses, newSid)
} else {
g.selfSynapses = append(g.selfSynapses, newSid)
}
// Create a new neuron
firstO, secondO := ao, bo
if bo < ao {
firstO, secondO = bo, ao
}
// Check that the synapse is valid. If it is not, something has gone wrong
if g.isInputOrder(ao) && g.isInputOrder(bo) {
panic("trying to insert a node on a connection between two inputs, either a bug has occurred or you have created an invalid genotype")
} else if g.isOutputOrder(ao) && g.isOutputOrder(bo) {
panic("trying to insert a node on a connection between two outputs, either a bug has occurred or you have created an invalid genotype")
} else if g.isInputOrder(bo) {
panic("trying to insert a node on a connection that ends in an input, either a bug has occurred or you have created an invalid genotype")
}
// Find the new order of the neuron
no := int(math.Round((float64(ao) + float64(bo)) / 2.0)) // Find the order that is halfway between them
startPosition := max(g.numInputs, firstO+1) // First valid position INCLUSIVE
endPosition := min(len(g.neuronOrder)-g.numOutputs, secondO) // Last valid position INCLUSIVE
if startPosition > endPosition {
panic("failed to find valid placement of neuron, this should not have happened")
}
if no < startPosition {
no = startPosition
} else if no > endPosition {
no = endPosition
}
// Insert the neuron at that order
newNeuronOrder := make([]NeatNeuronID, len(g.neuronOrder)+1)
copy(newNeuronOrder, g.neuronOrder[:no])
newNeuronOrder[no] = newNid
copy(newNeuronOrder[no+1:], g.neuronOrder[no:])
g.neuronOrder = newNeuronOrder
for i := no; i < len(g.neuronOrder); i++ {
g.inverseNeuronOrder[g.neuronOrder[i]] = i
}
// Add the activation
g.activations[newNid] = activations[rand.Intn(len(activations))]
return true
}
// AddRandomSynapse adds a new synapse to the genotype between two nodes.
// It will return false if it failed to find a place to put the synapse after 10 tries.
// The synapse will have a random weight from a normal distribution with the given standard deviation.
// If recurrent is true, the synapse will be recurrent, otherwise it will not.
func (g *NeatGenotype) AddRandomSynapse(counter *Counter, weightStd float64, recurrent bool) bool {
// Almost always find a new connection after 10 tries
for i := 0; i < 10; i++ {
ao := rand.Intn(len(g.neuronOrder))
bo := rand.Intn(len(g.neuronOrder))
if ao == bo && !recurrent {
continue // No self connections if non recurrent
}
if (!recurrent && ao > bo) || (recurrent && bo > ao) {
ao, bo = bo, ao // Ensure that this connection is of correct type
}
if (g.isInputOrder(bo)) || (g.isOutputOrder(ao) && g.isOutputOrder(bo)) {
continue // Trying to connect either anything-input or output-output
}
aid, bid := g.neuronOrder[ao], g.neuronOrder[bo]
ep := NeatSynapseEP{aid, bid}
if _, ok := g.endpointSynapseLookup[ep]; ok {
continue // This connection already exists, try to find another
}
sid := NeatSynapseID(counter.Next())
g.endpointSynapseLookup[ep] = sid
g.synapseEndpointLookup[sid] = ep
g.weights[sid] = clamp(rand.NormFloat64()*weightStd, -g.maxSynapseValue, g.maxSynapseValue)
if !recurrent {
g.forwardSynapses = append(g.forwardSynapses, sid)
} else if ep.From == ep.To {
g.selfSynapses = append(g.selfSynapses, sid)
} else {
g.backwardSynapses = append(g.backwardSynapses, sid)
}
return true
}
return false
}
// MutateRandomSynapse will change the weight of a random synapse by a random amount from a normal distribution with the given standard deviation.
// It will return false if there are no synapses to mutate.
func (g *NeatGenotype) MutateRandomSynapse(std float64) bool {
if len(g.weights) == 0 {
return false
}
sid := randomMapKey(g.weights)
g.weights[sid] = clamp(g.weights[sid]+rand.NormFloat64()*std, -g.maxSynapseValue, g.maxSynapseValue)
return true
}
// RemoveRandomSynapse will remove a random synapse from the genotype.
// It will return false if there are no synapses to remove.
func (g *NeatGenotype) RemoveRandomSynapse() bool {
if len(g.weights) == 0 {
return false
}
sid := randomMapKey(g.weights)
ep := g.synapseEndpointLookup[sid]
fo, to := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
if fo < to {
idx := slices.Index(g.forwardSynapses, sid)
g.forwardSynapses[idx] = g.forwardSynapses[len(g.forwardSynapses)-1]
g.forwardSynapses = g.forwardSynapses[:len(g.forwardSynapses)-1]
} else if fo > to {
idx := slices.Index(g.backwardSynapses, sid)
g.backwardSynapses[idx] = g.backwardSynapses[len(g.backwardSynapses)-1]
g.backwardSynapses = g.backwardSynapses[:len(g.backwardSynapses)-1]
} else {
idx := slices.Index(g.selfSynapses, sid)
g.selfSynapses[idx] = g.selfSynapses[len(g.selfSynapses)-1]
g.selfSynapses = g.selfSynapses[:len(g.selfSynapses)-1]
}
delete(g.weights, sid)
delete(g.synapseEndpointLookup, sid)
delete(g.endpointSynapseLookup, ep)
return true
}
// ResetRandomSynapse will reset the weight of a random synapse to 0.
// It will return false if there are no synapses to reset.
func (g *NeatGenotype) ResetRandomSynapse() bool {
if len(g.weights) == 0 {
return false
}
sid := randomMapKey(g.weights)
g.weights[sid] = 0
return true
}
// MutateRandomActivation will change the activation function of a random hidden neuron to
// a random activation function from the given list of activations.
// It will return false if there are no hidden neurons to mutate.
func (g *NeatGenotype) MutateRandomActivation(activations ...Activation) bool {
numHidden := len(g.neuronOrder) - g.numInputs - g.numOutputs
if numHidden <= 0 {
return false
}
i := g.numInputs + rand.Intn(numHidden)
g.activations[g.neuronOrder[i]] = activations[rand.Intn(len(activations))]
return true
}
// neatMutationStd is a reproduction strategy that uses a standard deviation for the number of mutations in each category.
// The standard deviation is not scaled by the size of the network, meaning that larger networks will tend to have more mutations than smaller networks.
type neatMutationStd struct {
// The standard deviation for the number of new synapses
stdNumNewSynapses float64
// The standard deviation for the number of new recurrent synapses
stdNumNewRecurrentSynapses float64
// The standard deviation for the number of new neurons
stdNumNewNeurons float64
// The standard deviation for the number of synapses to mutate
stdNumMutateSynapses float64
// The standard deviation for the number of synapses to prune
stdNumPruneSynapses float64
// The standard deviation for the number of activations to mutate
stdNumMutateActivations float64
// The standard deviation for the weight of new synapses
stdNewSynapseWeight float64
// The standard deviation for the weight of mutated synapses
stdMutateSynapseWeight float64
// The maximum number of hidden neurons this mutation can add
maxHiddenNeurons int
// The counter to use for new synapse IDs
counter *Counter
// The possible activations to use for new neurons
possibleActivations []Activation
}
// NewNeatMutationStd creates a new [Mutation] for [NeatGenotype]s.
// The number of each type of mutation applied is drawn from a half-normal distribution with the corresponding standard deviation.
func NewNeatMutationStd(
counter *Counter,
activations []Activation,
stdNumNewForwardSynapses float64,
stdNumNewRecurrentSynapses float64,
stdNumNewNeurons float64,
stdNumMutateSynapses float64,
stdNumPruneSynapses float64,
stdNumMutateActivations float64,
stdNewSynapseWeight float64,
stdMutateSynapseWeight float64,
maxHiddenNeurons int,
) Mutation[*NeatGenotype] {
if counter == nil {
panic("cannot have nil counter")
}
if len(activations) == 0 {
panic("cannot have no activations")
}
// TODO: Should probably check stds are all above 0 but it won't break anything
return &neatMutationStd{
counter: counter,
possibleActivations: activations,
stdNumNewSynapses: stdNumNewForwardSynapses,
stdNumNewRecurrentSynapses: stdNumNewRecurrentSynapses,
stdNumNewNeurons: stdNumNewNeurons,
stdNumMutateSynapses: stdNumMutateSynapses,
stdNumPruneSynapses: stdNumPruneSynapses,
stdNumMutateActivations: stdNumMutateActivations,
stdNewSynapseWeight: stdNewSynapseWeight,
stdMutateSynapseWeight: stdMutateSynapseWeight,
maxHiddenNeurons: maxHiddenNeurons,
}
}
// Mutate implements [Mutation], applying a random number of each type of mutation to the genotype.
func (r *neatMutationStd) Mutate(g *NeatGenotype) {
for i := 0; i < stdN(r.stdNumNewSynapses); i++ {
g.AddRandomSynapse(r.counter, r.stdNewSynapseWeight, false)
}
for i := 0; i < stdN(r.stdNumNewRecurrentSynapses); i++ {
g.AddRandomSynapse(r.counter, r.stdNewSynapseWeight, true)
}
for i := 0; i < stdN(r.stdNumNewNeurons); i++ {
if r.maxHiddenNeurons < 0 || g.NumHiddenNeurons() < r.maxHiddenNeurons {
g.AddRandomNeuron(r.counter, r.possibleActivations...)
}
}
for i := 0; i < stdN(r.stdNumMutateSynapses); i++ {
g.MutateRandomSynapse(r.stdMutateSynapseWeight)
}
for i := 0; i < stdN(r.stdNumPruneSynapses); i++ {
g.RemoveRandomSynapse()
}
for i := 0; i < stdN(r.stdNumMutateActivations); i++ {
g.MutateRandomActivation(r.possibleActivations...)
}
}
// neatCrossoverSimple is a crossover strategy for [NeatGenotype]s.
// It copies the first parent, then for each synapse that also exists in the second parent,
// it takes the second parent's weight with 50% probability.
type neatCrossoverSimple struct{}
// NewNeatCrossoverSimple creates a new simple NEAT [Crossover], which requires two parents.
func NewNeatCrossoverSimple() Crossover[*NeatGenotype] {
return &neatCrossoverSimple{}
}
// Crossover implements CrossoverStrategy.
func (s *neatCrossoverSimple) Crossover(gs []*NeatGenotype) *NeatGenotype {
if len(gs) != 2 {
panic("expected 2 parents for simple crossover")
}
g, g2 := gs[0], gs[1]
gc := &NeatGenotype{
g.maxSynapseValue,
g.numInputs,
g.numOutputs,
slices.Clone(g.neuronOrder),
maps.Clone(g.inverseNeuronOrder),
maps.Clone(g.activations),
maps.Clone(g.weights),
maps.Clone(g.synapseEndpointLookup),
maps.Clone(g.endpointSynapseLookup),
slices.Clone(g.forwardSynapses),
slices.Clone(g.backwardSynapses),
slices.Clone(g.selfSynapses),
}
for sid, sw := range g2.weights {
if _, ok := gc.weights[sid]; ok {
if rand.Float64() > 0.5 {
gc.weights[sid] = sw
}
}
}
return gc
}
// NumParents implements CrossoverStrategy.
func (s *neatCrossoverSimple) NumParents() int {
return 2
}
// neatCrossoverAsexual is a crossover strategy for [NeatGenotype]s that clones its single parent.
type neatCrossoverAsexual struct{}
// NewNeatCrossoverAsexual creates a new asexual NEAT [Crossover], which requires one parent.
func NewNeatCrossoverAsexual() Crossover[*NeatGenotype] {
return &neatCrossoverAsexual{}
}
func (s *neatCrossoverAsexual) Crossover(gs []*NeatGenotype) *NeatGenotype {
if len(gs) != 1 {
panic("expected 1 parent for asexual crossover")
}
return Clone(gs[0])
}
func (s *neatCrossoverAsexual) NumParents() int {
return 1
}
type phenotypeConnection struct {
toIdx int
w float64
}
// NeatPhenotype is a phenotype for a NEAT genotype.
// It conceptually represents a neural network, built according to the instructions in the NEATGenotype (DNA).
// Once built, the NeatPhenotype can be used to forward propagate inputs through the network,
// but it cannot be modified through mutation or crossover.
type NeatPhenotype struct {
numIn int
numOut int
accumulators []float64
lastAccumulators []float64
activations []Activation
forwardWeights [][]phenotypeConnection
recurrentWeights [][]phenotypeConnection
}
// Build constructs a [NeatPhenotype] from this [NeatGenotype], implementing [Buildable].
func (g *NeatGenotype) Build() Forwarder {
accs := make([]float64, len(g.neuronOrder))
laccs := make([]float64, len(g.neuronOrder))
acts := make([]Activation, len(g.neuronOrder))
fwdWeights := make([][]phenotypeConnection, len(g.neuronOrder))
recurrentWeights := make([][]phenotypeConnection, len(g.neuronOrder))
for no, nid := range g.neuronOrder {
acts[no] = g.activations[nid]
fwdWeights[no] = make([]phenotypeConnection, 0)
recurrentWeights[no] = make([]phenotypeConnection, 0)
}
for sid, w := range g.weights {
ep := g.synapseEndpointLookup[sid]
oa, ob := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
if ob > oa {
fwdWeights[oa] = append(fwdWeights[oa], phenotypeConnection{ob, w})
} else {
recurrentWeights[oa] = append(recurrentWeights[oa], phenotypeConnection{ob, w})
}
}
return &NeatPhenotype{
numIn: g.numInputs,
numOut: g.numOutputs,
accumulators: accs,
lastAccumulators: laccs,
activations: acts,
forwardWeights: fwdWeights,
recurrentWeights: recurrentWeights,
}
}
// Forward propagate inputs through the network, returning the resulting outputs.
func (p *NeatPhenotype) Forward(x []float64) []float64 {
if len(x) != p.numIn {
panic("incorrect number of inputs")
}
// Reset accumulators to default values (remembering what they were before in case we need recurrent connections)
if len(p.recurrentWeights) > 0 { // For efficiency
copy(p.lastAccumulators, p.accumulators)
}
for i := 0; i < len(p.accumulators); i++ {
if i < len(x) {
p.accumulators[i] = x[i]
} else {
p.accumulators[i] = 0
}
}
if len(p.recurrentWeights) > 0 { // For efficiency
// Apply recurrent connections (does not matter what order we do this in)
for i := 0; i < len(p.accumulators); i++ {
for _, w := range p.recurrentWeights[i] {
p.accumulators[w.toIdx] += w.w * p.lastAccumulators[i]
}
}
}
// Apply forward connections
for i := 0; i < len(p.accumulators); i++ {
p.accumulators[i] = p.activations[i].ActivateValue(p.accumulators[i])
for _, w := range p.forwardWeights[i] {
p.accumulators[w.toIdx] += w.w * p.accumulators[i]
}
}
// Copy output array to avoid modification of internal state
outs := make([]float64, p.numOut)
copy(outs, p.accumulators[len(p.accumulators)-p.numOut:])
// Return the copied outputs
return outs
}
// Reset will clear the recurrent memories of the phenotype.
func (p *NeatPhenotype) Reset() {
for i := range p.accumulators {
p.accumulators[i] = 0
}
}
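// Illustrative sketch (the synapse count and weight std are arbitrary): a NEAT genotype is grown
// with random synapses and a neuron, then compiled with Build into a phenotype whose Forward
// method runs the network. The same Counter should be shared so neuron and synapse IDs stay unique.
func exampleNeatGenotypeUsage() []float64 {
counter := NewCounter()
g := NewNeatGenotype(counter, 3, 2, Sigmoid)
for i := 0; i < 5; i++ {
g.AddRandomSynapse(counter, 0.5, false)
}
g.AddRandomNeuron(counter, Relu, Tanh)
p := g.Build()
return p.Forward([]float64{0.1, 0.2, 0.3})
}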
// Make sure we implement json marshalling
var _ json.Marshaler = &NeatGenotype{}
var _ json.Unmarshaler = &NeatGenotype{}
type marshallableNeuron struct {
ID NeatNeuronID `json:"id"`
Activation Activation `json:"activation"`
}
type marshallableSynapse struct {
ID NeatSynapseID `json:"id"`
From NeatNeuronID `json:"from"`
To NeatNeuronID `json:"to"`
Weight float64 `json:"weight"`
}
type marshallableGenotype struct {
NumIn int `json:"num_in"`
NumOut int `json:"num_out"`
Neurons []marshallableNeuron `json:"neurons"`
Synapses []marshallableSynapse `json:"synapses"`
MaxSynapseVal float64 `json:"max_synapse_val"`
}
// MarshalJSON implements json.Marshaler, allowing the genotype to be marshalled to JSON.
func (g *NeatGenotype) MarshalJSON() ([]byte, error) {
mns := make([]marshallableNeuron, len(g.neuronOrder))
for no, nid := range g.neuronOrder {
mns[no] = marshallableNeuron{nid, g.activations[nid]}
}
mss := make([]marshallableSynapse, 0, len(g.weights))
for sid, w := range g.weights {
mss = append(mss, marshallableSynapse{
ID: sid,
From: g.synapseEndpointLookup[sid].From,
To: g.synapseEndpointLookup[sid].To,
Weight: w,
})
}
mg := marshallableGenotype{g.numInputs, g.numOutputs, mns, mss, g.maxSynapseValue}
return json.Marshal(&mg)
}
// UnmarshalJSON implements json.Unmarshaler, allowing the genotype to be unmarshalled from JSON.
//
// TODO(Needs more validation)
func (g *NeatGenotype) UnmarshalJSON(bs []byte) error {
mg := marshallableGenotype{}
err := json.Unmarshal(bs, &mg)
if err != nil {
return err
}
g.neuronOrder = make([]NeatNeuronID, len(mg.Neurons))
g.inverseNeuronOrder = make(map[NeatNeuronID]int)
g.activations = make(map[NeatNeuronID]Activation)
for ni, mn := range mg.Neurons {
g.activations[mn.ID] = mn.Activation
g.neuronOrder[ni] = mn.ID
g.inverseNeuronOrder[mn.ID] = ni
}
g.weights = make(map[NeatSynapseID]float64)
g.synapseEndpointLookup = make(map[NeatSynapseID]NeatSynapseEP)
g.endpointSynapseLookup = make(map[NeatSynapseEP]NeatSynapseID)
g.forwardSynapses = make([]NeatSynapseID, 0)
g.backwardSynapses = make([]NeatSynapseID, 0)
g.selfSynapses = make([]NeatSynapseID, 0)
for _, ms := range mg.Synapses {
ep := NeatSynapseEP{ms.From, ms.To}
g.weights[ms.ID] = ms.Weight
g.endpointSynapseLookup[ep] = ms.ID
g.synapseEndpointLookup[ms.ID] = ep
fromOrder := g.inverseNeuronOrder[ep.From]
toOrder := g.inverseNeuronOrder[ep.To]
if fromOrder < toOrder {
g.forwardSynapses = append(g.forwardSynapses, ms.ID)
} else if fromOrder > toOrder {
g.backwardSynapses = append(g.backwardSynapses, ms.ID)
} else {
g.selfSynapses = append(g.selfSynapses, ms.ID)
}
}
g.numInputs = mg.NumIn
g.numOutputs = mg.NumOut
g.maxSynapseValue = mg.MaxSynapseVal
if err := g.Validate(); err != nil {
return fmt.Errorf("genotype was invalid upon loading: %v", err)
}
return nil
}
// RenderDot returns a string in the DOT language that represents the genotype.
// This DOT code cannot be used to recreate the genotype, but can be used to visualise it using Graphviz.
func (g *NeatGenotype) RenderDot(width, height float64) string {
graphDrawer := newSimpleGraphvizWriter()
graphDrawer.writeGraphParam("rankdir", "LR")
graphDrawer.writeGraphParam("ratio", "fill")
graphDrawer.writeGraphParam("size", fmt.Sprintf("%v,%v", width, height))
graphDrawer.writeGraphParam("layout", "dot")
inputRanks := []string{}
outputRanks := []string{}
for no, nid := range g.neuronOrder {
name := fmt.Sprintf("N%v", nid)
label := fmt.Sprintf("N%v [%v]\n%v", nid, no, g.activations[nid])
color := "black"
if no < g.numInputs {
color = "green"
inputRanks = append(inputRanks, name)
} else if no >= len(g.neuronOrder)-g.numOutputs {
color = "red"
outputRanks = append(outputRanks, name)
}
graphDrawer.writeNode(name, label, color)
}
graphDrawer.writeMinRank(inputRanks)
graphDrawer.writeMaxRank(outputRanks)
for wid, w := range g.weights {
ep := g.synapseEndpointLookup[wid]
of, ot := g.inverseNeuronOrder[ep.From], g.inverseNeuronOrder[ep.To]
fromName := fmt.Sprintf("N%v", ep.From)
toName := fmt.Sprintf("N%v", ep.To)
label := fmt.Sprintf("C%v\n%.3f", wid, w)
color := "black"
if of >= ot {
color = "red"
}
graphDrawer.writeEdge(fromName, toName, label, color)
}
return graphDrawer.dot()
}
// RenderImage returns an image of the genotype using graphviz.
func (g *NeatGenotype) RenderImage(width, height float64) image.Image {
graph, err := graphviz.ParseBytes([]byte(g.RenderDot(width, height)))
if err != nil {
panic(fmt.Sprintf("error when creating a dot graph, this should not have happened (please report bug): %v", err))
}
gv := graphviz.New()
img, err := gv.RenderImage(graph)
if err != nil {
panic(fmt.Sprintf("error when creating an image from dot, this should not have happened (please report bug): %v", err))
}
return img
}
package goevo
// SimplePopulation has a single species, and generates the entire next generation by selecting and breeding from the previous one.
type SimplePopulation[T any] struct {
agents []*Agent[T]
selection Selection[T]
reproduction Reproduction[T]
}
// NewSimplePopulation creates a new SimplePopulation with n agents, each with a new genotype created by newGenotype.
func NewSimplePopulation[T any](newGenotype func() T, n int, selection Selection[T], reproduction Reproduction[T]) *SimplePopulation[T] {
if selection == nil {
panic("cannot have nil selection")
}
if reproduction == nil {
panic("cannot have nil reproduction")
}
if n <= 0 {
panic("cannot create population with less than 1 member")
}
agents := make([]*Agent[T], n)
for i := range agents {
agents[i] = NewAgent(newGenotype())
}
return &SimplePopulation[T]{
agents: agents,
selection: selection,
reproduction: reproduction,
}
}
// NextGeneration creates a new SimplePopulation from the current one, using the given selection and reproduction strategies.
func (p *SimplePopulation[T]) NextGeneration() Population[T] {
p.selection.SetAgents(p.agents)
return NewSimplePopulation(func() T {
parents := SelectNGenotypes(p.selection, p.reproduction.NumParents())
return p.reproduction.Reproduce(parents)
}, len(p.agents), p.selection, p.reproduction)
}
// All returns the agents in the population.
//
// TODO(change this to an iterator once they get added to the language, as this will increase performance in other cases)
func (p *SimplePopulation[T]) All() []*Agent[T] {
return p.agents
}
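// Illustrative sketch (population size and mutation parameters are arbitrary; the fitness function
// is supplied by the caller): one generation of a SimplePopulation of array genotypes. Fitness is
// assigned to every agent before NextGeneration selects and breeds.
func exampleSimplePopulationUsage(fitness func(*ArrayGenotype[float64]) float64) *SimplePopulation[*ArrayGenotype[float64]] {
reproduction := NewTwoPhaseReproduction(
NewArrayCrossoverUniform[float64](),
NewArrayMutationGeneratorAdd(NewGeneratorNormal(0.0, 0.1), 0.1),
)
pop := NewSimplePopulation(func() *ArrayGenotype[float64] {
return NewArrayGenotype(4, NewGeneratorNormal(0.0, 1.0))
}, 20, NewTournamentSelection[*ArrayGenotype[float64]](3), reproduction)
for _, agent := range pop.All() {
agent.Fitness = fitness(agent.Genotype)
}
return NextGeneration[*ArrayGenotype[float64]](pop)
}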
package goevo
import (
"fmt"
"math"
"math/rand"
"slices"
)
// SpeciatedPopulation is a speciated population of agents.
// Each species has the same number of agents, and there are always the same number of species.
// Each generation, with a given chance, the worst species is removed and replaced with a new species bred from a random surviving species.
type SpeciatedPopulation[T any] struct {
// species is a map of species ID to a slice of agents.
species map[int][]*Agent[T]
// removeWorstSpeciesChance is the chance that the worst species will be removed each generation.
removeWorstSpeciesChance float64
// stdNumAgentsSwap is the standard deviation of the number of agents to swap between species each generation.
// An agent is swapped by moving it to another random species, and moving another agent from that species to this species.
stdNumAgentsSwap float64
// counter is a counter to keep track of the new species.
counter *Counter
// The selection strategy to use when selecting agents to reproduce.
selection Selection[T]
// The reproduction strategy to use when creating new agents.
reproduction Reproduction[T]
}
// NewSpeciatedPopulation creates a new speciated population.
func NewSpeciatedPopulation[T any](
counter *Counter,
newGenotype func() T,
numSpecies int,
numAgentsPerSpecies int,
removeWorstSpeciesChance float64,
stdNumAgentsSwap float64,
selection Selection[T],
reproduction Reproduction[T],
) *SpeciatedPopulation[T] {
species := make(map[int][]*Agent[T])
for i := 0; i < numSpecies; i++ {
agents := make([]*Agent[T], numAgentsPerSpecies)
for j := 0; j < numAgentsPerSpecies; j++ {
agents[j] = NewAgent(newGenotype())
}
species[counter.Next()] = agents
}
return NewSpeciatedPopulationFrom(counter, species, removeWorstSpeciesChance, stdNumAgentsSwap, selection, reproduction)
}
// NewSpeciatedPopulationFrom creates a new speciated population from an existing map of species IDs to agents.
// All species must have the same, non-zero number of agents.
func NewSpeciatedPopulationFrom[T any](
counter *Counter,
species map[int][]*Agent[T],
removeWorstSpeciesChance float64,
stdNumAgentsSwap float64,
selection Selection[T],
reproduction Reproduction[T],
) *SpeciatedPopulation[T] {
if len(species) == 0 {
panic("can only create speciated population with at least one species")
}
if selection == nil {
panic("cannot have nil selection")
}
if reproduction == nil {
panic("cannot have nil reproduction")
}
firstKey := -1
for k := range species {
firstKey = k
break
}
speciesLength := len(species[firstKey])
if speciesLength == 0 {
panic("species must have at least one member")
}
speciesCopy := make(map[int][]*Agent[T])
for id, agents := range species {
if len(agents) != speciesLength {
panic("all species must be same length")
}
speciesCopy[id] = slices.Clone(agents)
}
return &SpeciatedPopulation[T]{
species: speciesCopy,
removeWorstSpeciesChance: removeWorstSpeciesChance,
stdNumAgentsSwap: stdNumAgentsSwap,
counter: counter,
selection: selection,
reproduction: reproduction,
}
}
// NextGeneration implements [Population].
func (p *SpeciatedPopulation[T]) NextGeneration() Population[T] {
var agentsPerGen int
for _, agents := range p.species {
agentsPerGen = len(agents)
break
}
if agentsPerGen == 0 {
panic("population: no agents in species, should not have happened")
}
numSpecies := len(p.species)
// Calculate the average fitness of each species, checking if it is the worst.
worstFitness := math.Inf(1)
worstSpecies := 0
for i, agents := range p.species {
var sum float64
for _, agent := range agents {
sum += agent.Fitness
}
avgFitness := sum / float64(agentsPerGen)
if avgFitness < worstFitness {
worstFitness, worstSpecies = avgFitness, i
}
}
// Calculate the ids of the species to reproduce
type speciesToReproduce struct {
fromId int
newId int
}
toReproduce := make([]speciesToReproduce, 0, numSpecies)
deleteWorst := rand.Float64() < p.removeWorstSpeciesChance
for id := range p.species {
if id == worstSpecies && deleteWorst {
continue
}
toReproduce = append(toReproduce, speciesToReproduce{id, id})
}
if deleteWorst {
// pick an index from the species we already know are going to reproduce
fromIndex := rand.Intn(len(toReproduce))
// give that species a new id in the next generation
toReproduce[fromIndex].newId = p.counter.Next()
// using the same parent species, add a new species also with a new id
toReproduce = append(toReproduce, speciesToReproduce{toReproduce[fromIndex].fromId, p.counter.Next()})
}
// Create the new species
newSpecies := make(map[int][]*Agent[T])
for _, r := range toReproduce {
p.selection.SetAgents(p.species[r.fromId])
newAgents := make([]*Agent[T], agentsPerGen)
for i := range newAgents {
parents := SelectNGenotypes(p.selection, p.reproduction.NumParents())
newAgents[i] = NewAgent(p.reproduction.Reproduce(parents))
}
newSpecies[r.newId] = newAgents
}
// Swap agents between species
numToSwap := int(math.Round(math.Abs(rand.NormFloat64()) * float64(p.stdNumAgentsSwap)))
for i := 0; i < numToSwap; i++ {
aIndex, bIndex := rand.Intn(len(toReproduce)), rand.Intn(len(toReproduce))
aId, bId := toReproduce[aIndex].newId, toReproduce[bIndex].newId
aAgentIndex, bAgentIndex := rand.Intn(agentsPerGen), rand.Intn(agentsPerGen)
newSpecies[aId][aAgentIndex], newSpecies[bId][bAgentIndex] = newSpecies[bId][bAgentIndex], newSpecies[aId][aAgentIndex]
}
// Sanity checks
if len(newSpecies) != numSpecies {
fmt.Println(len(newSpecies), numSpecies, len(toReproduce), toReproduce)
panic("population: wrong number of species, should not have happened")
}
sanityFoundIds := make(map[int]struct{})
for id, agents := range newSpecies {
if len(agents) != agentsPerGen {
panic("population: wrong number of agents in species, should not have happened")
}
if id == worstSpecies && deleteWorst {
panic("population: worst species was not deleted, should not have happened")
}
if _, ok := sanityFoundIds[id]; ok {
panic("population: duplicate species id, should not have happened")
}
sanityFoundIds[id] = struct{}{}
}
// Return the new population
return &SpeciatedPopulation[T]{
species: newSpecies,
removeWorstSpeciesChance: p.removeWorstSpeciesChance,
stdNumAgentsSwap: p.stdNumAgentsSwap,
counter: p.counter,
selection: p.selection,
reproduction: p.reproduction,
}
}
// All implements [Population].
func (p *SpeciatedPopulation[T]) All() []*Agent[T] {
total := 0
for _, agents := range p.species {
total += len(agents)
}
all := make([]*Agent[T], 0, total)
for _, agents := range p.species {
all = append(all, agents...)
}
return all
}
// AllSpecies returns a copy of the mapping from species ID to the agents in that species.
func (p *SpeciatedPopulation[T]) AllSpecies() map[int][]*Agent[T] {
res := make(map[int][]*Agent[T])
for id, agents := range p.species {
res[id] = slices.Clone(agents)
}
return res
}
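// Illustrative sketch (species counts, chances, and mutation parameters are arbitrary): a speciated
// population of NEAT genotypes. The same counter is reused for species IDs and for neuron and
// synapse IDs, which keeps every generated ID unique.
func exampleSpeciatedPopulationUsage() *SpeciatedPopulation[*NeatGenotype] {
counter := NewCounter()
reproduction := NewTwoPhaseReproduction(
NewNeatCrossoverSimple(),
NewNeatMutationStd(counter, []Activation{Relu, Tanh}, 1, 0, 0.5, 1, 0.2, 0.2, 0.5, 0.3, 10),
)
newGenotype := func() *NeatGenotype { return NewNeatGenotype(counter, 2, 1, Sigmoid) }
return NewSpeciatedPopulation(counter, newGenotype, 4, 10, 0.2, 0.5, NewTournamentSelection[*NeatGenotype](3), reproduction)
}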
package goevo
import (
"math/rand"
)
// tournamentSelection is a selection strategy that selects the best agent from a random tournament of agents.
// It implements [Selection].
type tournamentSelection[T any] struct {
// The number of agents to include in each tournament.
tournamentSize int
agents []*Agent[T]
}
// NewTournamentSelection creates a new tournament [Selection] with the given tournament size.
func NewTournamentSelection[T any](tournamentSize int) Selection[T] {
if tournamentSize <= 0 {
panic("must have a tournament size of at least 1")
}
return &tournamentSelection[T]{
tournamentSize: tournamentSize,
agents: nil,
}
}
// SetAgents sets the agents to select from for this generation.
func (t *tournamentSelection[T]) SetAgents(agents []*Agent[T]) {
t.agents = agents
}
// Select returns an agent selected from the population using a tournament.
func (t *tournamentSelection[T]) Select() *Agent[T] {
if t.agents == nil {
panic("must call SetAgents before selecting")
}
if len(t.agents) == 0 {
panic("must have at least one agent")
}
best := t.agents[rand.Intn(len(t.agents))]
for i := 0; i < t.tournamentSize-1; i++ {
testIndex := rand.Intn(len(t.agents))
if t.agents[testIndex].Fitness > best.Fitness {
best = t.agents[testIndex]
}
}
return best
}
package goevo
// twoPhaseReproduction is a [Reproduction] that first performs a [Crossover]
// and then a [Mutation] on the resulting child.
type twoPhaseReproduction[T any] struct {
crossover Crossover[T]
mutate Mutation[T]
}
// NewTwoPhaseReproduction creates a new [Reproduction] that applies the given [Crossover] and then the given [Mutation].
func NewTwoPhaseReproduction[T any](crossover Crossover[T], mutate Mutation[T]) Reproduction[T] {
if crossover == nil {
panic("cannot have nil crossover")
}
if mutate == nil {
panic("cannot have nil mutate")
}
return &twoPhaseReproduction[T]{
crossover: crossover,
mutate: mutate,
}
}
// Reproduce implements the [Reproduction] interface.
func (r *twoPhaseReproduction[T]) Reproduce(parents []T) T {
if len(parents) != r.crossover.NumParents() {
panic("incorrect number of parents")
}
child := r.crossover.Crossover(parents)
r.mutate.Mutate(child)
return child
}
// NumParents implements the [Reproduction] interface.
func (r *twoPhaseReproduction[T]) NumParents() int {
return r.crossover.NumParents()
}
package goevo
import (
"fmt"
"math"
"math/rand"
"strings"
"gonum.org/v1/gonum/mat"
)
type floatType interface {
float32 | float64
}
type numberType interface {
floatType | int | int16 | int32 | int64
}
// stdN returns a non-negative integer drawn from a half-normal distribution with the given standard deviation,
// capped at 10 standard deviations.
func stdN(std float64) int {
v := math.Abs(rand.NormFloat64() * std)
if v > std*10 {
v = std * 10 // Cap at 10 std to prevent any silliness
}
return int(math.Round(v))
}
// randomMapKey returns a uniformly random key from the map, panicking if the map is empty.
func randomMapKey[T comparable, U any](m map[T]U) T {
n := rand.Intn(len(m))
i := 0
for k := range m {
if i == n {
return k
}
i++
}
panic("cannot occur")
}
func min(a, b int) int {
if a < b {
return a
}
return b
}
func max(a, b int) int {
if a > b {
return a
}
return b
}
// clamp limits x to the range [min, max].
func clamp(x, min, max float64) float64 {
if x < min {
return min
}
if x > max {
return max
}
return x
}
// mutMat is a mutable matrix: a [mat.Matrix] that can also be written to.
type mutMat interface {
mat.Matrix
Set(r, c int, v float64)
}
// mutVecWrapper adapts a [mat.VecDense] to the mutMat interface, treating it as a single-column matrix.
type mutVecWrapper struct {
*mat.VecDense
}
func (m *mutVecWrapper) Set(r, c int, v float64) {
if c != 0 {
panic("cannot set vector at anything other than column 0")
}
m.SetVec(r, v)
}
// randomChoiceMatrix fills into by picking each element from a randomly chosen matrix in ms.
// The caller must ensure all matrices have the same shape as into.
func randomChoiceMatrix(into mutMat, ms []mutMat) {
rs, cs := into.Dims()
for ri := range rs {
for ci := range cs {
idx := rand.Intn(len(ms))
into.Set(ri, ci, ms[idx].At(ri, ci))
}
}
}
// Utility struct to build up a simple graphviz graph one statement at a time
type simpleGraphvizWriter struct {
lines []string
}
func newSimpleGraphvizWriter() *simpleGraphvizWriter {
return &simpleGraphvizWriter{[]string{}}
}
func (s *simpleGraphvizWriter) writeGraphParam(name, value string) {
s.lines = append(s.lines, fmt.Sprintf("%s=\"%s\";", name, value))
}
func (s *simpleGraphvizWriter) writeMinRank(nodes []string) {
nodesList := strings.Join(nodes, "; ") + ";"
s.lines = append(s.lines, fmt.Sprintf("{rank=min; %s}", nodesList))
}
func (s *simpleGraphvizWriter) writeMaxRank(nodes []string) {
nodesList := strings.Join(nodes, "; ") + ";"
s.lines = append(s.lines, fmt.Sprintf("{rank=max; %s}", nodesList))
}
func (s *simpleGraphvizWriter) writeNode(name, label, color string) {
s.lines = append(s.lines, fmt.Sprintf("%s [label=\"%s\", color=\"%s\", shape=rect];", name, label, color))
}
func (s *simpleGraphvizWriter) writeEdge(from, to, label, color string) {
s.lines = append(s.lines, fmt.Sprintf("%s -> %s [label=\"%s\", color=\"%s\"];", from, to, label, color))
}
func (s *simpleGraphvizWriter) dot() string {
body := strings.Join(s.lines, "\n\t")
return fmt.Sprintf("digraph G {\n\t%s\n}", body)
}