Skip to content

Commit

Permalink
First draft of mutation-based Trainer.Sync
Browse files Browse the repository at this point in the history
- mutations:
- alter_weights
- alter_bias
- reset_weights
- reset_bias

- TODO: state machine for training/production phases based
        on fitness tolerance.
  • Loading branch information
zampino committed Jun 17, 2015
1 parent 8fb42d0 commit 8452822
Show file tree
Hide file tree
Showing 17 changed files with 217 additions and 171 deletions.
2 changes: 1 addition & 1 deletion lib/exnn/actuator.ex
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ defmodule EXNN.Actuator do

defmacro __using__(options) do
caller_mod = __CALLER__.module
custom_state = options[:with_state]||[]
custom_state = options[:state]||[]
quote location: :keep do
use EXNN.NodeServer
defstruct Keyword.merge unquote(custom_state), [id: nil, ins: []]
Expand Down
57 changes: 28 additions & 29 deletions lib/exnn/connectome.ex
Original file line number Diff line number Diff line change
@@ -1,37 +1,44 @@
defmodule EXNN.Connectome do
import EXNN.Utils.Logger
@moduledoc """
Stores genomes forming the system,
with their inner information.
@doc "accepts patterns of the form:
On boot it expands
a pattern of the form:
[sensor: N_s, neuron: [l1: N_1, ..., ld: N_d], actuator: N_a]
[sensor: [S_ID_1, S_ID_2, ..., S_ID_l],
neuron: {N_1, N_2, ..., N_m},
actuator: [A_ID_1, A_ID_2, ..., A_ID_n]
where N_s, N_i, N_a are natural numbers.
"
where N_s, N_i, N_a are natural numbers,
into a linking information between nodes with
m hidden layers each of size N_i
"""

require Logger
alias EXNN.Utils.Random
import EXNN.Utils.Logger

# TODO: decouple storage from link/patterns
# into Connectome.Links and Connectome.Pattern
def start_link do
{:ok, pid} = Agent.start_link(fn() -> HashDict.new end,
name: __MODULE__)

{pattern, dimensions} = EXNN.Config.get_pattern
Random.seed

pattern
store = pattern
|> EXNN.Pattern.build_layers
|> link([], dimensions)
|> store_all
|> Enum.reduce HashDict.new, &store/2

{:ok, pid}
Agent.start_link(fn() -> store end, name: __MODULE__)
end

@doc "returns the list of all genomes"
def all do
unkey = fn(dict)->
dict |> Enum.map &(elem(&1, 1))
end
unkey = fn(dict)-> dict |> Enum.map &(elem(&1, 1)) end
Agent.get __MODULE__, unkey
end

@doc "returns all genomes whose type is `:neuron`"
def neurons do
  Enum.filter(all, fn genome -> genome.type == :neuron end)
end
Expand All @@ -40,11 +47,6 @@ defmodule EXNN.Connectome do
Agent.get __MODULE__, &(Dict.get &1, id)
end

def store(genome) do
Agent.update __MODULE__,
&HashDict.put(&1, genome.id, genome)
end

@doc "accepts anything map or dict like"
def update(id, dict) do
# skim out unwanted keys!
Expand All @@ -61,39 +63,36 @@ defmodule EXNN.Connectome do
Agent.update(__MODULE__, update_fun)
end

defp store_all(collection) when is_list(collection) do
collection = List.flatten collection
Enum.each collection, &store(&1)
defp store(genome, hash) do
HashDict.put(hash, genome.id, genome)
end

# TOPOLOGY AND CONNECTIONS

def link([], acc, _), do: acc
defp link([], acc, _), do: List.flatten(acc)

@doc "actuators are processe as first"
def link([{:actuator, list} | rest], [], dimensions) do
defp link([{:actuator, list} | rest], [], dimensions) do
[{previous_type, previous_list} | tail] = rest
genomes = EXNN.Genome.collect(:actuator, list)
|> EXNN.Genome.set_ins(previous_list)
link(rest, [genomes], dimensions)
end

@doc "and sensors are last"
def link([{:sensor, first_list}], acc, dimensions) do
defp link([{:sensor, first_list}], acc, dimensions) do
[outs | rest] = acc
genomes = EXNN.Genome.collect(:sensor, first_list)
|> EXNN.Genome.set_outs(outs)
link([], [genomes | acc], dimensions)
end

def link([{type, list} | rest], acc, dimensions) do
defp link([{type, list} | rest], acc, dimensions) do
[{previous_type, previous_list} | tail] = rest
[outs | tail] = acc
genomes = EXNN.Genome.collect(type, list)
|> EXNN.Genome.set_ins(previous_list, dimensions)
|> EXNN.Genome.set_outs(outs)
link(rest, [genomes | acc], dimensions)
end


end
27 changes: 24 additions & 3 deletions lib/exnn/genome.ex
Original file line number Diff line number Diff line change
@@ -1,10 +1,31 @@
defmodule EXNN.Genome do
defstruct id: nil, type: nil, ins: [], outs: []
@moduledoc """
# EXNN.Genome
a genome is the basic information
a cell cares about
it will specialized once injected into a Specific NodeServer
where it will drop the type.
On the contrary
"""

# NOTE: struct is only used in update operations
defstruct id: nil, type: nil, ins: [], outs: [], bias: 0, activation: nil

alias EXNN.Utils.Random
alias EXNN.Utils.Math

# Builds one genome map of the given type for every id in the list.
def collect(type, ids) do
  Enum.map(ids, &build(type, &1))
end

# A fresh bias in [0, pi): pi scaled by a uniform sample.
def random_bias, do: Math.pi * Random.uniform

# Neuron genomes carry a random bias and a sine activation on top of
# the plain {type, id} shape built by the generic clause below.
# NOTE: random_bias is defined above the build/2 clauses so that both
# build/2 clauses stay adjacent — interleaving another def between
# clauses of the same name/arity raises a compiler warning.
def build(:neuron, id) do
  %{type: :neuron, id: id, bias: random_bias, activation: &Math.sin(&1)}
end

# Generic genome: sensors and actuators only need their type and id.
def build(type, id), do: %{type: type, id: id}
Expand All @@ -22,10 +43,10 @@ defmodule EXNN.Genome do

def set_ins(:neuron, genome, in_ids, dimensions) do
in_ids = inflate_ins(in_ids, dimensions)

with_random_weight = fn in_id ->
{in_id, :random.uniform}
{in_id, Random.uniform}
end

ins = Enum.map in_ids, with_random_weight
Map.merge genome, %{ins: ins}
end
Expand Down
4 changes: 2 additions & 2 deletions lib/exnn/neuron.ex
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ defmodule EXNN.Neuron do
alias EXNN.Utils.Math

defstruct id: nil, ins: [], outs: [], bias: 0,
activation: &Math.id/1, acc: [], trigger: [], metadata: []
activation: nil, acc: [], trigger: [], metadata: []

def initialize(genome) do
Dict.merge(genome, trigger: Dict.keys(genome.ins), acc: [])
Expand Down Expand Up @@ -57,7 +57,7 @@ defmodule EXNN.Neuron do
|> fire
end

defimpl EXNN.Connection, for: __MODULE__ do
defimpl EXNN.Connection do
def signal(neuron, message, metadata) do
EXNN.Neuron.signal(neuron, message, metadata)
end
Expand Down
4 changes: 2 additions & 2 deletions lib/exnn/sensor.ex
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ defmodule EXNN.Sensor do
They share the underlying genome as state, which can
be merged with custom attributes and default values
passign a with_state option to the use macro.
passign a state option to the use macro.
A sensor receives or propagates a signal from the outside world
and broadcasts it to the neuron of the front layer.
Expand All @@ -29,7 +29,7 @@ defmodule EXNN.Sensor do
"""

defmacro __using__(options) do
state_keyword = options[:with_state] || []
state_keyword = options[:state] || []
quote do
use EXNN.NodeServer
alias __MODULE__, as: CurrentSensorBase
Expand Down
16 changes: 11 additions & 5 deletions lib/exnn/trainer/mutations.ex
Original file line number Diff line number Diff line change
Expand Up @@ -29,24 +29,30 @@ defmodule EXNN.Trainer.Mutations do
GenServer.call __MODULE__, :revert
end

@doc "synchronously resets all tracked neurons and clears the mutation history"
def reset do
  GenServer.call(__MODULE__, :reset)
end

# server callbacks

# Generates a mutation set for the current neurons, applies it through
# the Agent, and pushes the set onto the history stack so that a later
# :revert call can undo exactly this step.
def handle_call :step, _from, state do
log "step", [], :debug
mutation_set = Set.generate(state.neurons)
# Agent.apply returns the neurons with the mutations applied
{:ok, neurons} = Agent.apply mutation_set

{:reply, :ok, %{state |
neurons: neurons,
history: [mutation_set | state.history]}
}
end

def handle_call :revert, _from, state do
log "revert", [], :debug
[mutation_set | rest] = state.history
inverse_set = Set.invert mutation_set
{:ok, neurons} = Agent.apply inverse_set
{:ok, neurons} = Set.invert(mutation_set) |> Agent.apply
{:reply, :ok, %{state | neurons: neurons, history: rest}}
end

# Builds reset mutations for every neuron, applies them, and wipes the
# history — a reset cannot be reverted.
def handle_call :reset, _from, state do
  {:ok, neurons} = state.neurons |> Set.reset |> Agent.apply
  {:reply, :ok, %{state | neurons: neurons, history: []}}
end
end
19 changes: 16 additions & 3 deletions lib/exnn/trainer/mutations/agent.ex
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@ defmodule EXNN.Trainer.Mutations.Agent do
Task.async __MODULE__, :apply_mutation, [mutation]
end

def apply_mutation %Mutation{
type: :alter_weights,
def apply_mutation(%Mutation{
type: type,
id: id,
changes: changes} do
changes: changes}) when type in [:alter_weights, :reset_weigths] do

patch_fn = fn(genome)->
new_weights = changes |> Enum.reduce genome.ins, fn({key, old, new}, weights)->
Expand All @@ -30,4 +30,17 @@ defmodule EXNN.Trainer.Mutations.Agent do
res = EXNN.NodeServer.patch(id, patch_fn)
EXNN.Connectome.update(id, res)
end

# Applies a bias mutation (covers :alter_bias and :reset_bias, whose
# changes are a {:bias, old, new} tuple): patches the node with the new
# bias and mirrors the result into the Connectome.
# Unused bindings are underscore-prefixed to silence compiler warnings
# (the original bound `type`, `old` and `genome` without using them).
def apply_mutation(%Mutation{
  type: _type,
  id: id,
  changes: {:bias, _old, new}}) do

  # NOTE(review): patch map only carries :bias — presumably
  # NodeServer.patch merges it into the node state; confirm there.
  patch_fn = fn(_genome) ->
    %{bias: new}
  end

  res = EXNN.NodeServer.patch(id, patch_fn)
  EXNN.Connectome.update(id, res)
end
end
65 changes: 57 additions & 8 deletions lib/exnn/trainer/mutations/set.ex
Original file line number Diff line number Diff line change
Expand Up @@ -13,35 +13,55 @@ defmodule EXNN.Trainer.Mutations.Set do
# :delete_link,
# :add_node,
# :remove_node
:alter_bias
# :alter_activation
# :swap_activation
]

# Samples roughly 1/sqrt(n) of the neurons and builds one random
# mutation for each sampled genome.
def generate(neurons) do
  Random.seed
  count = Enum.count(neurons)

  neurons
  |> Random.sample(Math.inv_sqrt(count))
  |> Enum.map(&generate_for/1)
end

def generate_for(genome) do
# type = Random.sample @mutation_types
Mutation.new genome, type: :alter_weights
# Builds a weight-reset and a bias-reset mutation for every neuron.
def reset(neurons) do
  Random.seed
  neurons |> do_reset([])
end

def invert(set), do: invert(set, [])

def invert([], done), do: done
defp generate_for(genome) do
Mutation.new genome, type: Random.take(@mutation_types)
end

defp invert([], done), do: done

def invert([first|rest], done) do
defp invert([first|rest], done) do
invert(rest, [Mutation.inverse(first) | done])
end

defp do_reset([], acc), do: acc

# Prepends a weight-reset and a bias-reset mutation for each neuron.
# NOTE(review): :reset_weigths is misspelled but the same spelling is
# used by the Agent guard and build_changes clause — rename all three
# together or not at all.
defp do_reset([neuron | rest], acc) do
  weights_reset = Mutation.new(neuron, type: :reset_weigths)
  bias_reset = Mutation.new(neuron, type: :reset_bias)
  do_reset(rest, [weights_reset, bias_reset | acc])
end

defmodule Mutation do
defstruct type: nil, id: nil, changes: []

def new(genome, type: type) do
struct(__MODULE__, [type: type, id: genome.id])
|> build_changes(genome)
log "MUTATE:", {genome.id, type}, :debug
struct(__MODULE__, [type: type, id: genome.id]) |> build_changes(genome)
end

# Alter weights

def build_changes %Mutation{type: :alter_weights}=mutation, genome do
weights = genome.ins
keys = Keyword.keys(weights)
Expand All @@ -55,10 +75,39 @@ defmodule EXNN.Trainer.Mutations.Set do
end
end

def inverse(%Mutation{type: :alter_weights, changes: changes}=mutation) do
def inverse %Mutation{type: :alter_weights, changes: changes}=mutation do
inverse_changes = Enum.map changes,
fn({key, old, new}) -> {key, new, old} end
%{mutation | changes: inverse_changes}
end

# Reset Weights

# Replaces every inbound weight with a fresh uniform sample, recording
# {key, old, new} tuples in the changes list.
# (Parenthesized call forms avoid the ambiguous `|> Enum.reduce arg, fn`
# parse of the original.)
def build_changes(%Mutation{type: :reset_weigths} = mutation, genome) do
  weights = genome.ins

  Enum.reduce(Keyword.keys(weights), mutation, fn key, acc ->
    %{acc | changes: [{key, weights[key], Random.uniform} | acc.changes]}
  end)
end

# Alter Bias
# Perturbs the current bias via Random.coefficient, keeping the old
# value in the change tuple so the mutation stays invertible.
def build_changes(%Mutation{type: :alter_bias} = mutation, genome) do
  %{mutation | changes: {:bias, genome.bias, Random.coefficient(genome.bias)}}
end

# Inverting a bias alteration just swaps old and new in the tuple.
def inverse(%Mutation{type: :alter_bias, changes: {:bias, old, new}} = mutation) do
  %{mutation | changes: {:bias, new, old}}
end

# Replaces the bias with a brand-new random one from EXNN.Genome.
def build_changes(%Mutation{type: :reset_bias} = mutation, genome) do
  %{mutation | changes: {:bias, genome.bias, EXNN.Genome.random_bias}}
end

end
end
Loading

0 comments on commit 8452822

Please sign in to comment.