Back Propagation algorithm for learning #1

Open
wants to merge 20 commits into base: master
33 changes: 33 additions & 0 deletions lib/neuron_system/back_prop/net.ex
@@ -0,0 +1,33 @@
defmodule NeuronSystem.BackProp.Net do
@moduledoc """
Extends the Net module with support for the back propagation learning algorithm.

## Usage

Use this module in the original Net module:

```elixir
defmodule NeuronSystem.Net do
...
use NeuronSystem.BackProp.Net
...
end
```

After that you can train your net by calling:
```elixir
net = ... # Net creation flow
# Add neurons, connections...
NeuronSystem.Net.back_prop!(net, %{y: 1})
```
"""

defmacro __using__(_opts \\ :empty) do
quote do
@spec back_prop!(NeuronSystem.Models.Net.t, map) :: list
def back_prop!(net, valid_output) do
NeuronSystem.BackProp.Utils.BackPropRunner.call(net, valid_output)
end
end
end
end
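To make the moduledoc example above concrete, here is a hedged sketch of a training loop over a small learning set. Only `back_prop!/2` comes from this changeset; building the net and running a forward pass for each sample are assumed to be handled by the existing `NeuronSystem.Net` API and are not shown.

```elixir
# `net` is assumed to be an already built %NeuronSystem.Models.Net{} with its
# neurons and connections wired up, and each sample is assumed to have been
# fed forward through the net before the backward pass runs.
learning_set = [%{y: 1}, %{y: 0}]

for _epoch <- 1..100, valid_output <- learning_set do
  NeuronSystem.Net.back_prop!(net, valid_output)
end
```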
67 changes: 67 additions & 0 deletions lib/neuron_system/back_prop/neuron/hidden_processor.ex
@@ -0,0 +1,67 @@
defmodule NeuronSystem.BackProp.Neuron.HiddenProcessor do
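@moduledoc """
Processor of Back Propagation algorithm messages for a hidden neuron.

A hidden neuron first accumulates the lapses sent back over each of its output connections.
Once a lapse has been received for every output connection, it:

1. Sums the received lapses into an overall lapse.
2. Calculates a delta and a new weight for each input connection and stores the new weight in the Connection Manager.
3. Propagates the connection lapse further back to the source neurons of plain inter-neuron connections.
4. Notifies the root process if the neuron has at least one Net input connection, marking the end of the backward pass on that path.
"""
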
alias NeuronSystem.{Models, Processes}
alias NeuronSystem.BackProp.Utils

def call(net, from_neuron_id, lapse, {%Models.Neuron{id: neuron_id} = neuron_model, options}) do
new_options = options |> push_new_income_lapse(from_neuron_id, lapse)
neuron_out_connections = NeuronSystem.Net.neuron_out_connections(net, neuron_id)
if received_all?(new_options, neuron_out_connections) do
lapse = calc_overall_lapse(new_options)
income_payloads = options |> Map.get(:income_payloads)
neuron_in_connections = NeuronSystem.Net.neuron_in_connections(net, neuron_id)
neuron_in_connections |> Enum.each(fn(connection) ->
connection_income = fetch_income_for_connection(connection, income_payloads)
delta_w = Utils.Calculations.delta_weight(lapse, connection_income)
new_w = Utils.Calculations.new_weight(connection.weight, delta_w)
connection_lapse = Utils.Calculations.connection_lapse(lapse, connection.weight)
NeuronSystem.Net.set_connection_weight(net, connection, new_w)
if match?(%Models.Connection{}, connection) do
send_back_prop_inside(net, neuron_id, connection, connection_lapse)
end
end)
if at_least_one_is_in_connection?(neuron_in_connections) do
send_back_prop_completed(net, neuron_id)
end
end
{neuron_model, new_options}
end

@spec push_new_income_lapse(map, bitstring, float) :: map
defp push_new_income_lapse(options, from_neuron_id, lapse) do
income_lapse = options |> Map.get(:income_lapse, %{})
new_income_lapse = income_lapse |> Map.put(from_neuron_id, lapse)
Map.put(options, :income_lapse, new_income_lapse)
end

defp received_all?(%{income_lapse: income_lapse} = _options, out_connections) do
income_count = income_lapse |> Map.keys |> Enum.count
connections_count = out_connections |> Enum.count
income_count == connections_count
end

defp fetch_income_for_connection(connection, income_payloads) do
connection_source = NeuronSystem.Connection.source(connection)
income_payloads[connection_source]
end

defp calc_overall_lapse(%{income_lapse: income_lapse} = _options) do
income_lapse |> Map.values |> Enum.sum
end

defp send_back_prop_inside(net, neuron_id, connection, lapse) do
connection_source = NeuronSystem.Connection.source(connection)
neuron_process_pid = NeuronSystem.Net.neuron_process_pid(net, connection_source)
Processes.Neuron.back_prop(neuron_process_pid, {:hidden, net, neuron_id, lapse})
end

defp at_least_one_is_in_connection?(connections) do
Enum.any?(connections, fn
(%Models.InConnection{} = _conn) -> true
_ -> false
end)
end

defp send_back_prop_completed(%Models.Net{root_pid: root_pid} = _net, neuron_id) do
send root_pid, {:back_prop_completed, neuron_id}
end
end
50 changes: 50 additions & 0 deletions lib/neuron_system/back_prop/neuron/output_processor.ex
@@ -0,0 +1,50 @@
defmodule NeuronSystem.BackProp.Neuron.OutputProcessor do
@moduledoc """
Processor of Back Propagation algorithm messages for an output neuron.

The algorithm for output neurons is slightly different than for the input and hidden ones
and consists of the following steps:

1. Calculate a lapse value from the desired output value and the value produced by the forward pass.
2. Calculate a delta value for each of the neuron's input connections.
3. Calculate a new weight for each input connection.
4. Update the weights of all input connections in the Connection Manager.
5. Send messages with the calculated values, in this case the connection `lapse`, to the hidden neurons.
"""

alias NeuronSystem.{Processes, Models}
alias NeuronSystem.BackProp.Utils

@doc """
Calls the back propagation processor for an output neuron.
"""
def call(net, valid_output, {%Models.Neuron{id: neuron_id} = neuron_model, %{income_payloads: income_payloads} = options}) do
lapse = calc_lapse(valid_output, options)
NeuronSystem.Net.neuron_in_connections(net, neuron_id)
|> Enum.each(fn(connection) ->
connection_income = fetch_income_for_connection(connection, income_payloads)
delta_w = Utils.Calculations.delta_weight(lapse, connection_income)
new_w = Utils.Calculations.new_weight(connection.weight, delta_w)
connection_lapse = Utils.Calculations.connection_lapse(lapse, connection.weight)
NeuronSystem.Net.set_connection_weight(net, connection, new_w)
send_back_prop_inside(net, neuron_id, connection, connection_lapse)
end)
end

defp calc_lapse(valid_output, options) do
out_value = options |> Map.get(:out_value)
d_out_value = options |> Map.get(:d_out_value)
Utils.Calculations.out_lapse(out_value, d_out_value, valid_output)
end

defp fetch_income_for_connection(connection, income_payloads) do
connection_source = NeuronSystem.Connection.source(connection)
income_payloads[connection_source]
end

defp send_back_prop_inside(net, neuron_id, connection, lapse) do
connection_source = NeuronSystem.Connection.source(connection)
neuron_process_pid = NeuronSystem.Net.neuron_process_pid(net, connection_source)
Processes.Neuron.back_prop(neuron_process_pid, {:hidden, net, neuron_id, lapse})
end
end
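For intuition, here is a small sketch of step 1 above, assuming a sigmoid activation so that the stored derivative is `out * (1 - out)`. Only `Utils.Calculations.out_lapse/3` is taken from this changeset; the numbers are illustrative.

```elixir
# Assumed sigmoid activation: after the forward pass the neuron keeps both
# its output value and the derivative of the activation at that output.
out_value = 0.73
d_out_value = out_value * (1 - out_value)   # ≈ 0.1971
needed_output = 1.0

lapse = NeuronSystem.BackProp.Utils.Calculations.out_lapse(out_value, d_out_value, needed_output)
# lapse = (1.0 - 0.73) * 0.1971 ≈ 0.0532
```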
29 changes: 29 additions & 0 deletions lib/neuron_system/back_prop/processes/neuron.ex
@@ -0,0 +1,29 @@
defmodule NeuronSystem.BackProp.Processes.Neuron do
@moduledoc """
Acts as an extension of the original Neuron process.

This extension provides an additional API specifically for the Back Propagation feature.
"""

defmacro __using__(_opts \\ :empty) do
quote do
@spec back_prop(pid, {:output, NeuronSystem.Models.Net.t, float} | {:hidden, NeuronSystem.Models.Net.t, bitstring, float}) :: :ok
def back_prop(pid, {:output, net, valid_output}) do
GenServer.cast(pid, {:back_prop, :output, net, valid_output})
end
def back_prop(pid, {:hidden, net, from_neuron_id, lapse}) do
GenServer.cast(pid, {:back_prop, :hidden, net, from_neuron_id, lapse})
end

def handle_cast({:back_prop, :output, net, valid_output}, state) do
NeuronSystem.BackProp.Neuron.OutputProcessor.call(net, valid_output, state)
{:noreply, state}
end

def handle_cast({:back_prop, :hidden, net, from_neuron_id, lapse}, state) do
new_state = NeuronSystem.BackProp.Neuron.HiddenProcessor.call(net, from_neuron_id, lapse, state)
{:noreply, new_state}
end
end
end
end
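For reference, a hedged sketch of the two cast shapes a neuron process accepts during a backward pass, assuming `NeuronSystem.Processes.Neuron` pulls this module in via `use` as the processors above expect; the pid, id and values are placeholders.

```elixir
# Output neuron: receives the desired output for the Net output it feeds.
NeuronSystem.Processes.Neuron.back_prop(neuron_pid, {:output, net, 1.0})

# Hidden neuron: receives the lapse propagated back over a connection,
# tagged with the id of the downstream neuron that sent it.
NeuronSystem.Processes.Neuron.back_prop(neuron_pid, {:hidden, net, downstream_neuron_id, 0.0532})
```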
56 changes: 56 additions & 0 deletions lib/neuron_system/back_prop/utils/back_prop_runner.ex
@@ -0,0 +1,56 @@
defmodule NeuronSystem.BackProp.Utils.BackPropRunner do
@moduledoc """
Contains the logic to start the Back Propagation learning algorithm.

At a high level, the algorithm steps are:

1. Detect the output connections of a Net.
2. Send each output neuron a message with its desired output value.
3. Wait in a `receive` until every input neuron reports that the backward pass has completed.
"""

alias NeuronSystem.{Models, Processes}

@doc """
Starts the back propagation learning procedure for a specific Net using a pair from the learning set.
"""
@spec call(NeuronSystem.Models.Net.t, map) :: list
def call(%Models.Net{} = net, valid_output) do
net
|> send_propagation(valid_output)
|> collect_results
end

defp send_propagation(net, valid_output) do
net
|> NeuronSystem.Net.out_connections
|> Enum.each(&send_event_to_source_neuron(&1, net, valid_output))
net
end

defp send_event_to_source_neuron(connection, net, valid_output) do
neuron_id = connection.source_neuron
neuron_process_pid = NeuronSystem.Net.neuron_process_pid(net, neuron_id)
needed_output = valid_output[connection.key]
Processes.Neuron.back_prop(neuron_process_pid, {:output, net, needed_output})
end

defp collect_results(net) do
net
|> detect_input_neurons
|> Enum.map(&collect_result_for_neuron/1)
end

defp detect_input_neurons(net) do
net
|> NeuronSystem.Net.in_connections
|> Enum.map(&(&1.target_neuron))
|> Enum.uniq
end

defp collect_result_for_neuron(neuron_id) do
receive do
{:back_prop_completed, ^neuron_id} -> neuron_id
end
end
end
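A hedged usage sketch: the runner is normally reached through `NeuronSystem.Net.back_prop!/2`, but it can also be called directly. The `%{y: 1}` pair mirrors the example in the Net moduledoc; the shape of `net` is an assumption based on the structs in this changeset.

```elixir
# `net` is assumed to be a %NeuronSystem.Models.Net{} whose root_pid is the
# current process, so the :back_prop_completed messages land in this mailbox.
# The call blocks until every input neuron reports completion and returns the
# list of those neuron ids.
learned_neuron_ids = NeuronSystem.BackProp.Utils.BackPropRunner.call(net, %{y: 1})
```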
39 changes: 39 additions & 0 deletions lib/neuron_system/back_prop/utils/calculations.ex
@@ -0,0 +1,39 @@
defmodule NeuronSystem.BackProp.Utils.Calculations do
@moduledoc """
Encapsulates all calculations used by the Back Prop algorithm.
"""

@learning_speed 0.4

@doc """
Calculates the lapse (error term) for an output neuron from its calculated output, the derivative of the activation at that output, and the desired output.
"""
@spec out_lapse(float, float, float) :: float
def out_lapse(calculated_output, d_calculated_output, needed_output) do
(needed_output - calculated_output) * d_calculated_output
end

@doc """
Calculates the delta value for a connection's weight.
"""
@spec delta_weight(float, float) :: float
def delta_weight(lapse, income) do
@learning_speed * lapse * income
end

@doc """
Calculates the new weight of a connection from the old one and the delta value.
"""
@spec new_weight(float, float) :: float
def new_weight(weight, delta_w) do
weight + delta_w
end

@doc """
Calculates a connection's lapse from the neuron's lapse and the connection's weight.
"""
@spec connection_lapse(float, float) :: float
def connection_lapse(lapse, weight) do
lapse * weight
end
end
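A worked example chaining the functions above for a single weight update; the numbers are illustrative and `0.4` is the module's `@learning_speed`.

```elixir
alias NeuronSystem.BackProp.Utils.Calculations

lapse = 0.0532    # error term that reached this neuron
income = 0.5      # value that arrived over the connection during the forward pass
weight = 0.3      # current connection weight

delta_w = Calculations.delta_weight(lapse, income)       # 0.4 * 0.0532 * 0.5 ≈ 0.0106
new_w = Calculations.new_weight(weight, delta_w)         # 0.3 + 0.0106 ≈ 0.3106
up_lapse = Calculations.connection_lapse(lapse, weight)  # 0.0532 * 0.3 ≈ 0.0160
```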
4 changes: 3 additions & 1 deletion lib/neuron_system/models/connection.ex
@@ -6,9 +6,10 @@ defmodule NeuronSystem.Models.Connection do
its weight.
"""

defstruct [:source_neuron, :target_neuron, :weight]
defstruct [:id, :source_neuron, :target_neuron, :weight]

alias NeuronSystem.Models
alias NeuronSystem.Utils.CommonHelper

@type t :: %__MODULE__{}

@@ -18,6 +19,7 @@ defmodule NeuronSystem.Models.Connection do
@spec build(Models.Neuron.t, Models.Neuron.t, float) :: Models.Connection.t
def build(%Models.Neuron{id: source_id}, %Models.Neuron{id: target_id}, weight) do
%__MODULE__{
id: CommonHelper.gen_process_id("connection"),
source_neuron: source_id,
target_neuron: target_id,
weight: weight
4 changes: 3 additions & 1 deletion lib/neuron_system/models/in_connection.ex
@@ -3,9 +3,10 @@ defmodule NeuronSystem.Models.InConnection do
Represents an input connection for a Net.
"""

defstruct [:target_neuron, :weight, :key]
defstruct [:id, :target_neuron, :weight, :key]

alias NeuronSystem.Models
alias NeuronSystem.Utils.CommonHelper

@type t :: %__MODULE__{}

@@ -15,6 +16,7 @@ defmodule NeuronSystem.Models.InConnection do
@spec build(Models.Neuron.t, float, atom) :: __MODULE__.t
def build(%Models.Neuron{id: neuron_id}, weight, key) do
%__MODULE__{
id: CommonHelper.gen_process_id("connection"),
target_neuron: neuron_id,
weight: weight,
key: key
2 changes: 1 addition & 1 deletion lib/neuron_system/models/net.ex
@@ -5,5 +5,5 @@ defmodule NeuronSystem.Models.Net do
Contains the only one value - the PID of the supervisor process of the Net.
"""

defstruct [:pid]
defstruct [:pid, :root_pid]
end
4 changes: 3 additions & 1 deletion lib/neuron_system/models/out_connection.ex
@@ -3,9 +3,10 @@ defmodule NeuronSystem.Models.OutConnection do
Represents an output connection for a Net.
"""

defstruct [:source_neuron, :weight, :key]
defstruct [:id, :source_neuron, :weight, :key]

alias NeuronSystem.Models
alias NeuronSystem.Utils.CommonHelper

@type t :: %__MODULE__{}

@@ -15,6 +16,7 @@ defmodule NeuronSystem.Models.OutConnection do
@spec build(Models.Neuron.t, float, atom) :: __MODULE__.t
def build(%Models.Neuron{id: neuron_id}, weight, key) do
%__MODULE__{
id: CommonHelper.gen_process_id("connection"),
source_neuron: neuron_id,
weight: weight,
key: key