Class: Sabina::MultilayerPerceptron

Inherits:
Object
Defined in:
lib/sabina/multilayer_perceptron.rb

Direct Known Subclasses

AutoEncoder

Constant Summary

LAMBDA = 0.001
MU = 0.5

(As used in #update: LAMBDA is the L2 weight-decay coefficient, MU the momentum coefficient.)

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(options = {}) ⇒ MultilayerPerceptron

Returns a new instance of MultilayerPerceptron.



# File 'lib/sabina/multilayer_perceptron.rb', line 17

def initialize(options = {})
  load_config(options)
  check_layers
  @L = @layers.size - 1   # index of the output layer
  @K = @layers.last.size  # number of output units
  @delta_W_prev = []      # previous weight updates, kept for momentum
  @delta_b_prev = []      # previous bias updates, kept for momentum

  # J: number of units in a layer, I: number of units in the previous layer.
  @layers[0].J = @layers[0].size
  (1..@L).each do |j|
    @layers[j].I = @layers[j-1].size
    @layers[j].J = @layers[j].size
    @layers[j].init_weight
    # Deep-copy zero matrices with the same shapes as W and b.
    @delta_W_prev[j] = Marshal.load(Marshal.dump(@layers[j].W * 0.0))
    @delta_b_prev[j] = Marshal.load(Marshal.dump(@layers[j].b * 0.0))
  end

  # Shuffle the training data and split it into mini-batches.
  @mini_batches =
    @training_data.shuffle
    .each_slice(@mini_batch_size).to_a
end
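
A minimal construction sketch. The option keys below (:layers, :training_data, :mini_batch_size, :learning_rate) are assumptions inferred from the instance variables this constructor reads; Configuration::OPTIONS_KEYS is the authoritative list, and the layer classes shown are placeholders:

training_data = Sabina::MultilayerPerceptron.load_csv('train.csv')

mlp = Sabina::MultilayerPerceptron.new(
  :layers => [                         # hypothetical layer classes
    Sabina::Layer::InputLayer.new(2),
    Sabina::Layer::HiddenLayer.new(3),
    Sabina::Layer::OutputLayer.new(2)
  ],
  :training_data   => training_data,
  :mini_batch_size => 10,
  :learning_rate   => 0.1
)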

Class Method Details

.load_csv(file_name) ⇒ Object



# File 'lib/sabina/multilayer_perceptron.rb', line 7

def self.load_csv(file_name)
  table = CSV.table(file_name)
  # Labels are assumed to be 0-based integers in a :label column,
  # so the number of classes is the maximum label plus one.
  k = table[:label].max + 1
  table.map do |data|
    x = data[0..-2]  # all columns but the last (the label) are features
    # One-hot encode the label into a vector of length k.
    d = Array.new(k) { 0 }.tap { |ary| ary[data[:label]] = 1 }
    { :x => x, :d => d }
  end
end
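
A sketch of the expected input, assuming a CSV whose last column is headed label and holds 0-based integer class labels (the file name and feature headers are hypothetical):

# train.csv:
#   x1,x2,label
#   0.1,0.9,0
#   0.8,0.2,1
data = Sabina::MultilayerPerceptron.load_csv('train.csv')
# => [ { :x => [0.1, 0.9], :d => [1, 0] },
#      { :x => [0.8, 0.2], :d => [0, 1] } ]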

Instance Method Details

#check_layers ⇒ Object

Check if `@layers` is valid.



# File 'lib/sabina/multilayer_perceptron.rb', line 48

def check_layers
  # A network needs at least an input, a hidden and an output layer.
  if @layers.size < 3
    raise "The number of layers must be at least three."
  end
end

#error(test_data) ⇒ Object

Error function (here, cross entropy).



# File 'lib/sabina/multilayer_perceptron.rb', line 55

def error(test_data)
  x = Matrix.columns( test_data.map { |data| data[:x] } )
  y = propagate_forward(x)
  # Cross entropy: sum over samples n and classes k of -d[k] * log(y[k, n]).
  test_data.each_with_index.inject(0.0) do |mn, (data, n)|
    mn + data[:d].each_with_index.inject(0.0) do |mk, (d, k)|
      mk - d * Math.log(y[k, n])
    end
  end
end
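
In symbols, over the N test samples and K classes, this evaluates the cross-entropy error:

E = -\sum_{n=1}^{N} \sum_{k=1}^{K} d_{k,n} \log y_{k,n}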

#learn ⇒ Object

A learning step runs, on each mini-batch in turn, a forward propagation, a backward propagation and an update of the weights of this multi-layer perceptron.



# File 'lib/sabina/multilayer_perceptron.rb', line 68

def learn
  # One pass over all mini-batches (one epoch).
  @mini_batches.each do |mini_batch|
    @X = Matrix.columns( mini_batch.map { |data| data[:x] } ) # (Dim, N)
    @D = Matrix.columns( mini_batch.map { |data| data[:d] } ) # (Dim, N)

    propagate_forward(@X)
    propagate_backward
    update
  end
end
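
A minimal training-loop sketch (the epoch count, mlp and test_data are hypothetical):

100.times do |epoch|
  mlp.learn
  puts "epoch #{epoch}: error = #{mlp.error(test_data)}"
end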

#load_config(options = {}) ⇒ Object

Load the configuration.



# File 'lib/sabina/multilayer_perceptron.rb', line 40

def load_config(options = {})
  # Per-instance options override the global Sabina.options defaults.
  merged_options = Sabina.options.merge(options)
  Configuration::OPTIONS_KEYS.each do |key|
    send("#{key}=".to_sym, merged_options[key])
  end
end
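
Global defaults can presumably be set once and overridden per instance. A sketch, assuming the usual configure-block pattern (Sabina.configure itself is an assumption; only Sabina.options appears in this file):

Sabina.configure do |config|        # hypothetical configure block
  config.learning_rate   = 0.1
  config.mini_batch_size = 10
end
# Options passed to #initialize override these defaults.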

#propagate_backward ⇒ Object

Training errors are propagated backwards.



# File 'lib/sabina/multilayer_perceptron.rb', line 100

def propagate_backward
  @Delta = []

  # Output layer (l = L): delta = y - d, as for a softmax output
  # paired with the cross-entropy error.
  @Delta[@L] = @Y - @D

  # Hidden layers, l = (L-1)..1
  [*(1...@L)].reverse.each do |l|
    # activate_ applies the derivative f'(u) of the activation function.
    f_u = Matrix.columns( @U[l].t.to_a.map { |u| @layers[l].activate_(u) } )
    w_d = @layers[l+1].W.t * @Delta[l+1]
    @Delta[l] = @layers[l].J.times.map do |j|
      @N.times.map do |n|
        f_u[j, n] * w_d[j, n]  # element-wise (Hadamard) product
      end
    end.tap { |ary| break Matrix[*ary] }  # break makes tap return the Matrix
  end
end
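
This is the standard backpropagation recurrence, with \circ the element-wise product:

\delta^{(L)} = y - d, \qquad
\delta^{(l)} = f'\left(u^{(l)}\right) \circ \left( W^{(l+1)\top} \delta^{(l+1)} \right), \quad l = L-1, \ldots, 1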

#propagate_forward(x_mat) ⇒ Object

Input values are propagated forward.



# File 'lib/sabina/multilayer_perceptron.rb', line 80

def propagate_forward(x_mat)
  @X = x_mat
  @N = @X.column_size  # number of samples in this batch
  @Z, @U = [], []

  # Input layer (l = 0): the activations are the inputs themselves.
  @Z[0] = @X

  # Hidden and output layers, l = 1..L.
  ones = Matrix[Array.new(@N) { 1.0 }]  # broadcasts the bias over all samples
  (1..@L).each do |l|
    @U[l] = @layers[l].W*@Z[l-1] + @layers[l].b*ones
    @Z[l] = Matrix.columns( @U[l].t.to_a.map { |u| @layers[l].activate(u) } )
  end

  # Output (K, N)
  @Y = @Z[@L]
end
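
Per layer, computed over the whole batch of N samples at once (the ones row vector broadcasts each bias column across the batch):

z^{(0)} = x, \qquad
u^{(l)} = W^{(l)} z^{(l-1)} + b^{(l)} \mathbf{1}^{\top}, \qquad
z^{(l)} = f\left(u^{(l)}\right), \qquad y = z^{(L)}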

#update ⇒ Object

Update the weights of this multi-layer perceptron.



# File 'lib/sabina/multilayer_perceptron.rb', line 119

def update
  ones = Matrix.columns( [Array.new(@N) { 1.0 }] )  # sums deltas over the batch
  (1..@L).each do |l|
    # Momentum term (MU) plus gradient descent with L2 weight decay (LAMBDA).
    delta_W = ( MU*@delta_W_prev[l] ) -
      @learning_rate*( (@Delta[l] * @Z[l-1].t / @N) + LAMBDA*@layers[l].W )
    delta_b = ( MU*@delta_b_prev[l] ) -
      @learning_rate*( @Delta[l] * ones )

    # Deep-copy the updates for the next step's momentum term.
    @delta_W_prev[l] = Marshal.load(Marshal.dump(delta_W))
    @delta_b_prev[l] = Marshal.load(Marshal.dump(delta_b))

    @layers[l].W = @layers[l].W + delta_W
    @layers[l].b = @layers[l].b + delta_b
  end
end
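
In symbols, with learning rate \varepsilon (@learning_rate), momentum coefficient \mu (MU) and weight-decay coefficient \lambda (LAMBDA):

\Delta W^{(l)} = \mu \Delta W^{(l)}_{prev} - \varepsilon \left( \frac{1}{N} \delta^{(l)} z^{(l-1)\top} + \lambda W^{(l)} \right), \qquad
W^{(l)} \leftarrow W^{(l)} + \Delta W^{(l)}

\Delta b^{(l)} = \mu \Delta b^{(l)}_{prev} - \varepsilon \, \delta^{(l)} \mathbf{1}, \qquad
b^{(l)} \leftarrow b^{(l)} + \Delta b^{(l)}

(Note that the bias gradient is summed, not averaged, over the batch, and is not weight-decayed.)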