Class: Sabina::SparseAutoEncoder

Inherits:
AutoEncoder
Defined in:
lib/sabina/sparse_auto_encoder.rb

Constant Summary

BETA = 0.1

Constants inherited from MultilayerPerceptron

MultilayerPerceptron::LAMBDA, MultilayerPerceptron::MU

Instance Method Summary

Methods inherited from AutoEncoder

#check_layers, #error, load_csv, #next_input_data, #update

Methods inherited from MultilayerPerceptron

#check_layers, #error, #learn, #load_config, load_csv, #propagate_forward, #update

Constructor Details

#initialize(options = {}) ⇒ SparseAutoEncoder

Returns a new instance of SparseAutoEncoder.



# File 'lib/sabina/sparse_auto_encoder.rb', line 5

def initialize(options = {})
  super

  @RHO_const = Matrix[Array.new(@layers[1].J) { 0.05 }]
  @RHO_prev = Matrix[Array.new(@layers[1].J) { 1.0 }]
end
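
A minimal standalone sketch (not part of the Sabina source, and assuming a hidden layer of 3 units) of the two row vectors built here: @RHO_const holds the target activity 0.05 for each hidden unit, and @RHO_prev seeds the moving average later maintained by #calc_rho.

require 'matrix'

hidden_units = 3                                      # assumed @layers[1].J
rho_const = Matrix[Array.new(hidden_units) { 0.05 }]  # 1 x J target activities
rho_prev  = Matrix[Array.new(hidden_units) { 1.0 }]   # 1 x J initial averages
# rho_const => Matrix[[0.05, 0.05, 0.05]]
# rho_prev  => Matrix[[1.0, 1.0, 1.0]]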

Instance Method Details

#calc_rho ⇒ Object

Calculates the average activity of each hidden unit over the current input data, smoothed against the previous estimate.



# File 'lib/sabina/sparse_auto_encoder.rb', line 41

def calc_rho
  # Sum each hidden unit's activations in @Z[1] (J x N) across the N samples,
  # then divide by @N to obtain a 1 x J row vector of average activities.
  @RHO = @Z[1].to_a.map do |z_ary|
    z_ary.inject(0.0, :+)
  end.tap { |ary| break Matrix[ary] / @N }

  # Smooth with the previous estimate (exponential moving average).
  @RHO = 0.9*@RHO_prev + 0.1*@RHO
end
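
The tap/break idiom above simply turns the array of row sums into a Matrix. A standalone sketch of the same computation on toy activations (hypothetical values, not Sabina code):

require 'matrix'

z        = Matrix[[0.2, 0.4], [0.6, 0.8]]   # J = 2 hidden units, N = 2 samples
n        = z.column_count
rho      = Matrix[z.to_a.map { |row| row.inject(0.0, :+) }] / n   # row means: [[0.3, 0.7]]
rho_prev = Matrix[Array.new(z.row_count) { 1.0 }]
rho      = 0.9*rho_prev + 0.1*rho
# => Matrix[[0.93, 0.97]]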

#propagate_backward ⇒ Object

Errors are propagated backwards, with the BETA-weighted sparsity term added at the hidden layer.



# File 'lib/sabina/sparse_auto_encoder.rb', line 13

def propagate_backward
  calc_rho
  @Delta = []

  # l = L
  @Delta[@L] = @Y - @D

  # l = 1
  l = 1
  # Apply the layer's activate_ to each column of pre-activations @U[l] (J x N).
  f_u = Matrix.columns( @U[l].t.to_a.map { |u| @layers[l].activate_(u) } )
  # Error back-propagated from the layer above (J x N).
  w_d = @layers[l+1].W.t * @Delta[l+1]
  # Per-unit sparsity term (1 - rho0)/(1 - rho) - rho0/rho, clipped at 1e10
  # and replicated across the N samples (J x N).
  sps = @layers[l].J.times.map do |j|
    ((1.0 - @RHO_const[0, j]) / (1.0 - @RHO[0, j])) -
    (@RHO_const[0, j] / @RHO[0, j])
  end.tap do |ary|
    ary = ary.map { |v| v > 1e10 ? 1e10 : v }
    break Matrix.columns(Array.new(@N) { ary })
  end
  # Back-propagated error plus the BETA-weighted sparsity term.
  w_d_s = w_d + BETA*sps

  @Delta[l] = @layers[l].J.times.map do |j|
    @N.times.map do |n|
      f_u[j, n] * w_d_s[j, n]
    end
  end.tap { |ary| break Matrix[*ary] }
end
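
The per-unit factor built into sps, (1 - rho)/(1 - rho_hat_j) - rho/rho_hat_j with target activity rho = 0.05 (@RHO_const) and average activity rho_hat_j (@RHO), matches the derivative of the Kullback-Leibler sparsity penalty KL(rho || rho_hat_j) = rho*log(rho/rho_hat_j) + (1 - rho)*log((1 - rho)/(1 - rho_hat_j)) that is commonly added to sparse autoencoder objectives; this KL reading is a standard interpretation, not something stated in the Sabina source. A toy evaluation of the factor (hypothetical rho_hat values, not Sabina code):

rho_target = 0.05                 # @RHO_const entries
rho_hat    = [0.93, 0.97]         # toy average activities from #calc_rho
sps = rho_hat.map { |r| (1.0 - rho_target) / (1.0 - r) - rho_target / r }
# => [~13.52, ~31.62]  -- grows as a unit's average activity drifts above the target

#propagate_backward then scales this term by BETA (0.1) and adds it to the back-propagated error before forming @Delta[1].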