Class: Torch::NN::SelfOrganizedMap

Inherits:
Module
  • Object
Defined in:
lib/torch/som.rb,
lib/torch/som/version.rb,
lib/torch/som/descent/base.rb,
lib/torch/som/descent/monotonic.rb

Defined Under Namespace

Modules: Descent

Constant Summary

VERSION = "0.1.0"

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(x, y, dim:, alpha: nil, sigma: nil, iterations: nil) ⇒ SelfOrganizedMap

Returns a new instance of SelfOrganizedMap.

Raises:

  • (ArgumentError)


# File 'lib/torch/som.rb', line 11

def initialize(
  x, y, 
  dim:,
  alpha: nil, sigma: nil,
  iterations: nil
)
  super()
  
  raise ArgumentError, "X must be a positive integer" unless x.is_a?(Integer) && x > 0
  @x = x

  raise ArgumentError, "Y must be a positive integer" unless y.is_a?(Integer) && y > 0
  @y = y

  raise ArgumentError, "Dimension must be a positive integer" unless dim.is_a?(Integer) && dim > 0
  @dim = dim

  unless alpha.nil? or alpha.is_a?(Descent::Base)
    raise ArgumentError, "alpha(t) must be a Descent::Base subclass"
  end
  
  unless sigma.nil? or sigma.is_a?(Descent::Base)
    raise ArgumentError, "sigma(t) must be a Descent::Base subclass"
  end
  
  if iterations.nil? && (alpha.nil? or sigma.nil?)
    raise ArgumentError, "Iterations number must be provided if no alpha(t) or sigma(t) given"
  end

  if iterations && (alpha or sigma)
    raise ArgumentError, "Steps must not be provided if alpha(t) or sigma(t) given"
  end

  if alpha&.iterations && sigma&.iterations && alpha.iterations != sigma.iterations
    raise ArgumentError, "alpha(t) and sigma(t) are designed for different iterations count"
  end

  # Total number of training steps: taken from whichever schedule defines it,
  # or from the iterations: argument.
  @steps = alpha&.iterations || sigma&.iterations || iterations

  # Default descent schedules for the learning rate alpha(t) and the
  # neighborhood radius sigma(t).
  @alpha_t = alpha || Descent::Monotonic.new(initial: 0.25, iterations: @steps)
  @sigma_t = sigma || Descent::Monotonic.new(initial: [@x, @y].max / 2.0, iterations: @steps)

  # Flat (x * y, dim) weight matrix, one row per map node, randomly initialized.
  @weights = Torch.rand(@x * @y, @dim)
  @meter = Torch::NN::PairwiseDistance.new

  # Grid coordinate [x, y] of every node, aligned with the rows of @weights.
  @node_coordinates = Torch.tensor(@x.times.to_a.product(@y.times.to_a), dtype: :long)
end
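
A minimal construction sketch (not from the gem's own docs; the require path is inferred from the "Defined in" files above, and the sizes are purely illustrative):

require "torch/som"

# 10x10 grid of 3-dimensional weight vectors. Without explicit schedules,
# iterations: is mandatory and default Monotonic alpha(t)/sigma(t) are built.
som = Torch::NN::SelfOrganizedMap.new(10, 10, dim: 3, iterations: 100)

# Alternatively, pass explicit descent schedules (built for the same iteration
# count) and omit iterations: entirely.
alpha = Torch::NN::SelfOrganizedMap::Descent::Monotonic.new(initial: 0.3, iterations: 200)
sigma = Torch::NN::SelfOrganizedMap::Descent::Monotonic.new(initial: 5.0, iterations: 200)
som = Torch::NN::SelfOrganizedMap.new(10, 10, dim: 3, alpha: alpha, sigma: sigma)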

Instance Attribute Details

#node_coordinates ⇒ Object (readonly)

Returns the value of attribute node_coordinates.



# File 'lib/torch/som.rb', line 9

def node_coordinates
  @node_coordinates
end

#weights ⇒ Object (readonly)

Returns the value of attribute weights.



# File 'lib/torch/som.rb', line 9

def weights
  @weights
end

Instance Method Details

#fit(vectors, progress: [:itself]) ⇒ Object



# File 'lib/torch/som.rb', line 85

def fit(vectors, progress: [:itself])
  # progress: is splatted into public_send on the steps enumerator, so a
  # progress-reporting wrapper can be plugged in; the default [:itself] is a no-op.
  @steps.times.public_send(*progress).each do |i|
    vectors.each do |x|
      forward x, i
    end
  end
  
  locations_for vectors
end
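
A usage sketch, assuming a map built as in the #initialize example with dim: 3 (the random input vectors are illustrative only):

vectors = Array.new(50) { Torch.rand(3) }

# Runs @steps passes over the data, then returns one [x, y] BMU coordinate
# per input vector via #locations_for.
locations = som.fit(vectors)

# progress: is splatted onto the steps enumerator, so an enumerator-wrapping
# method, e.g. a (hypothetical) with_progress, could be passed as
# som.fit(vectors, progress: [:with_progress]).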

#forward(x, step_number = nil) ⇒ Object



# File 'lib/torch/som.rb', line 59

def forward(x, step_number = nil)
  x = tensor!(x)
  # Broadcast the input so it can be compared against every node's weights at once.
  input = Torch.stack(Array.new(@x * @y) { x })

  # Best-matching unit (BMU): the node whose weight vector is closest to the input.
  bmu_1d_index = @meter.(input, @weights).argmin(dim: 0).item
  bmu_2d_index = Torch.tensor([bmu_1d_index.div(@y), bmu_1d_index % @y])

  # Learning rate and neighborhood radius for this step, from the descent schedules.
  alpha = @alpha_t.(step_number)
  sigma = @sigma_t.(step_number)

  # Squared grid distance from the BMU to every node on the map.
  dists = Torch.stack(Array.new(@x * @y) { bmu_2d_index }) - @node_coordinates
  sq_dists = (dists * dists).sum(dim: 1)

  # Gaussian neighborhood function, scaled by the learning rate.
  h = (sq_dists / 2.0 / sigma / sigma).neg.exp * alpha

  # Move every node's weights toward the input, proportionally to h.
  delta = Torch.einsum 'ij,i->ij', [input - @weights, h]
  @weights += delta
end
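
A single-step sketch for driving the training loop manually instead of through #fit (assumes a map with dim: 3; step_number indexes into the alpha(t) and sigma(t) schedules):

x = Torch.rand(3)  # must match the map's dim:
som.forward(x, 0)  # one update: weights are pulled toward x around its BMU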

#locations_for(vectors) ⇒ Object



# File 'lib/torch/som.rb', line 78

def locations_for(vectors)
  # [x, y] grid coordinate of each vector's best-matching unit; weights are not modified.
  vectors.map do |x|
    bmu_1d_index = @meter.(tensor!(x), @weights).argmin(dim: 0).item
    [bmu_1d_index.div(@y), bmu_1d_index % @y]
  end
end
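
A lookup sketch (no weights are updated; each input is mapped to the grid coordinate of its best-matching unit):

som.locations_for([Torch.rand(3), Torch.rand(3)])
# => e.g. [[2, 7], [4, 1]]  (actual values depend on the trained weights)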

#weights_2d ⇒ Object



# File 'lib/torch/som.rb', line 95

def weights_2d
  @weights.view(@x, @y, @dim)
end
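
A reshaping sketch (the same flat weight matrix viewed as a grid, e.g. for visualization):

grid = som.weights_2d  # tensor of shape [x, y, dim]
grid[0, 0]             # weight vector of the node at grid position (0, 0)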