Class: DNN::Losses::SoftmaxCrossEntropy

Inherits:
Loss
  • Object
show all
Includes:
DNN::Layers::MergeLayerNode
Defined in:
lib/dnn/core/losses.rb

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods included from DNN::Layers::MergeLayerNode

#backward, #forward

Methods inherited from Loss

call, #call, #clean, #forward, from_hash, #loss, #regularizers_backward, #regularizers_forward

Constructor Details

#initialize(eps: 1e-7) ⇒ SoftmaxCrossEntropy

Returns a new instance of SoftmaxCrossEntropy.

Parameters:

  • eps (Float) (defaults to: 1e-7)

    Small value added to avoid NaN when taking the log.



151
152
153
# File 'lib/dnn/core/losses.rb', line 151

# Creates a new SoftmaxCrossEntropy loss.
#
# @param eps [Float] small positive constant added to the softmax
#   output before taking the log in #forward_node, to avoid NaN
#   from log(0). Defaults to 1e-7.
def initialize(eps: 1e-7)
  @eps = eps
end

Instance Attribute Details

#epsObject

Returns the value of attribute eps.



140
141
142
# File 'lib/dnn/core/losses.rb', line 140

# Small positive constant used in #forward_node (added to the softmax
# output before the log) to avoid NaN; set via #initialize.
#
# @return [Float] the eps value.
def eps
  @eps
end

Class Method Details

.softmax(y) ⇒ Object Also known as: activation



143
144
145
# File 'lib/dnn/core/losses.rb', line 143

# Computes the row-wise softmax of y (normalized over axis 1).
#
# Subtracts the per-row maximum before exponentiating (the
# log-sum-exp trick) so that large logits cannot overflow exp to
# Infinity and yield NaN; mathematically the result is identical to
# exp(y) / sum(exp(y)). Also computes the exponentials only once,
# where the original evaluated Xumo::NMath.exp(y) twice.
#
# @param y batched logits (samples along axis 0, classes along axis 1).
# @return softmax probabilities with the same shape as y.
def softmax(y)
  exp = Xumo::NMath.exp(y - y.max(1, keepdims: true))
  exp / exp.sum(1, keepdims: true)
end

Instance Method Details

#backward_node(d) ⇒ Object



161
162
163
# File 'lib/dnn/core/losses.rb', line 161

# Gradient of the softmax cross-entropy loss w.r.t. the logits.
#
# Uses the softmax output (@x) and targets (@t) cached by
# #forward_node; the combined softmax + cross-entropy gradient
# reduces to (softmax - target), averaged over the batch.
#
# @param d upstream gradient to scale by.
# @return gradient with the same shape as the cached softmax output.
def backward_node(d)
  batch_size = @x.shape[0]
  d * (@x - @t) / batch_size
end

#forward_node(y, t) ⇒ Object



155
156
157
158
159
# File 'lib/dnn/core/losses.rb', line 155

# Computes the softmax cross-entropy loss for a batch.
#
# Caches the targets and the softmax probabilities for use by
# #backward_node, then returns the negative log-likelihood,
# taking the mean over axis 0 and summing over the remaining axis.
# @eps is added inside the log to avoid NaN from log(0).
#
# @param y raw logits.
# @param t target labels (presumably one-hot — verify against callers).
# @return the scalar loss value.
def forward_node(y, t)
  @t = t
  @x = SoftmaxCrossEntropy.softmax(y)
  log_probs = Xumo::NMath.log(@x + @eps)
  -(t * log_probs).mean(0).sum
end

#load_hash(hash) ⇒ Object



169
170
171
# File 'lib/dnn/core/losses.rb', line 169

# Restores this loss's configuration from a serialized hash by
# re-running #initialize with the stored eps value.
#
# @param hash [Hash] serialized configuration containing :eps.
def load_hash(hash)
  initialize(eps: hash[:eps])
end

#to_hashObject



165
166
167
# File 'lib/dnn/core/losses.rb', line 165

# Serializes this loss's configuration, merging eps into the hash
# built by the superclass.
#
# @return [Hash] serialized configuration including :eps.
def to_hash
  super(eps: @eps)
end