Class: DNN::Layers::Dropout
Instance Attribute Summary collapse
Attributes inherited from Layer
#input_shape
Instance Method Summary
collapse
Methods included from LayerNode
#backward, #forward
Methods inherited from Layer
#build, #built?, #call, call, #clean, #forward, from_hash, #output_shape
Constructor Details
#initialize(dropout_ratio = 0.5, seed: rand(1 << 31), use_scale: true) ⇒ Dropout
440
441
442
443
444
445
446
447
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 440
# Builds the dropout layer.
#
# @param dropout_ratio [Float] fraction of input elements zeroed during training.
# @param seed [Integer] seed for the layer-local RNG (randomized by default).
# @param use_scale [Boolean] whether to rescale activations at inference time.
def initialize(dropout_ratio = 0.5, seed: rand(1 << 31), use_scale: true)
  super()
  @dropout_ratio = dropout_ratio
  @use_scale = use_scale
  @seed = seed
  # Layer-local RNG: dropout masks are reproducible for a given seed.
  @rnd = Random.new(seed)
  # No mask until the first training-phase forward pass.
  @mask = nil
end
|
Instance Attribute Details
#dropout_ratio ⇒ Object
Returns the value of attribute dropout_ratio.
434
435
436
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 434
# Returns the dropout probability: the chance that each input element is
# zeroed during the training phase (see #forward_node).
def dropout_ratio
  @dropout_ratio
end
|
#use_scale ⇒ Object
Returns the value of attribute use_scale.
435
436
437
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 435
# Returns whether inference-time outputs are rescaled by (1 - dropout_ratio)
# to compensate for units that were dropped during training (see #forward_node).
def use_scale
  @use_scale
end
|
Instance Method Details
#backward_node(dy) ⇒ Object
460
461
462
463
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 460
# Backward pass: gradients for the units dropped in the forward pass are
# forced to zero. Mutates and returns +dy+ in place, mirroring the in-place
# masking done by #forward_node.
def backward_node(dy)
  dy.tap { |grad| grad[@mask] = 0 }
end
|
#forward_node(x) ⇒ Object
449
450
451
452
453
454
455
456
457
458
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 449
# Forward pass of dropout.
#
# Training phase: seeds Xumo's global RNG from the layer-local RNG (so the
# mask sequence is reproducible per layer seed), draws a fresh boolean mask
# where each element is dropped with probability +dropout_ratio+, and zeroes
# the selected elements of +x+ in place.
# Inference phase: when +use_scale+ is enabled, scales activations by
# (1 - dropout_ratio) to compensate for all units being active. Note that
# `x *= ...` rebinds the local +x+ to a new array, so unlike the training
# branch this does not mutate the caller's array.
#
# @param x [Xumo::SFloat] input activations.
# @return [Xumo::SFloat] activations with dropout (training) or scaling
#   (inference) applied.
def forward_node(x)
  if DNN.learning_phase
    Xumo::SFloat.srand(@rnd.rand(1 << 31))
    @mask = Xumo::SFloat.new(*x.shape).rand < @dropout_ratio
    x[@mask] = 0
  elsif @use_scale
    x *= (1 - @dropout_ratio)
  end
  x
end
|
#load_hash(hash) ⇒ Object
469
470
471
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 469
# Re-initializes this layer from a serialized hash (inverse of #to_hash).
def load_hash(hash)
  ratio, seed, scale = hash.values_at(:dropout_ratio, :seed, :use_scale)
  initialize(ratio, seed: seed, use_scale: scale)
end
|
#to_hash ⇒ Object
465
466
467
|
# File 'lib/dnn/core/layers/basic_layers.rb', line 465
# Serializes this layer's configuration, merging the dropout-specific keys
# (:dropout_ratio, :seed, :use_scale) into the hash produced by the
# superclass implementation.
def to_hash
  super(dropout_ratio: @dropout_ratio, seed: @seed, use_scale: @use_scale)
end
|