Class: DNN::Layers::UnPool2D
Instance Attribute Summary
Attributes inherited from Layer
#input_shape, #output_shape
Instance Method Summary
Methods included from Conv2DUtils
calc_conv2d_out_size, calc_conv2d_padding_size, calc_conv2d_transpose_out_size, calc_conv2d_transpose_padding_size, col2im, col2im_cpu, col2im_gpu, im2col, im2col_cpu, im2col_gpu, zero_padding, zero_padding_bwd
Methods included from LayerNode
#forward
Methods inherited from Layer
#<<, #built?, #call, call, #clean, #forward, from_hash
Constructor Details
#initialize(unpool_size) ⇒ UnPool2D
Returns a new instance of UnPool2D.
# File 'lib/dnn/core/layers/cnn_layers.rb', line 442

def initialize(unpool_size)
  super()
  @unpool_size = unpool_size.is_a?(Integer) ? [unpool_size, unpool_size] : unpool_size
end
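UnPool2D enlarges the spatial dimensions of its input by repeating every element unpool_h × unpool_w times (nearest-neighbour style upsampling). A minimal construction sketch, based on the source above:

layer = DNN::Layers::UnPool2D.new(2)        # an Integer is expanded to [2, 2]
layer.unpool_size                           # => [2, 2]
layer = DNN::Layers::UnPool2D.new([2, 3])   # or pass [unpool_h, unpool_w] directly
layer.unpool_size                           # => [2, 3]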
Instance Attribute Details
#unpool_size ⇒ Object
Returns the value of attribute unpool_size: the [unpool_h, unpool_w] pair by which each spatial dimension is enlarged.
# File 'lib/dnn/core/layers/cnn_layers.rb', line 439

def unpool_size
  @unpool_size
end
Instance Method Details
#backward_node(dy) ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 472

def backward_node(dy)
  in_size = @input_shape[0..1]
  col = im2col(dy, *in_size, *@unpool_size, @unpool_size)
  col = col.reshape(dy.shape[0] * in_size.reduce(:*), @unpool_size.reduce(:*), dy.shape[3])
  col.sum(1).reshape(dy.shape[0], *in_size, dy.shape[3])
end
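A hedged sketch of the gradient path, assuming the standard Numo backend behind Xumo and calling the node methods directly on a built layer: dy arrives at the enlarged size, and each unpool_h × unpool_w window is summed back down to a single element, because the forward pass copied that element across the whole window.

layer = DNN::Layers::UnPool2D.new(2)
layer.build([2, 2, 1])                 # original spatial size 2x2, 1 channel
dy = Xumo::SFloat.ones(1, 4, 4, 1)     # upstream gradient at the unpooled size
dx = layer.backward_node(dy)
dx.shape                               # => [1, 2, 2, 1]
dx[0, 0, 0, 0]                         # => 4.0, the sum over one 2x2 window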
#build(input_shape) ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 447

def build(input_shape)
  unless input_shape.length == 3
    raise DNNShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
  end
  prev_h, prev_w = input_shape[0..1]
  unpool_h, unpool_w = @unpool_size
  out_h = prev_h * unpool_h
  out_w = prev_w * unpool_w
  @out_size = [out_h, out_w]
  @num_channel = input_shape[2]
  super
end
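A short sketch of the shape bookkeeping, based on the source above: each spatial dimension is multiplied by the corresponding unpool factor and the channel count is preserved (see #compute_output_shape below).

layer = DNN::Layers::UnPool2D.new(2)
layer.build([14, 14, 3])
layer.compute_output_shape   # => [28, 28, 3]
layer.build([14, 14])        # raises DNNShapeError: input shape must be 3 dimensional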
#compute_output_shape ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 479

def compute_output_shape
  [*@out_size, @num_channel]
end
#forward_node(x) ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 460

def forward_node(x)
  @x_shape = x.shape
  unpool_h, unpool_w = @unpool_size
  x2 = Xumo::SFloat.zeros(x.shape[0], x.shape[1], unpool_h, x.shape[2], unpool_w, @num_channel)
  unpool_h.times do |i|
    unpool_w.times do |j|
      x2[true, true, i, true, j, true] = x
    end
  end
  x2.reshape(x.shape[0], *@out_size, x.shape[3])
end
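An illustrative forward sketch, assuming the Numo backend behind Xumo and calling forward_node directly on a built layer: each pixel of a 2×2 single-channel image is duplicated across a 2×2 block of the 4×4 output.

layer = DNN::Layers::UnPool2D.new(2)
layer.build([2, 2, 1])
x = Xumo::SFloat.new(1, 2, 2, 1).seq   # pixel values 0, 1, 2, 3
y = layer.forward_node(x)
y.shape                                # => [1, 4, 4, 1]
y[0, 0..1, 0..1, 0].to_a               # => [[0.0, 0.0], [0.0, 0.0]] (top-left block)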
#load_hash(hash) ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 487

def load_hash(hash)
  initialize(hash[:unpool_size])
end
#to_hash ⇒ Object
# File 'lib/dnn/core/layers/cnn_layers.rb', line 483

def to_hash
  super(unpool_size: @unpool_size)
end
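A hedged round-trip sketch, assuming the inherited Layer#to_hash records the class name and Layer.from_hash (listed above) dispatches to #load_hash:

layer = DNN::Layers::UnPool2D.new([2, 3])
h = layer.to_hash
h[:unpool_size]                        # => [2, 3]
restored = DNN::Layers::UnPool2D.from_hash(h)
restored.unpool_size                   # => [2, 3]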