Class: Torch::NN::LSTM
Instance Method Summary
- #check_forward_args(input, hidden, batch_sizes) ⇒ Object
- #forward(input, hx: nil) ⇒ Object
- #forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices) ⇒ Object
- #forward_tensor(input, hx: nil) ⇒ Object
- #initialize(*args, **options) ⇒ LSTM (constructor): A new instance of LSTM.
- #permute_hidden(hx, permutation) ⇒ Object
Methods inherited from RNNBase
#_apply, #extra_inspect, #flatten_parameters, #reset_parameters
Methods inherited from Module
#_apply, #add_module, #apply, #buffers, #call, #children, #cpu, #cuda, #double, #eval, #float, #half, #inspect, #load_state_dict, #method_missing, #modules, #named_buffers, #named_children, #named_modules, #named_parameters, #parameters, #register_buffer, #register_parameter, #requires_grad!, #respond_to?, #share_memory, #state_dict, #to, #train, #type, #zero_grad
Methods included from Utils
#_ntuple, #_pair, #_quadrupal, #_single, #_triple
Constructor Details
#initialize(*args, **options) ⇒ LSTM
Returns a new instance of LSTM.
```ruby
# File 'lib/torch/nn/lstm.rb', line 4

def initialize(*args, **options)
  super("LSTM", *args, **options)
end
```
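A minimal construction sketch. This assumes the PyTorch-style RNNBase signature (input_size and hidden_size positionally, with num_layers:, batch_first:, dropout:, and bidirectional: keywords); all dimensions are illustrative:

```ruby
require "torch"

# All sizes below are hypothetical, chosen only for illustration.
lstm = Torch::NN::LSTM.new(
  10, 20,              # input_size, hidden_size (forwarded to RNNBase)
  num_layers: 2,       # two stacked LSTM layers
  batch_first: false,  # inputs are [seq_len, batch, input_size]
  bidirectional: false
)
```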
Dynamic Method Handling
This class handles dynamic methods through the method_missing method in the Torch::NN::Module class.
Instance Method Details
#check_forward_args(input, hidden, batch_sizes) ⇒ Object
```ruby
# File 'lib/torch/nn/lstm.rb', line 8

def check_forward_args(input, hidden, batch_sizes)
  check_input(input, batch_sizes)
  expected_hidden_size = get_expected_hidden_size(input, batch_sizes)

  # TODO pass message
  check_hidden_size(hidden[0], expected_hidden_size)
  check_hidden_size(hidden[1], expected_hidden_size)
end
```
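This check runs on every forward pass, so a mis-shaped hidden state is rejected before Torch.lstm is invoked. A hedged sketch; the exact error raised comes from check_hidden_size in RNNBase, and the sizes are illustrative:

```ruby
lstm = Torch::NN::LSTM.new(10, 20, num_layers: 2)
input = Torch.randn(5, 3, 10)

# Wrong first dimension: with 2 layers (unidirectional), the expected
# hidden size is [2, 3, 20], not [1, 3, 20].
bad_h0 = Torch.zeros(1, 3, 20)
bad_c0 = Torch.zeros(1, 3, 20)

begin
  lstm.call(input, hx: [bad_h0, bad_c0])
rescue => e
  puts e.message # shape mismatch reported by check_hidden_size
end
```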
#forward(input, hx: nil) ⇒ Object
```ruby
# File 'lib/torch/nn/lstm.rb', line 60

def forward(input, hx: nil)
  # TODO PackedSequence
  forward_tensor(input, hx: hx)
end
```
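Since Module#call dispatches to #forward, the layer can be applied like a function. A sketch under illustrative dimensions; note that only plain tensors are accepted until PackedSequence support lands (see the TODO above):

```ruby
lstm = Torch::NN::LSTM.new(10, 20, num_layers: 2)
input = Torch.randn(5, 3, 10) # [seq_len, batch, input_size] with batch_first: false

# hx defaults to nil, so zero-filled (h0, c0) states are created internally.
output, (hn, cn) = lstm.call(input)

output.shape # => [5, 3, 20]  ([seq_len, batch, hidden_size])
hn.shape     # => [2, 3, 20]  ([num_layers * num_directions, batch, hidden_size])
```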
#forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices) ⇒ Object
```ruby
# File 'lib/torch/nn/lstm.rb', line 24

def forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices)
  if hx.nil?
    num_directions = @bidirectional ? 2 : 1
    zeros = Torch.zeros(@num_layers * num_directions, max_batch_size, @hidden_size, dtype: input.dtype, device: input.device)
    hx = [zeros, zeros]
  else
    # Each batch of the hidden state should match the input sequence that
    # the user believes he/she is passing in.
    hx = permute_hidden(hx, sorted_indices)
  end

  check_forward_args(input, hx, batch_sizes)
  if batch_sizes.nil?
    result = Torch.lstm(input, hx, _get_flat_weights, @bias, @num_layers, @dropout, @training, @bidirectional, @batch_first)
  else
    result = Torch.lstm(input, batch_sizes, hx, _get_flat_weights, @bias, @num_layers, @dropout, @training, @bidirectional)
  end
  output = result[0]
  hidden = result[1..-1]

  [output, hidden]
end
```
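When hx is nil, the zero states built above have shape [num_layers * num_directions, max_batch_size, hidden_size]. A sketch of supplying an explicit (h0, c0) pair with those shapes instead (sizes illustrative):

```ruby
lstm = Torch::NN::LSTM.new(10, 20, num_layers: 2) # unidirectional: num_directions = 1

h0 = Torch.zeros(2, 3, 20) # [num_layers * num_directions, batch, hidden_size]
c0 = Torch.zeros(2, 3, 20)

input = Torch.randn(5, 3, 10)
output, (hn, cn) = lstm.call(input, hx: [h0, c0])
```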
#forward_tensor(input, hx: nil) ⇒ Object
```ruby
# File 'lib/torch/nn/lstm.rb', line 49

def forward_tensor(input, hx: nil)
  batch_sizes = nil
  max_batch_size = @batch_first ? input.size(0) : input.size(1)
  sorted_indices = nil
  unsorted_indices = nil

  output, hidden = forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices)

  [output, permute_hidden(hidden, unsorted_indices)]
end
```
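The batch dimension is read from position 0 or 1 depending on batch_first. A sketch contrasting the two layouts (dimensions illustrative):

```ruby
# batch_first: false (default): input is [seq_len, batch, input_size],
# so max_batch_size = input.size(1) = 3.
lstm = Torch::NN::LSTM.new(10, 20)
out, _ = lstm.call(Torch.randn(5, 3, 10))

# batch_first: true: input is [batch, seq_len, input_size],
# so max_batch_size = input.size(0) = 3.
lstm_bf = Torch::NN::LSTM.new(10, 20, batch_first: true)
out_bf, _ = lstm_bf.call(Torch.randn(3, 5, 10))
```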
#permute_hidden(hx, permutation) ⇒ Object
```ruby
# File 'lib/torch/nn/lstm.rb', line 17

def permute_hidden(hx, permutation)
  if permutation.nil?
    return hx
  end
  raise NotImplementedYet
end
```
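With plain tensor inputs, forward_tensor always passes nil indices, so this is a no-op; a non-nil permutation would only arise from the unimplemented PackedSequence path. A small sketch (states illustrative):

```ruby
lstm = Torch::NN::LSTM.new(10, 20, num_layers: 2)
h0 = Torch.zeros(2, 3, 20)
c0 = Torch.zeros(2, 3, 20)

lstm.permute_hidden([h0, c0], nil) # => [h0, c0], returned unchanged

# Any non-nil permutation raises NotImplementedYet.
```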