Class: Transformers::Distilbert::Transformer

Inherits: Torch::NN::Module < Object (show all)
Defined in:
lib/transformers/models/distilbert/modeling_distilbert.rb

Instance Method Summary collapse

Constructor Details

#initialize(config) ⇒ Transformer

Returns a new instance of Transformer.



# File 'lib/transformers/models/distilbert/modeling_distilbert.rb', lines 238–243

# Builds the DistilBERT encoder stack: one TransformerBlock per layer.
#
# @param config [Object] model configuration; only +n_layers+ is read here
#   (each block reads the rest of the config itself)
def initialize(config)
  super()
  @n_layers = config.n_layers
  blocks = Array.new(config.n_layers) { TransformerBlock.new(config) }
  @layer = Torch::NN::ModuleList.new(blocks)
  # Disabled by default; toggled externally when checkpointing is enabled.
  @gradient_checkpointing = false
end

Instance Method Details

#forward(x:, attn_mask: nil, head_mask: nil, output_attentions: false, output_hidden_states: false, return_dict: nil) ⇒ Object



# File 'lib/transformers/models/distilbert/modeling_distilbert.rb', lines 245–308

# Runs the input through every TransformerBlock in sequence.
#
# @param x [Torch::Tensor] input embeddings — presumably (bs, seq_length, dim); confirm against caller
# @param attn_mask [Torch::Tensor, nil] attention mask forwarded to each block
# @param head_mask [Array, nil] per-layer head masks; indexed by layer, may be nil
# @param output_attentions [Boolean] collect each layer's attention weights
# @param output_hidden_states [Boolean] collect the hidden state before each layer and after the last
# @param return_dict [Boolean, nil] must be truthy; the tuple return path is not yet ported
# @return [BaseModelOutput] last hidden state plus optional hidden states / attentions
# @raise [ArgumentError] if a block returns an unexpected number of outputs
# @raise [Todo] if return_dict is falsy (tuple output not implemented)
def forward(
  x:,
  attn_mask: nil,
  head_mask: nil,
  output_attentions: false,
  output_hidden_states: false,
  return_dict: nil
)
  all_hidden_states = output_hidden_states ? [] : nil
  all_attentions = output_attentions ? [] : nil

  hidden_state = x
  @layer.each_with_index do |layer_module, i|
    # Record the hidden state *entering* this layer.
    all_hidden_states << hidden_state if output_hidden_states

    # nil-safe per-layer mask: head_mask defaults to nil, and nil[i] would raise.
    layer_head_mask = head_mask && head_mask[i]

    if @gradient_checkpointing && training
      # Pass a callable, not the result of a call: a bare
      # `layer_module.__call__` would invoke the method with no
      # arguments (and raise, since `x:` is required) before the
      # checkpointing helper ever ran.
      layer_outputs =
        _gradient_checkpointing_func(
          layer_module.method(:__call__),
          hidden_state,
          attn_mask,
          layer_head_mask,
          output_attentions,
        )
    else
      layer_outputs =
        layer_module.(
          x: hidden_state,
          attn_mask: attn_mask,
          head_mask: layer_head_mask,
          output_attentions: output_attentions
        )
    end

    # Each block returns its new hidden state last.
    hidden_state = layer_outputs[-1]

    if output_attentions
      if layer_outputs.length != 2
        raise ArgumentError, "The length of the layer_outputs should be 2, but it is #{layer_outputs.length}"
      end

      all_attentions << layer_outputs[0]
    else
      if layer_outputs.length != 1
        raise ArgumentError, "The length of the layer_outputs should be 1, but it is #{layer_outputs.length}"
      end
    end
  end

  # Add hidden state after the last layer
  all_hidden_states << hidden_state if output_hidden_states

  # Tuple (non-dict) output is not implemented in this port.
  raise Todo unless return_dict

  BaseModelOutput.new(
    last_hidden_state: hidden_state, hidden_states: all_hidden_states, attentions: all_attentions
  )
end