Class: Transformers::Bert::BertEmbeddings
- Inherits: Torch::NN::Module
  - Object
  - Torch::NN::Module
  - Transformers::Bert::BertEmbeddings
- Defined in: lib/transformers/models/bert/modeling_bert.rb
Instance Method Summary
- #forward(input_ids: nil, token_type_ids: nil, position_ids: nil, inputs_embeds: nil, past_key_values_length: 0) ⇒ Object
- #initialize(config) ⇒ BertEmbeddings constructor
  A new instance of BertEmbeddings.
Constructor Details
#initialize(config) ⇒ BertEmbeddings
Returns a new instance of BertEmbeddings.
# File 'lib/transformers/models/bert/modeling_bert.rb', line 19

def initialize(config)
  super()
  @word_embeddings = Torch::NN::Embedding.new(config.vocab_size, config.hidden_size, padding_idx: config.pad_token_id)
  @position_embeddings = Torch::NN::Embedding.new(config.max_position_embeddings, config.hidden_size)
  @token_type_embeddings = Torch::NN::Embedding.new(config.type_vocab_size, config.hidden_size)

  # @LayerNorm keeps its TensorFlow-style capitalization so TensorFlow checkpoint weights map onto it by name
  @LayerNorm = Torch::NN::LayerNorm.new(config.hidden_size, eps: config.layer_norm_eps)
  @dropout = Torch::NN::Dropout.new(p: config.hidden_dropout_prob)
  # position_ids (1, len position emb) is contiguous in memory and exported when serialized
  @position_embedding_type = config.position_embedding_type || "absolute"
  register_buffer(
    "position_ids", Torch.arange(config.max_position_embeddings).expand([1, -1]), persistent: false
  )
  register_buffer(
    "token_type_ids", Torch.zeros(position_ids.size, dtype: Torch.long), persistent: false
  )
end
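Since #initialize only reads a handful of attributes off config, a lightweight stand-in object is enough to construct the layer in isolation. A minimal sketch: the Struct-based StubConfig below is hypothetical and exists only for illustration; in practice the config comes from the gem's BERT configuration class. The numeric values are the well-known bert-base-uncased hyperparameters.

require "transformers"

# Hypothetical stand-in exposing only the attributes the constructor reads;
# a real config would come from the gem's BERT configuration class.
StubConfig = Struct.new(
  :vocab_size, :hidden_size, :pad_token_id, :max_position_embeddings,
  :type_vocab_size, :layer_norm_eps, :hidden_dropout_prob,
  :position_embedding_type, keyword_init: true
)

config = StubConfig.new(
  vocab_size: 30522,            # bert-base-uncased vocabulary size
  hidden_size: 768,
  pad_token_id: 0,
  max_position_embeddings: 512, # also sizes the position_ids buffer
  type_vocab_size: 2,           # sentence A / sentence B segments
  layer_norm_eps: 1e-12,
  hidden_dropout_prob: 0.1,
  position_embedding_type: "absolute"
)

embeddings = Transformers::Bert::BertEmbeddings.new(config)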
Instance Method Details
#forward(input_ids: nil, token_type_ids: nil, position_ids: nil, inputs_embeds: nil, past_key_values_length: 0) ⇒ Object
# File 'lib/transformers/models/bert/modeling_bert.rb', line 37

def forward(
  input_ids: nil,
  token_type_ids: nil,
  position_ids: nil,
  inputs_embeds: nil,
  past_key_values_length: 0
)
  if !input_ids.nil?
    input_shape = input_ids.size
  else
    input_shape = inputs_embeds.size[...-1]
  end

  seq_length = input_shape[1]

  if position_ids.nil?
    position_ids = @position_ids[0.., past_key_values_length...(seq_length + past_key_values_length)]
  end

  # Setting token_type_ids to the registered buffer from the constructor, where it is all zeros (the usual case
  # when it is auto-generated), lets users trace the model without passing token_type_ids; solves issue #5664
  if token_type_ids.nil?
    # generating token_type_ids from the registered buffer is not yet ported
    raise Todo
  end

  if inputs_embeds.nil?
    inputs_embeds = @word_embeddings.(input_ids)
  end
  token_type_embeddings = @token_type_embeddings.(token_type_ids)

  embeddings = inputs_embeds + token_type_embeddings
  if @position_embedding_type == "absolute"
    position_embeddings = @position_embeddings.(position_ids)
    embeddings += position_embeddings
  end
  embeddings = @LayerNorm.(embeddings)
  embeddings = @dropout.(embeddings)
  embeddings
end
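#forward computes the standard BERT input embedding: word, token-type, and (for the "absolute" type) position embeddings are summed, then passed through LayerNorm and dropout. A minimal call sketch, continuing the StubConfig example above; note that token_type_ids must be passed explicitly here, since the nil branch raises Todo in this port.

batch_size = 2
seq_length = 8

# random token ids in [0, vocab_size); dtype mirrors the buffer setup above
input_ids = Torch.randint(0, 30522, [batch_size, seq_length])
token_type_ids = Torch.zeros([batch_size, seq_length], dtype: Torch.long)

out = embeddings.forward(input_ids: input_ids, token_type_ids: token_type_ids)
# position_ids default to 0...seq_length taken from the registered buffer
out.size # => [2, 8, 768]  (batch, sequence, hidden_size)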