Module: Transformers::DebertaV2
Defined in:
- lib/transformers/models/deberta_v2/modeling_deberta_v2.rb
- lib/transformers/models/deberta_v2/configuration_deberta_v2.rb
- lib/transformers/models/deberta_v2/tokenization_deberta_v2_fast.rb
Defined Under Namespace
Classes: ContextPooler, ConvLayer, DebertaV2Attention, DebertaV2Config, DebertaV2Embeddings, DebertaV2Encoder, DebertaV2ForMaskedLM, DebertaV2ForMultipleChoice, DebertaV2ForQuestionAnswering, DebertaV2ForSequenceClassification, DebertaV2ForTokenClassification, DebertaV2Intermediate, DebertaV2LMPredictionHead, DebertaV2Layer, DebertaV2Model, DebertaV2OnlyMLMHead, DebertaV2Output, DebertaV2PreTrainedModel, DebertaV2PredictionHeadTransform, DebertaV2SelfOutput, DebertaV2TokenizerFast, DisentangledSelfAttention, DropoutContext, StableDropout, XDropout, XSoftmax
Class Method Summary

- .build_relative_position(query_size, key_size, bucket_size: -1, max_position: -1, device: nil) ⇒ Object
- .c2p_dynamic_expand(c2p_pos, query_layer, relative_pos) ⇒ Object
- .make_log_bucket_position(relative_pos, bucket_size, max_position) ⇒ Object
- .p2c_dynamic_expand(c2p_pos, query_layer, key_layer) ⇒ Object
- .pos_dynamic_expand(pos_index, p2c_att, key_layer) ⇒ Object
Instance Method Summary

- #get_mask(input, local_context) ⇒ Object
Class Method Details
.build_relative_position(query_size, key_size, bucket_size: -1, max_position: -1, device: nil) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 436
def self.build_relative_position(query_size, key_size, bucket_size: -1, max_position: -1, device: nil)
  q_ids = Torch.arange(0, query_size, device: device)
  k_ids = Torch.arange(0, key_size, device: device)
  # nil indexing inserts a new axis (like None in PyTorch), so this is an
  # outer difference: rel_pos_ids[i, j] = i - j
  rel_pos_ids = q_ids[0.., nil] - k_ids[nil, 0..]
  if bucket_size > 0 && max_position > 0
    rel_pos_ids = make_log_bucket_position(rel_pos_ids, bucket_size, max_position)
  end
  rel_pos_ids = rel_pos_ids.to(Torch.long)
  rel_pos_ids = rel_pos_ids[...query_size, 0..]
  # Add a leading batch dimension: [1, query_size, key_size]
  rel_pos_ids.unsqueeze(0)
end
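A minimal usage sketch (assuming torch-rb is loaded and calling through the module; the sizes are illustrative):

rel = Transformers::DebertaV2.build_relative_position(3, 4)
rel.shape   # => [1, 3, 4]
rel[0][0]   # => tensor([ 0, -1, -2, -3]) -- query position 0 minus each key position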
.c2p_dynamic_expand(c2p_pos, query_layer, relative_pos) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 449
def self.c2p_dynamic_expand(c2p_pos, query_layer, relative_pos)
  # Broadcast the content->position gather indices to the attention score shape.
  c2p_pos.expand([query_layer.size(0), query_layer.size(1), query_layer.size(2), relative_pos.size(-1)])
end
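A shape sketch (the dimension names are assumptions about the disentangled-attention call site, not stated in this file):

# query_layer:  [batch, heads, query_len, head_dim]   (illustrative)
# relative_pos: [..., n_rel]
# result:       [batch, heads, query_len, n_rel], ready for use with Torch.gather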
.make_log_bucket_position(relative_pos, bucket_size, max_position) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 427
def self.make_log_bucket_position(relative_pos, bucket_size, max_position)
  sign = Torch.sign(relative_pos)
  mid = bucket_size / 2
  # Offsets inside (-mid, mid) keep their exact value; farther offsets are
  # squashed logarithmically into the remaining buckets.
  abs_pos = Torch.where(relative_pos.lt(mid) & relative_pos.gt(-mid), Torch.tensor(mid - 1).type_as(relative_pos), Torch.abs(relative_pos))
  # fdiv forces float division; Ruby's Integer#/ would truncate and skew the buckets
  log_pos = Torch.ceil((Torch.log(abs_pos / mid) / Torch.log(Torch.tensor((max_position - 1).fdiv(mid)))) * (mid - 1)) + mid
  bucket_pos = Torch.where(abs_pos.le(mid), relative_pos.type_as(log_pos), log_pos * sign)
  bucket_pos
end
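A worked example of the bucketing (illustrative numbers, using float division as above): with bucket_size = 256 and max_position = 512, mid = 128, so offsets with magnitude at most 128 keep their exact value, while an offset of 300 maps to ceil(log(300/128) / log(511/128) * 127) + 128 = 79 + 128 = 207, and -300 maps to -207.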
.p2c_dynamic_expand(c2p_pos, query_layer, key_layer) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 453
def self.p2c_dynamic_expand(c2p_pos, query_layer, key_layer)
  # Position->content variant: the last two dimensions are both the key length.
  c2p_pos.expand([query_layer.size(0), query_layer.size(1), key_layer.size(-2), key_layer.size(-2)])
end
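Unlike the content→position helper, this expands against the key side: the last two dimensions are both key_layer.size(-2), i.e. the key length, matching the transposed position→content score matrix it is gathered from (a reading of the call site, not stated in this file).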
.pos_dynamic_expand(pos_index, p2c_att, key_layer) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 457
def self.pos_dynamic_expand(pos_index, p2c_att, key_layer)
  # size[...2] keeps the first two entries of the size array of p2c_att.
  pos_index.expand(p2c_att.size[...2] + [pos_index.size(-2), key_layer.size(-2)])
end
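Here p2c_att.size[...2] takes the first two entries of the size array (batch and head dimensions in typical usage, an assumption about the call site), so pos_index is broadcast to [batch, heads, pos_index.size(-2), key_len].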
Instance Method Details
#get_mask(input, local_context) ⇒ Object
# File 'lib/transformers/models/deberta_v2/modeling_deberta_v2.rb', line 65
def get_mask(input, local_context)
  if !local_context.is_a?(DropoutContext)
    # A bare number is treated as the dropout probability itself.
    dropout = local_context
    mask = nil
  else
    dropout = local_context.dropout
    dropout *= local_context.scale
    mask = local_context.reuse_mask ? local_context.mask : nil
  end

  # Sample a fresh Bernoulli mask when dropout is active and nothing is
  # cached; true marks the positions to drop.
  if dropout > 0 && mask.nil?
    mask = (1 - Torch.empty_like(input).bernoulli!(1 - dropout)).to(Torch.bool)
  end

  # Cache the mask on the context so later calls can reuse it.
  if local_context.is_a?(DropoutContext)
    if local_context.mask.nil?
      local_context.mask = mask
    end
  end

  [mask, dropout]
end
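A minimal sketch of the two call shapes, assuming torch-rb is loaded and the helper is in scope (it is a module-level method); the DropoutContext attributes used here mirror the reference implementation and are an assumption:

x = Torch.randn(2, 4)

# Bare Float: treated as the dropout probability; a fresh mask each call.
mask, p = get_mask(x, 0.1)   # mask is a boolean tensor, true where dropped

# DropoutContext: the first call caches its mask on the context so later
# calls (with reuse_mask, the default in the reference port) get the same one.
ctx = DropoutContext.new     # hypothetical construction; see the class above
ctx.dropout = 0.1            # assumes a writable attribute, as in the reference
m1, _ = get_mask(x, ctx)
m2, _ = get_mask(x, ctx)     # m2 equals m1 via the cached mask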