Commit: Update modeling_ltgbert.py

Files changed: modeling_ltgbert.py (+2 −0)
@@ -422,6 +422,7 @@ class LtgBertModel(LtgBertPreTrainedModel):
         output_hidden_states: Optional[bool] = None,
         output_attentions: Optional[bool] = None,
         return_dict: Optional[bool] = None,
+        token_type_ids=None
     ) -> Union[Tuple[torch.Tensor], BaseModelOutput]:

         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
@@ -468,6 +469,7 @@ class LtgBertForMaskedLM(LtgBertModel):
         output_attentions: Optional[bool] = None,
         return_dict: Optional[bool] = None,
         labels: Optional[torch.LongTensor] = None,
+        token_type_ids=None
     ) -> Union[Tuple[torch.Tensor], MaskedLMOutput]:
         r"""
         labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):