Mirror of https://github.com/huggingface/transformers.git (last synced 2025-07-31 02:02:21 +06:00)
Fix doc examples: unexpected keyword argument (#14689)

* Fix doc examples: `_compute_mask_indices` does not accept a `device` keyword argument, so drop it from the docstring examples
* Don't delete `token_type_ids` from inputs

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
This commit is contained in:
parent
5b00400198
commit
ae82ee6a48
@@ -1262,7 +1262,7 @@ class UniSpeechForPreTraining(UniSpeechPreTrainedModel):
     >>> # compute masked indices
     >>> batch_size, raw_sequence_length = input_values.shape
     >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
-    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2, device=model.device)
+    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)

     >>> with torch.no_grad():
     ...     outputs = model(input_values, mask_time_indices=mask_time_indices)
@@ -1260,7 +1260,7 @@ class UniSpeechSatForPreTraining(UniSpeechSatPreTrainedModel):
     >>> # compute masked indices
     >>> batch_size, raw_sequence_length = input_values.shape
     >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
-    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2, device=model.device)
+    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)

     >>> with torch.no_grad():
     ...     outputs = model(input_values, mask_time_indices=mask_time_indices)
@@ -1372,7 +1372,7 @@ class Wav2Vec2ForPreTraining(Wav2Vec2PreTrainedModel):
     >>> # compute masked indices
     >>> batch_size, raw_sequence_length = input_values.shape
     >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
-    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2, device=model.device)
+    >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)

     >>> with torch.no_grad():
     ...     outputs = model(input_values, mask_time_indices=mask_time_indices)
Loading…
Reference in New Issue
Block a user