mirror of https://github.com/huggingface/transformers.git
Fix doc example: mask_time_indices (numpy) has no attribute 'to' (#15033)
* fix doc example - AttributeError: 'numpy.ndarray' object has no attribute 'to'
* fix more
* Apply suggestions from code review
* Update src/transformers/models/unispeech/modeling_unispeech.py

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
parent 927f654427
commit e34dd055e9
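For context, a minimal sketch of the failure this commit fixes and the conversion it adds, in the same doctest style as the affected examples. The sizes and device below are placeholders; in the real doc examples they come from input_values and the loaded model.

>>> import torch
>>> from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices

>>> # placeholder sizes; the doc examples derive these from the raw waveform and the model
>>> batch_size, sequence_length = 2, 100
>>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)
>>> type(mask_time_indices).__name__  # a numpy array, so calling .to(...) on it raises AttributeError
'ndarray'

>>> # the fix the examples adopt: wrap the numpy mask in a torch tensor on the target device
>>> mask_time_indices = torch.tensor(mask_time_indices, device="cpu", dtype=torch.long)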
@@ -1290,6 +1290,7 @@ class UniSpeechForPreTraining(UniSpeechPreTrainedModel):
 >>> batch_size, raw_sequence_length = input_values.shape
 >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
 >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)
+>>> mask_time_indices = torch.tensor(mask_time_indices, device=input_values.device, dtype=torch.long)

 >>> with torch.no_grad():
 ...     outputs = model(input_values, mask_time_indices=mask_time_indices)
@@ -1322,6 +1322,7 @@ class UniSpeechSatForPreTraining(UniSpeechSatPreTrainedModel):
 >>> batch_size, raw_sequence_length = input_values.shape
 >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
 >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)
+>>> mask_time_indices = torch.tensor(mask_time_indices, device=input_values.device, dtype=torch.long)

 >>> with torch.no_grad():
 ...     outputs = model(input_values, mask_time_indices=mask_time_indices)
@@ -1460,6 +1460,7 @@ class Wav2Vec2ForPreTraining(Wav2Vec2PreTrainedModel):
 >>> batch_size, raw_sequence_length = input_values.shape
 >>> sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
 >>> mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.2, mask_length=2)
+>>> mask_time_indices = torch.tensor(mask_time_indices, device=input_values.device, dtype=torch.long)

 >>> with torch.no_grad():
 ...     outputs = model(input_values, mask_time_indices=mask_time_indices)