Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
Use attention_mask everywhere.
commit fe0f552e00
parent 348e19aa21
@@ -154,9 +154,6 @@ class QuestionAnsweringPipeline(Pipeline):
                 return_attention_masks=True, return_input_lengths=False
             )
 
-            # TODO : Harmonize model arguments across all model
-            inputs['attention_mask'] = inputs.pop('encoder_attention_mask')
-
             if is_tf_available():
                 # TODO trace model
                 start, end = self.model(inputs)
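For context, a minimal sketch (not part of the commit) of the pattern this change enables: once the tokenizer emits the mask under the name attention_mask, its output can be passed straight to the model, with no key renaming such as inputs.pop('encoder_attention_mask'). The sketch uses the present-day transformers API rather than the encode_plus call shown in the diff; the checkpoint name and example strings are illustrative assumptions.

# Sketch only: tokenizer output flows into a QA model without renaming
# any mask key. The checkpoint name below is an assumption, not from the commit.
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

name = "distilbert-base-cased-distilled-squad"
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForQuestionAnswering.from_pretrained(name)

inputs = tokenizer(
    "Who maintains the transformers library?",                   # question
    "The transformers library is maintained by Hugging Face.",   # context
    return_tensors="pt",
)
# The mask is already named attention_mask; nothing to pop or rename.
assert "attention_mask" in inputs

outputs = model(**inputs)  # the mask is consumed as-is
start_logits, end_logits = outputs.start_logits, outputs.end_logits

Because the tokenizer and the model now agree on the key name, the three removed lines (the harmonization TODO, the rename, and the trailing blank line) become dead code, which is exactly what this hunk deletes.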