mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-30 09:42:22 +06:00
make p_mask
a numpy array before passing to select_starts_ends
(#32076)
* fix
* bug fix
* refine
* fix
This commit is contained in:
parent
535fe78b9f
commit
6494479f1d
@@ -378,7 +378,7 @@ class DocumentQuestionAnsweringPipeline(ChunkPipeline):
         # p_mask: mask with 1 for token than cannot be in the answer (0 for token which can be in an answer)
         # We put 0 on the tokens from the context and 1 everywhere else (question and special tokens)
         # This logic mirrors the logic in the question_answering pipeline
-        p_mask = [[tok != 1 for tok in encoding.sequence_ids(span_id)] for span_id in range(num_spans)]
+        p_mask = np.array([[tok != 1 for tok in encoding.sequence_ids(span_id)] for span_id in range(num_spans)])
         for span_idx in range(num_spans):
             if self.framework == "pt":
                 span_encoding = {k: torch.tensor(v[span_idx : span_idx + 1]) for (k, v) in encoding.items()}
|
Loading…
Reference in New Issue
Block a user