From 14e9d2954c3a7256a49a3e581ae25364c76f521e Mon Sep 17 00:00:00 2001
From: sararb
Date: Mon, 16 Aug 2021 12:36:08 -0400
Subject: [PATCH] compute seq_len from inputs_embeds (#13128)

---
 src/transformers/models/electra/modeling_electra.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/transformers/models/electra/modeling_electra.py b/src/transformers/models/electra/modeling_electra.py
index c4366f56848..4a5b03abdd1 100644
--- a/src/transformers/models/electra/modeling_electra.py
+++ b/src/transformers/models/electra/modeling_electra.py
@@ -854,12 +854,12 @@ class ElectraModel(ElectraPreTrainedModel):
             raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
         elif input_ids is not None:
             input_shape = input_ids.size()
-            batch_size, seq_length = input_shape
         elif inputs_embeds is not None:
             input_shape = inputs_embeds.size()[:-1]
         else:
             raise ValueError("You have to specify either input_ids or inputs_embeds")
+        batch_size, seq_length = input_shape
 
         device = input_ids.device if input_ids is not None else inputs_embeds.device
 
         if attention_mask is None:
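
For reference, a minimal sketch of the call path this patch fixes. Before the change, batch_size and seq_length were bound only in the input_ids branch, so a forward pass driven purely by inputs_embeds could hit an unbound-variable error when later code in forward() read them; after the change, input_shape = inputs_embeds.size()[:-1] feeds both. The checkpoint name below is an illustrative assumption, not part of the patch.

# Minimal sketch, assuming the google/electra-small-discriminator
# checkpoint (any ELECTRA checkpoint would do). Exercises the
# inputs_embeds-only path that this patch repairs.
from transformers import AutoTokenizer, ElectraModel

name = "google/electra-small-discriminator"
model = ElectraModel.from_pretrained(name)
tokenizer = AutoTokenizer.from_pretrained(name)

input_ids = tokenizer("patch test", return_tensors="pt").input_ids

# Look the ids up manually and hand the model embeddings instead of ids.
inputs_embeds = model.get_input_embeddings()(input_ids)  # (batch, seq_len, hidden)

# With the patch applied, batch_size/seq_length come from
# inputs_embeds.size()[:-1]; without it, this path left them unbound.
outputs = model(inputs_embeds=inputs_embeds)
print(outputs.last_hidden_state.shape)  # (batch, seq_len, hidden_size)

Hoisting the assignment below the if/elif/else also keeps the branches symmetric: each branch now only computes input_shape, and the unpacking into batch_size, seq_length happens once for either input path.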