mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
decoder_config
used before initialisation (#7903)
An error occurred when passing `decoder_config` as a parameter while initializing an encoder-decoder model from pretrained. Fixed "UnboundLocalError: local variable 'decoder_config' referenced before assignment".
This commit is contained in:
parent
033f29c625
commit
df1ddcedf2
@ -327,7 +327,7 @@ class EncoderDecoderModel(PreTrainedModel):
|
||||
|
||||
kwargs_decoder["config"] = decoder_config
|
||||
|
||||
if kwargs_decoder["config"].is_decoder is False or decoder_config.add_cross_attention is False:
|
||||
if kwargs_decoder["config"].is_decoder is False or kwargs_decoder["config"].add_cross_attention is False:
|
||||
logger.warning(
|
||||
f"Decoder model {decoder_pretrained_model_name_or_path} is not initialized as a decoder. In order to initialize {decoder_pretrained_model_name_or_path} as a decoder, make sure that the attributes `is_decoder` and `add_cross_attention` of `decoder_config` passed to `.from_encoder_decoder_pretrained(...)` are set to `True` or do not pass a `decoder_config` to `.from_encoder_decoder_pretrained(...)`"
|
||||
)
|
||||
|
Loading…
Reference in New Issue
Block a user