Fix XGLM cross attention (#16290)

Suraj Patil 2022-03-21 13:07:28 +01:00 committed by GitHub
parent f393868073
commit 641e5f3f55


@@ -399,7 +399,7 @@ class XGLMDecoderLayer(nn.Module):
         self.activation_dropout = config.activation_dropout
         if config.add_cross_attention:
-            self.crossattention = XGLMAttention(
+            self.encoder_attn = XGLMAttention(
                 embed_dim=self.embed_dim,
                 num_heads=config.attention_heads,
                 dropout=config.attention_dropout,
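
The fix renames the cross-attention module so the attribute bound in __init__ matches the name the rest of the layer looks up (self.encoder_attn, the convention used by BART-style decoders); binding it under a different name would raise an AttributeError the first time the layer runs with encoder states. Below is a minimal, self-contained sketch — not the actual XGLM source — illustrating the wiring; ToyDecoderLayer and all signatures are illustrative assumptions, not the library's API.

    import torch
    import torch.nn as nn

    class ToyDecoderLayer(nn.Module):
        """Illustrative decoder layer; names/shapes are assumptions, not HF code."""

        def __init__(self, embed_dim: int, num_heads: int, add_cross_attention: bool = True):
            super().__init__()
            self.self_attn = nn.MultiheadAttention(embed_dim, num_heads, batch_first=True)
            if add_cross_attention:
                # Must be bound under the same name forward() uses below.
                # Assigning it as `self.crossattention` here would make the
                # lookup `self.encoder_attn` fail at runtime.
                self.encoder_attn = nn.MultiheadAttention(embed_dim, num_heads, batch_first=True)

        def forward(self, hidden_states, encoder_hidden_states=None):
            # Self-attention over the decoder sequence.
            hidden_states, _ = self.self_attn(hidden_states, hidden_states, hidden_states)
            if encoder_hidden_states is not None:
                # Cross-attention: queries come from the decoder,
                # keys/values from the encoder output.
                hidden_states, _ = self.encoder_attn(
                    hidden_states, encoder_hidden_states, encoder_hidden_states
                )
            return hidden_states

    # Usage: a batch of 2, decoder length 5, encoder length 7, width 16.
    layer = ToyDecoderLayer(embed_dim=16, num_heads=4)
    x = torch.randn(2, 5, 16)
    enc = torch.randn(2, 7, 16)
    out = layer(x, enc)  # shape (2, 5, 16)

A side effect worth noting: renaming a submodule also renames its parameters in the state dict (encoder_attn.* vs crossattention.*), so checkpoints saved before and after such a rename are only compatible if the stored weights already used the new name.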