Mirror of https://github.com/huggingface/transformers.git, synced 2025-08-02 19:21:31 +06:00
Fix XGLM cross attention (#16290)
This commit is contained in: parent f393868073, commit 641e5f3f55
@@ -399,7 +399,7 @@ class XGLMDecoderLayer(nn.Module):
         self.activation_dropout = config.activation_dropout
 
         if config.add_cross_attention:
-            self.crossattention = XGLMAttention(
+            self.encoder_attn = XGLMAttention(
                 embed_dim=self.embed_dim,
                 num_heads=config.attention_heads,
                 dropout=config.attention_dropout,
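Judging from the diff, the fix is a pure rename: the rest of XGLMDecoderLayer evidently refers to the cross-attention module as self.encoder_attn, so registering it in __init__ as self.crossattention left that attribute undefined whenever config.add_cross_attention was enabled. The sketch below illustrates that failure mode with a toy layer; it is not the actual XGLM code, and ToyDecoderLayer, its arguments, and the tensor shapes are made-up stand-ins.

# Minimal sketch (not the real XGLM code) of the bug this commit fixes:
# the module is registered under one name in __init__ but looked up
# under another name in forward, so enabling cross attention raises
# AttributeError. All names below are simplified stand-ins.
import torch
import torch.nn as nn


class ToyDecoderLayer(nn.Module):
    def __init__(self, embed_dim: int = 8, add_cross_attention: bool = True):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(embed_dim, num_heads=2, batch_first=True)
        if add_cross_attention:
            # Buggy version: the module is registered as `crossattention` ...
            self.crossattention = nn.MultiheadAttention(embed_dim, num_heads=2, batch_first=True)

    def forward(self, hidden_states, encoder_hidden_states=None):
        hidden_states, _ = self.self_attn(hidden_states, hidden_states, hidden_states)
        if encoder_hidden_states is not None:
            # ... but looked up here as `encoder_attn`, so this call fails with
            # AttributeError until the two names agree (the commit makes them
            # agree by renaming the attribute in __init__ to `encoder_attn`).
            hidden_states, _ = self.encoder_attn(
                hidden_states, encoder_hidden_states, encoder_hidden_states
            )
        return hidden_states


if __name__ == "__main__":
    layer = ToyDecoderLayer()
    x = torch.randn(1, 4, 8)       # decoder hidden states
    enc = torch.randn(1, 6, 8)     # encoder hidden states
    try:
        layer(x, encoder_hidden_states=enc)
    except AttributeError as err:
        print("cross attention fails:", err)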