Fixed the default number of attention heads in Reformer Configuration (#6973)
This commit is contained in:
parent
e20d8895bd
commit
f7e80721eb
@@ -160,7 +160,7 @@ class ReformerConfig(PretrainedConfig):
         lsh_num_chunks_before=1,
         lsh_num_chunks_after=0,
         max_position_embeddings=4096,
-        num_attention_heads=2,
+        num_attention_heads=12,
         num_buckets=None,
         num_hashes=1,
         pad_token_id=0,
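With this change, instantiating ReformerConfig without arguments yields 12 attention heads by default instead of 2. A minimal sketch of how the new default can be checked, assuming a transformers install that includes this commit:

# Sketch: inspect the default ReformerConfig (assumes this commit is included).
from transformers import ReformerConfig

config = ReformerConfig()               # no arguments -> library defaults
print(config.num_attention_heads)       # expected: 12 after this fix (previously 2)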