Fixed the default number of attention heads in Reformer Configuration (#6973)

Author: tznurmin, 2020-09-07 10:12:22 +00:00 (committed by GitHub)
parent e20d8895bd
commit f7e80721eb


@@ -160,7 +160,7 @@ class ReformerConfig(PretrainedConfig):
         lsh_num_chunks_before=1,
         lsh_num_chunks_after=0,
         max_position_embeddings=4096,
-        num_attention_heads=2,
+        num_attention_heads=12,
         num_buckets=None,
         num_hashes=1,
         pad_token_id=0,
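
The practical effect of this one-line change: a ReformerConfig constructed with no arguments now defaults to 12 attention heads instead of 2. A minimal sketch of the new behavior, assuming a transformers version that includes this commit:

from transformers import ReformerConfig

# With this commit applied, the default number of attention heads is 12.
config = ReformerConfig()
print(config.num_attention_heads)  # 12 (was 2 before this fix)

# The old value can still be set explicitly when a smaller model is wanted.
small_config = ReformerConfig(num_attention_heads=2)
print(small_config.num_attention_heads)  # 2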