diff --git a/src/transformers/configuration_reformer.py b/src/transformers/configuration_reformer.py index 8d364680664..7532684fddb 100755 --- a/src/transformers/configuration_reformer.py +++ b/src/transformers/configuration_reformer.py @@ -115,6 +115,8 @@ class ReformerConfig(PretrainedConfig): vocab_size (:obj:`int`, optional, defaults to 320): Vocabulary size of the Reformer model. Defines the different tokens that can be represented by the `inputs_ids` passed to the forward method of :class:`~transformers.ReformerModel`. + tie_word_embeddings (:obj:`bool`, `optional`, defaults to :obj:`False`): + Whether to tie the input and output word embeddings. Example::