From acfaad74abc891a35d3d3435fb9bb609fb6880c6 Mon Sep 17 00:00:00 2001
From: Stas Bekman
Date: Mon, 7 Sep 2020 02:36:16 -0700
Subject: [PATCH] [docstring] missing arg (#6933)

* [docstring] missing arg

add the missing `tie_word_embeddings` entry

* cleanup

* Update src/transformers/configuration_reformer.py

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
---
 src/transformers/configuration_reformer.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/transformers/configuration_reformer.py b/src/transformers/configuration_reformer.py
index 8d364680664..7532684fddb 100755
--- a/src/transformers/configuration_reformer.py
+++ b/src/transformers/configuration_reformer.py
@@ -115,6 +115,8 @@ class ReformerConfig(PretrainedConfig):
         vocab_size (:obj:`int`, optional, defaults to 320):
             Vocabulary size of the Reformer model. Defines the different tokens that can be represented by the
             `inputs_ids` passed to the forward method of :class:`~transformers.ReformerModel`.
+        tie_word_embeddings (:obj:`bool`, `optional`, defaults to :obj:`False`):
+            Whether to tie input and output embeddings.
 
     Example::
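
For context, a minimal sketch of the flag this patch documents. It assumes the
`ReformerConfig` and `ReformerModelWithLMHead` classes from the transformers
library of this era; the assertion reflects the generic weight-tying behaviour
of `PreTrainedModel.tie_weights`, not anything Reformer-specific::

    from transformers import ReformerConfig, ReformerModelWithLMHead

    # Reformer defaults to tie_word_embeddings=False; opt in explicitly.
    # is_decoder=True is required by ReformerModelWithLMHead.
    config = ReformerConfig(tie_word_embeddings=True, is_decoder=True)
    model = ReformerModelWithLMHead(config)

    # With tying enabled, the LM head shares its weight matrix with the
    # input word embeddings instead of learning a separate projection.
    assert model.get_input_embeddings().weight is model.get_output_embeddings().weight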