mirror of https://github.com/huggingface/transformers.git
[docstring] Fix docstring for LukeConfig (#26858)
* Deleted LukeConfig and ran check_docstrings.py

* Filled docstring information

---------

Co-authored-by: louie <louisparizeau@Chicken.local>
parent db611aabee
commit 51042ae8e5
src/transformers/models/luke/configuration_luke.py

@@ -38,7 +38,7 @@ class LukeConfig(PretrainedConfig):
 
     Args:
-        vocab_size (`int`, *optional*, defaults to 30522):
+        vocab_size (`int`, *optional*, defaults to 50267):
             Vocabulary size of the LUKE model. Defines the number of different tokens that can be represented by the
             `inputs_ids` passed when calling [`LukeModel`].
         entity_vocab_size (`int`, *optional*, defaults to 500000):
@@ -70,12 +70,18 @@ class LukeConfig(PretrainedConfig):
             The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
         layer_norm_eps (`float`, *optional*, defaults to 1e-12):
             The epsilon used by the layer normalization layers.
-        use_entity_aware_attention (`bool`, defaults to `True`):
+        use_entity_aware_attention (`bool`, *optional*, defaults to `True`):
             Whether or not the model should use the entity-aware self-attention mechanism proposed in [LUKE: Deep
             Contextualized Entity Representations with Entity-aware Self-attention (Yamada et
             al.)](https://arxiv.org/abs/2010.01057).
         classifier_dropout (`float`, *optional*):
             The dropout ratio for the classification head.
+        pad_token_id (`int`, *optional*, defaults to 1):
+            Padding token id.
+        bos_token_id (`int`, *optional*, defaults to 0):
+            Beginning of stream token id.
+        eos_token_id (`int`, *optional*, defaults to 2):
+            End of stream token id.
 
     Examples:
 
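The net effect of the two hunks above is that the documented defaults now match the values `LukeConfig` actually uses. A minimal sketch to double-check this locally (assumes a `transformers` installation that includes this commit; the asserted values are simply the defaults documented in the diff):

```python
# Sanity check: a freshly constructed LukeConfig should expose the defaults
# that the corrected docstring now documents.
from transformers import LukeConfig

config = LukeConfig()
assert config.vocab_size == 50267                # previously misdocumented as 30522
assert config.entity_vocab_size == 500000
assert config.layer_norm_eps == 1e-12
assert config.use_entity_aware_attention is True
assert config.pad_token_id == 1                  # newly documented token ids
assert config.bos_token_id == 0
assert config.eos_token_id == 2
print("LukeConfig defaults match the docstring")
```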
utils/check_docstrings.py

@@ -356,7 +356,6 @@ OBJECTS_TO_IGNORE = [
     "LongformerConfig",
     "LongformerModel",
     "LongformerTokenizerFast",
-    "LukeConfig",
     "LukeModel",
     "LukeTokenizer",
     "LxmertTokenizerFast",
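Removing `"LukeConfig"` from `OBJECTS_TO_IGNORE` means the docstring consistency checker now validates `LukeConfig` instead of skipping it. A sketch of re-running the check after this change (assumes the working directory is the root of a transformers checkout, with the utility at `utils/check_docstrings.py` as edited above):

```python
# Run the docstring checker this commit edits; with "LukeConfig" no longer in
# OBJECTS_TO_IGNORE, a mismatch between documented and actual defaults would
# make the script fail, and check=True surfaces that as an exception here.
import subprocess

subprocess.run(["python", "utils/check_docstrings.py"], check=True)
```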