mirror of
https://github.com/huggingface/transformers.git
synced 2025-08-02 11:11:05 +06:00
parent
3c0ce60855
commit
a36983653e
@ -65,7 +65,7 @@ class GPT2Config(PretrainedConfig):
      Activation function, to be selected in the list `["relu", "silu", "gelu", "tanh", "gelu_new"]`.
  resid_pdrop (`float`, *optional*, defaults to 0.1):
      The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
- embd_pdrop (`int`, *optional*, defaults to 0.1):
+ embd_pdrop (`float`, *optional*, defaults to 0.1):
      The dropout ratio for the embeddings.
  attn_pdrop (`float`, *optional*, defaults to 0.1):
      The dropout ratio for the attention.
|
Loading…
Reference in New Issue
Block a user