Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00
Encoder decoder config docs (#6195)
* Adding docs for how to load an encoder_decoder pretrained model with individual config objects
* Adding docs for loading an encoder_decoder config from a pretrained folder
* Fixing W293 (blank line contains whitespace)
* Update src/transformers/modeling_encoder_decoder.py
* Update src/transformers/modeling_encoder_decoder.py
* Update src/transformers/modeling_encoder_decoder.py
* Apply suggestions from code review: the model file should only show examples for how to load and save the model
* Update src/transformers/configuration_encoder_decoder.py
* Update src/transformers/configuration_encoder_decoder.py
* Fix space

Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
This commit is contained in:
parent 1d5c3a3d96
commit 7ea9b2db37
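For context, the "individual config objects" workflow described in the commit message looks roughly like the sketch below. This is not part of the diff; it assumes the EncoderDecoderConfig/EncoderDecoderModel API of the transformers release this commit targets, and the choice of BertConfig is only illustrative:

>>> from transformers import BertConfig, EncoderDecoderConfig, EncoderDecoderModel

>>> # build a combined encoder-decoder config from two individual config objects
>>> config_encoder = BertConfig()
>>> config_decoder = BertConfig()
>>> config = EncoderDecoderConfig.from_encoder_decoder_configs(config_encoder, config_decoder)

>>> # instantiate a (randomly initialized) encoder-decoder model from that combined config
>>> model = EncoderDecoderModel(config=config)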
@@ -56,6 +56,15 @@ class EncoderDecoderConfig(PretrainedConfig):
>>> # Accessing the model configuration
>>> config_encoder = model.config.encoder
>>> config_decoder = model.config.decoder
>>> # set decoder config to causal lm
>>> config_decoder.is_decoder = True

>>> # Saving the model, including its configuration
>>> model.save_pretrained('my-model')

>>> # loading model and config from pretrained folder
>>> encoder_decoder_config = EncoderDecoderConfig.from_pretrained('my-model')
>>> model = EncoderDecoderModel.from_pretrained('my-model', config=encoder_decoder_config)
"""

model_type = "encoder_decoder"
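As a usage note on the save/load round trip above (a sketch, not part of the diff): the nested decoder settings are serialized into the saved config.json, so the flag set before saving should be restored after loading:

>>> # illustrative check: the decoder flag set before model.save_pretrained('my-model') survives the reload
>>> encoder_decoder_config = EncoderDecoderConfig.from_pretrained('my-model')
>>> assert encoder_decoder_config.decoder.is_decoder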
@@ -127,7 +127,13 @@ class EncoderDecoderModel(PreTrainedModel):
Examples::

>>> from transformers import EncoderDecoderModel
>>> model = EncoderDecoderModel.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert
>>> # initialize a bert2bert from two pretrained BERT models. Note that the cross-attention layers will be randomly initialized
>>> model = EncoderDecoderModel.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased')
>>> # saving model after fine-tuning
>>> model.save_pretrained("./bert2bert")
>>> # load fine-tuned model
>>> model = EncoderDecoderModel.from_pretrained("./bert2bert")

"""

kwargs_encoder = {
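The "fine-tuning" mentioned in the comments above is not shown in the diff. A minimal sketch of what a single training forward pass could look like with this API (the sentence, the tokenizer choice, and reusing the input as decoder input and labels are illustrative only, and the call signature assumes the transformers release this commit targets):

>>> from transformers import BertTokenizer, EncoderDecoderModel

>>> tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
>>> model = EncoderDecoderModel.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased')

>>> input_ids = tokenizer("Hello, my dog is cute", return_tensors="pt")["input_ids"]
>>> # reuse the input as decoder input and labels, purely to demonstrate the training forward pass
>>> outputs = model(input_ids=input_ids, decoder_input_ids=input_ids, labels=input_ids)
>>> loss = outputs[0]  # cross-entropy loss that trains the randomly initialized cross-attention layers
>>> loss.backward()
>>> model.save_pretrained("./bert2bert")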