Mirror of https://github.com/huggingface/transformers.git
Fix doc examples: cannot import name (#14698)
* Fix doc examples: cannot import name

* Remove copy because of some necessary minor changes (maybe add copy to the individual methods instead)

* Keep copy with some modifications

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent fc74c84537
commit ca0b82bbd7
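The root cause: BigBirdPegasus reuses Pegasus's tokenizer, so no `BigBirdPegasusTokenizer` class is exported from `transformers`, and every doc example that imported it failed at the import line. A minimal reproduction of the failure (a sketch, assuming a transformers release that includes BigBirdPegasus):

```python
# Sketch of the failure the old doc examples triggered; assumes a transformers
# release with BigBirdPegasus support installed.
try:
    from transformers import BigBirdPegasusTokenizer  # this class was never exported
except ImportError as err:
    print(err)  # cannot import name 'BigBirdPegasusTokenizer' from 'transformers'

# BigBirdPegasus shares Pegasus's vocabulary, so this is the working import:
from transformers import PegasusTokenizer
```

The diff below applies that rename in four places in `modeling_bigbird_pegasus.py`.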
```diff
--- a/src/transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py
+++ b/src/transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py
@@ -2117,7 +2117,7 @@ class BigBirdPegasusDecoder(BigBirdPegasusPreTrainedModel):
                 Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                 provide it.

-                Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
+                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                 :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                 for details.

```
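The docstring's contract in runnable form: token indices come from the tokenizer's `__call__` or `encode`. A short sketch (assumes network access to fetch the `google/bigbird-pegasus-large-arxiv` checkpoint named elsewhere in this file):

```python
from transformers import PegasusTokenizer

tokenizer = PegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")

# __call__ produces the input indices the docstring describes, plus an
# attention mask distinguishing real tokens (1) from padding (0).
encoded = tokenizer("Big Bird attends sparsely.", return_tensors="pt")
print(encoded["input_ids"])
print(encoded["attention_mask"])

# encode() returns the bare list of indices, without the mask.
print(tokenizer.encode("Big Bird attends sparsely."))
```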
```diff
@@ -2862,7 +2862,7 @@ class BigBirdPegasusDecoderWrapper(BigBirdPegasusPreTrainedModel):
         return self.decoder(*args, **kwargs)


-# Copied from transformers.models.bart.modeling_bart.BartForCausalLM with Bart->BigBirdPegasus, 'facebook/bart-large'->"google/bigbird-pegasus-large-arxiv"
+# Copied from transformers.models.bart.modeling_bart.BartForCausalLM with BartDecoderWrapper->BigBirdPegasusDecoderWrapper, BartForCausalLM->BigBirdPegasusForCausalLM, BartPreTrainedModel->BigBirdPegasusPreTrainedModel, BartTokenizer->PegasusTokenizer, 'facebook/bart-large'->"google/bigbird-pegasus-large-arxiv"
 class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
     def __init__(self, config):
         config = copy.deepcopy(config)
```
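Context for this hunk: `# Copied from` comments drive transformers' repo-consistency check (`utils/check_copies.py`), which regenerates the annotated object from the named Bart source after applying each `Old->New` substitution and fails CI on drift. The blanket `Bart->BigBirdPegasus` rule was the bug, since it rewrote `BartTokenizer` into the nonexistent `BigBirdPegasusTokenizer`; the fix spells out explicit per-name replacements so `BartTokenizer` maps to `PegasusTokenizer` instead. A rough sketch of the substitution step (`apply_copy_pattern` is a hypothetical helper, not the actual check_copies implementation):

```python
def apply_copy_pattern(source: str, pattern: str) -> str:
    """Apply the 'Old->New' rules from a '# Copied from ... with <rules>' comment.

    Illustrative only; the real logic lives in transformers' utils/check_copies.py.
    """
    for rule in pattern.split(","):
        old, new = (part.strip().strip("'\"") for part in rule.strip().split("->"))
        source = source.replace(old, new)
    return source


rules = (
    "BartDecoderWrapper->BigBirdPegasusDecoderWrapper, "
    "BartForCausalLM->BigBirdPegasusForCausalLM, "
    "BartPreTrainedModel->BigBirdPegasusPreTrainedModel, "
    "BartTokenizer->PegasusTokenizer, "
    "'facebook/bart-large'->\"google/bigbird-pegasus-large-arxiv\""
)
line = "tokenizer = BartTokenizer.from_pretrained('facebook/bart-large')"
print(apply_copy_pattern(line, rules))
# tokenizer = PegasusTokenizer.from_pretrained('google/bigbird-pegasus-large-arxiv')
```

The next hunk repeats the docstring fix inside `BigBirdPegasusForCausalLM`.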
```diff
@@ -2917,7 +2917,7 @@ class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
                 Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                 provide it.

-                Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
+                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                 :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                 for details.

```
```diff
@@ -2985,9 +2985,9 @@ class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):

         Example::

-            >>> from transformers import BigBirdPegasusTokenizer, BigBirdPegasusForCausalLM
+            >>> from transformers import PegasusTokenizer, BigBirdPegasusForCausalLM

-            >>> tokenizer = BigBirdPegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
+            >>> tokenizer = PegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
             >>> model = BigBirdPegasusForCausalLM.from_pretrained("google/bigbird-pegasus-large-arxiv", add_cross_attention=False)
             >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."
             >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
```
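For reference, the corrected doctest expanded into a standalone script (assumes `torch` and `transformers` are installed; `from_pretrained` downloads the checkpoint on first use, and the closing logits lines are an assumed continuation rather than part of this diff):

```python
import torch
from transformers import PegasusTokenizer, BigBirdPegasusForCausalLM

tokenizer = PegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
model = BigBirdPegasusForCausalLM.from_pretrained(
    "google/bigbird-pegasus-large-arxiv", add_cross_attention=False
)
assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Per-position next-token scores over the vocabulary.
logits = outputs.logits
print(logits.shape)  # (batch_size, sequence_length, vocab_size)
```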