Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
Fix GPT-NeoX doc examples (#19033)
This commit is contained in:
parent 4eb36f2921
commit 693ba2cc79
@@ -354,7 +354,7 @@ GPT_NEOX_INPUTS_DOCSTRING = r"""
         input_ids (`torch.LongTensor` of shape `({0})`):
             Indices of input sequence tokens in the vocabulary.
 
-            Indices can be obtained using [`GPTNeoXTokenizer`]. See [`PreTrainedTokenizer.encode`] and
+            Indices can be obtained using [`GPTNeoXTokenizerFast`]. See [`PreTrainedTokenizer.encode`] and
             [`PreTrainedTokenizer.__call__`] for details.
 
             [What are input IDs?](../glossary#input-ids)
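This hunk replaces a reference to `GPTNeoXTokenizer`, a class the library does not ship (GPT-NeoX provides only a fast, tokenizers-backed implementation), with `GPTNeoXTokenizerFast`. A minimal sketch of obtaining the `input_ids` this docstring describes, using the corrected class and the fully qualified hub id that the second hunk introduces:

```python
from transformers import GPTNeoXTokenizerFast

# GPT-NeoX ships only a fast (Rust-backed) tokenizer, hence the corrected class name.
tokenizer = GPTNeoXTokenizerFast.from_pretrained("EleutherAI/gpt-neox-20b")

# "Indices of input sequence tokens in the vocabulary": a (1, sequence_length)
# torch.LongTensor matching the documented `input_ids` argument.
input_ids = tokenizer("Hello, my dog is cute", return_tensors="pt").input_ids
print(input_ids.shape)
```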
@@ -601,13 +601,13 @@ class GPTNeoXForCausalLM(GPTNeoXPreTrainedModel):
         Example:
 
         ```python
-        >>> from transformers import GPTNeoXTokenizer, GPTNeoXForCausalLM, GPTNeoXConfig
+        >>> from transformers import GPTNeoXTokenizerFast, GPTNeoXForCausalLM, GPTNeoXConfig
         >>> import torch
 
-        >>> tokenizer = GPTNeoXTokenizer.from_pretrained("gpt-neox-20b")
-        >>> config = GPTNeoXConfig.from_pretrained("gpt-neox-20b")
+        >>> tokenizer = GPTNeoXTokenizerFast.from_pretrained("EleutherAI/gpt-neox-20b")
+        >>> config = GPTNeoXConfig.from_pretrained("EleutherAI/gpt-neox-20b")
         >>> config.is_decoder = True
-        >>> model = GPTNeoXForCausalLM.from_pretrained("gpt-neox-20b", config=config)
+        >>> model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/gpt-neox-20b", config=config)
 
         >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
         >>> outputs = model(**inputs)
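This hunk fixes the doctest in two ways: the nonexistent `GPTNeoXTokenizer` becomes `GPTNeoXTokenizerFast`, and the bare `"gpt-neox-20b"` id, which is not a valid hub path, becomes the fully qualified `"EleutherAI/gpt-neox-20b"`. Assembled outside the doctest, the corrected example reads as below; note that the checkpoint weights are tens of gigabytes, so this is a sketch of the fixed API usage rather than something to run casually, and the `torch.no_grad()` wrapper plus the final logits line are illustrative additions, not part of the docstring.

```python
import torch
from transformers import GPTNeoXConfig, GPTNeoXForCausalLM, GPTNeoXTokenizerFast

# Fully qualified hub id, as introduced by the fix.
tokenizer = GPTNeoXTokenizerFast.from_pretrained("EleutherAI/gpt-neox-20b")
config = GPTNeoXConfig.from_pretrained("EleutherAI/gpt-neox-20b")
config.is_decoder = True
model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/gpt-neox-20b", config=config)

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
with torch.no_grad():  # inference only; no gradients needed
    outputs = model(**inputs)

# Next-token prediction scores, shape (batch_size, sequence_length, vocab_size).
logits = outputs.logits
```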