mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-16 19:18:24 +06:00
Update code example according to deprecation of AutoModelWithLMHead (#7555)
'The class `AutoModelWithLMHead` is deprecated and will be removed in a future version. Please use `AutoModelForCausalLM` for causal language models, `AutoModelForMaskedLM` for masked language models and `AutoModelForSeq2SeqLM` for encoder-decoder models.'
I don't know how to change the 'How to use this model directly from the 🤗/transformers library:' part, since it is not part of the model paper
This commit is contained in:
parent
0d79de7322
commit
1a00f46c74
@ -31,12 +31,12 @@ ArXiv paper: [https://arxiv.org/abs/1911.00536](https://arxiv.org/abs/1911.00536
|
|||||||
Now we are ready to try out how the model works as a chatting partner!
|
Now we are ready to try out how the model works as a chatting partner!
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from transformers import AutoModelWithLMHead, AutoTokenizer
|
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||||
import torch
|
import torch
|
||||||
|
|
||||||
|
|
||||||
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
|
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
|
||||||
model = AutoModelWithLMHead.from_pretrained("microsoft/DialoGPT-large")
|
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
|
||||||
|
|
||||||
# Let's chat for 5 lines
|
# Let's chat for 5 lines
|
||||||
for step in range(5):
|
for step in range(5):
|
||||||
|
@ -31,12 +31,12 @@ ArXiv paper: [https://arxiv.org/abs/1911.00536](https://arxiv.org/abs/1911.00536
|
|||||||
Now we are ready to try out how the model works as a chatting partner!
|
Now we are ready to try out how the model works as a chatting partner!
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from transformers import AutoModelWithLMHead, AutoTokenizer
|
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||||
import torch
|
import torch
|
||||||
|
|
||||||
|
|
||||||
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
|
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
|
||||||
model = AutoModelWithLMHead.from_pretrained("microsoft/DialoGPT-medium")
|
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
|
||||||
|
|
||||||
# Let's chat for 5 lines
|
# Let's chat for 5 lines
|
||||||
for step in range(5):
|
for step in range(5):
|
||||||
|
@ -31,12 +31,12 @@ ArXiv paper: [https://arxiv.org/abs/1911.00536](https://arxiv.org/abs/1911.00536
|
|||||||
Now we are ready to try out how the model works as a chatting partner!
|
Now we are ready to try out how the model works as a chatting partner!
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from transformers import AutoModelWithLMHead, AutoTokenizer
|
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||||
import torch
|
import torch
|
||||||
|
|
||||||
|
|
||||||
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
|
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
|
||||||
model = AutoModelWithLMHead.from_pretrained("microsoft/DialoGPT-small")
|
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-small")
|
||||||
|
|
||||||
# Let's chat for 5 lines
|
# Let's chat for 5 lines
|
||||||
for step in range(5):
|
for step in range(5):
|
||||||
|
Loading…
Reference in New Issue
Block a user