Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00
Fix quantization docs typo (#22666)
parent 3876fc6839
commit 14fc1a2467
@@ -33,7 +33,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 model_id = "bigscience/bloom-1b7"
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
-model = AutoModelForCausalLM.from_pretrained(model_id, device_map == "auto", load_in_8bit=True)
+model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_8bit=True)
 ```
 
 Then, use your model as you would usually use a [`PreTrainedModel`].
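A minimal sketch of that usage, assuming the 8-bit model is loaded as in the corrected line above (requires `bitsandbytes` and `accelerate` to be installed and a GPU to be available; the prompt and `max_new_tokens` value are illustrative):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "bigscience/bloom-1b7"
tokenizer = AutoTokenizer.from_pretrained(model_id)
# Corrected call from this commit: device_map is a keyword argument, not a comparison.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_8bit=True)

# Use the quantized model like any other PreTrainedModel: tokenize a prompt and generate.
inputs = tokenizer("Hello, my name is", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```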