# Cohere

## Usage tips

The model and tokenizer can be loaded via:
```python
# pip install transformers
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "CohereForAI/c4ai-command-r7b-12-2024"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Format message with the command-r chat template
messages = [{"role": "user", "content": "Hello, how are you?"}]
input_ids = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt")

gen_tokens = model.generate(
    input_ids,
    max_new_tokens=100,
    do_sample=True,
    temperature=0.3,
)

gen_text = tokenizer.decode(gen_tokens[0])
print(gen_text)
```
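On memory-constrained hardware, the same checkpoint can also be loaded in reduced precision with automatic device placement. The snippet below is a minimal sketch, assuming `torch` and `accelerate` are installed; `torch_dtype` and `device_map` are generic `from_pretrained` options, not specific to this model:

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "CohereForAI/c4ai-command-r7b-12-2024"
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Load the weights in float16 and let Accelerate decide device placement
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    device_map="auto",
)

# With device_map set, move inputs to the model's device before generating
messages = [{"role": "user", "content": "Hello, how are you?"}]
input_ids = tokenizer.apply_chat_template(
    messages, tokenize=True, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
```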
## Cohere2Config

[[autodoc]] Cohere2Config
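Instantiating the configuration with its defaults and building a model from it follows the usual Transformers config pattern; a minimal sketch (the exact default values are documented in the generated reference above):

```python
from transformers import Cohere2Config, Cohere2Model

# Configuration with library defaults (assumed here; see the reference above)
configuration = Cohere2Config()

# Randomly initialized model built from that configuration
model = Cohere2Model(configuration)

# The configuration can be read back from the model
configuration = model.config
```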
## Cohere2Model

[[autodoc]] Cohere2Model
    - forward

## Cohere2ForCausalLM

[[autodoc]] Cohere2ForCausalLM
    - forward