Mirror of https://github.com/huggingface/transformers.git — synced 2025-08-01 02:31:11 +06:00
[docs] Fix doc format (#28684)

* fix hfoptions
* revert changes to other files
* fix

This commit is contained in: parent 8278b1538e, commit f40b87de0c
@@ -956,6 +956,8 @@ engine = deepspeed.initialize(model=model, config_params=ds_config, ...)

</hfoption>
<hfoption id="non-pretrained model">

[`HfDeepSpeedConfig`] is not required for ZeRO-1 or ZeRO-2.

```py
from transformers.integrations import HfDeepSpeedConfig
from transformers import AutoModel, AutoConfig
@@ -969,7 +971,8 @@ model = AutoModel.from_config(config)

engine = deepspeed.initialize(model=model, config_params=ds_config, ...)
```

[`HfDeepSpeedConfig`] is not required for ZeRO-1 or ZeRO-2.

</hfoption>
</hfoptions>

### Non-Trainer ZeRO Inference
Loading…
Reference in New Issue
Block a user