Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)

parent 89575b567e
commit 62aa270f2a
@@ -344,10 +344,15 @@ def check_support_param_buffer_assignment(model_to_load, state_dict, start_prefix=""):
     as when loading in empty weights) by first checking
     if the model explicitly disables it, then by ensuring that the state dict keys
     are a subset of the model's parameters.
+
+    Note: We fully disable this if we are using `deepspeed`
     """
     if len([key for key in state_dict if key.startswith(start_prefix)]) == 0:
         return False
 
+    if is_deepspeed_zero3_enabled():
+        return False
+
     # Some models explicitly do not support param buffer assignment
     if not getattr(model_to_load, "_supports_param_buffer_assignment", True):
         logger.debug(
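For context, here is a minimal sketch of the two loading strategies this check chooses between. It assumes a plain torch nn.Module rather than the transformers loading path, and uses PyTorch's load_state_dict(assign=True) (available since torch 2.1) to stand in for assignment-style loading:

import torch
import torch.nn as nn

model = nn.Linear(4, 4)
ckpt = {"weight": torch.randn(4, 4), "bias": torch.randn(4)}

# Copy-based loading: each checkpoint tensor is copied into the
# existing Parameter storage. This is the safe path when a framework
# such as DeepSpeed ZeRO-3 shards or otherwise owns that storage,
# which is why the check above returns False under zero3.
model.load_state_dict(ckpt)

# Assignment-based loading: the parameters are rebound to the
# checkpoint tensors themselves, skipping the copy. This is faster
# when the model was materialized with empty/meta weights, but it is
# only valid if the checkpoint keys are a subset of the model's
# parameters, hence the subset check described in the docstring.
model.load_state_dict(ckpt, assign=True)

Note that after assign=True the model's parameters alias the checkpoint tensors, so any in-place change to ckpt["weight"] would be visible through the model; that aliasing is one reason the check is conservative and returns False whenever it cannot prove assignment is safe.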