mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
Enable code-specific revision for code on the Hub (#23799)
* Enable code-specific revision for code on the Hub * invalidate old revision
This commit is contained in:
parent
edf7772826
commit
17a55534f5
@ -316,7 +316,7 @@ def get_cached_module_file(
|
||||
)
|
||||
new_files.append(f"{module_needed}.py")
|
||||
|
||||
if len(new_files) > 0:
|
||||
if len(new_files) > 0 and revision is None:
|
||||
new_files = "\n".join([f"- {f}" for f in new_files])
|
||||
repo_type_str = "" if repo_type is None else f"{repo_type}s/"
|
||||
url = f"https://huggingface.co/{repo_type_str}{pretrained_model_name_or_path}"
|
||||
@ -340,6 +340,7 @@ def get_class_from_dynamic_module(
|
||||
revision: Optional[str] = None,
|
||||
local_files_only: bool = False,
|
||||
repo_type: Optional[str] = None,
|
||||
code_revision: Optional[str] = None,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
@ -391,6 +392,10 @@ def get_class_from_dynamic_module(
|
||||
If `True`, will only try to load the tokenizer configuration from local files.
|
||||
repo_type (`str`, *optional*):
|
||||
Specify the repo type (useful when downloading from a space for instance).
|
||||
code_revision (`str`, *optional*, defaults to `"main"`):
|
||||
The specific revision to use for the code on the Hub, if the code lives in a different repository than the
|
||||
rest of the model. It can be a branch name, a tag name, or a commit id, since we use a git-based system for
|
||||
storing models and other artifacts on huggingface.co, so `code_revision` can be any identifier allowed by git.
|
||||
|
||||
<Tip>
|
||||
|
||||
@ -415,12 +420,12 @@ def get_class_from_dynamic_module(
|
||||
# Catch the name of the repo if it's specified in `class_reference`
|
||||
if "--" in class_reference:
|
||||
repo_id, class_reference = class_reference.split("--")
|
||||
# Invalidate revision since it's not relevant for this repo
|
||||
revision = "main"
|
||||
else:
|
||||
repo_id = pretrained_model_name_or_path
|
||||
module_file, class_name = class_reference.split(".")
|
||||
|
||||
if code_revision is None and pretrained_model_name_or_path == repo_id:
|
||||
code_revision = revision
|
||||
# And lastly we get the class inside our newly created module
|
||||
final_module = get_cached_module_file(
|
||||
repo_id,
|
||||
@ -430,7 +435,7 @@ def get_class_from_dynamic_module(
|
||||
resume_download=resume_download,
|
||||
proxies=proxies,
|
||||
use_auth_token=use_auth_token,
|
||||
revision=revision,
|
||||
revision=code_revision,
|
||||
local_files_only=local_files_only,
|
||||
repo_type=repo_type,
|
||||
)
|
||||
|
@ -128,6 +128,11 @@ FROM_PRETRAINED_TORCH_DOCSTRING = """
|
||||
Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
|
||||
should only be set to `True` for repositories you trust and in which you have read the code, as it will
|
||||
execute code present on the Hub on your local machine.
|
||||
code_revision (`str`, *optional*, defaults to `"main"`):
|
||||
The specific revision to use for the code on the Hub, if the code lives in a different repository than
|
||||
the rest of the model. It can be a branch name, a tag name, or a commit id, since we use a git-based
|
||||
system for storing models and other artifacts on huggingface.co, so `code_revision` can be any identifier
|
||||
allowed by git.
|
||||
kwargs (additional keyword arguments, *optional*):
|
||||
Can be used to update the configuration object (after it being loaded) and initiate the model (e.g.,
|
||||
`output_attentions=True`). Behaves differently depending on whether a `config` is provided or
|
||||
@ -224,6 +229,11 @@ FROM_PRETRAINED_TF_DOCSTRING = """
|
||||
Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
|
||||
should only be set to `True` for repositories you trust and in which you have read the code, as it will
|
||||
execute code present on the Hub on your local machine.
|
||||
code_revision (`str`, *optional*, defaults to `"main"`):
|
||||
The specific revision to use for the code on the Hub, if the code lives in a different repository than
|
||||
the rest of the model. It can be a branch name, a tag name, or a commit id, since we use a git-based
|
||||
system for storing models and other artifacts on huggingface.co, so `code_revision` can be any identifier
|
||||
allowed by git.
|
||||
kwargs (additional keyword arguments, *optional*):
|
||||
Can be used to update the configuration object (after it being loaded) and initiate the model (e.g.,
|
||||
`output_attentions=True`). Behaves differently depending on whether a `config` is provided or
|
||||
@ -320,6 +330,11 @@ FROM_PRETRAINED_FLAX_DOCSTRING = """
|
||||
Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
|
||||
should only be set to `True` for repositories you trust and in which you have read the code, as it will
|
||||
execute code present on the Hub on your local machine.
|
||||
code_revision (`str`, *optional*, defaults to `"main"`):
|
||||
The specific revision to use for the code on the Hub, if the code lives in a different repository than
|
||||
the rest of the model. It can be a branch name, a tag name, or a commit id, since we use a git-based
|
||||
system for storing models and other artifacts on huggingface.co, so `code_revision` can be any identifier
|
||||
allowed by git.
|
||||
kwargs (additional keyword arguments, *optional*):
|
||||
Can be used to update the configuration object (after it being loaded) and initiate the model (e.g.,
|
||||
`output_attentions=True`). Behaves differently depending on whether a `config` is provided or
|
||||
@ -408,6 +423,7 @@ class _BaseAutoModelClass:
|
||||
else:
|
||||
repo_id = config.name_or_path
|
||||
model_class = get_class_from_dynamic_module(class_ref, repo_id, **kwargs)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
return model_class._from_config(config, **kwargs)
|
||||
elif type(config) in cls._model_mapping.keys():
|
||||
model_class = _get_model_class(config, cls._model_mapping)
|
||||
@ -425,6 +441,7 @@ class _BaseAutoModelClass:
|
||||
kwargs["_from_auto"] = True
|
||||
hub_kwargs_names = [
|
||||
"cache_dir",
|
||||
"code_revision",
|
||||
"force_download",
|
||||
"local_files_only",
|
||||
"proxies",
|
||||
@ -464,6 +481,7 @@ class _BaseAutoModelClass:
|
||||
model_class = get_class_from_dynamic_module(
|
||||
class_ref, pretrained_model_name_or_path, **hub_kwargs, **kwargs
|
||||
)
|
||||
_ = hub_kwargs.pop("code_revision", None)
|
||||
return model_class.from_pretrained(
|
||||
pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
|
||||
)
|
||||
|
@ -938,6 +938,7 @@ class AutoConfig:
|
||||
)
|
||||
class_ref = config_dict["auto_map"]["AutoConfig"]
|
||||
config_class = get_class_from_dynamic_module(class_ref, pretrained_model_name_or_path, **kwargs)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
|
||||
elif "model_type" in config_dict:
|
||||
config_class = CONFIG_MAPPING[config_dict["model_type"]]
|
||||
|
@ -337,6 +337,7 @@ class AutoFeatureExtractor:
|
||||
feature_extractor_class = get_class_from_dynamic_module(
|
||||
feature_extractor_auto_map, pretrained_model_name_or_path, **kwargs
|
||||
)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
else:
|
||||
feature_extractor_class = feature_extractor_class_from_name(feature_extractor_class)
|
||||
|
||||
|
@ -361,6 +361,7 @@ class AutoImageProcessor:
|
||||
image_processor_class = get_class_from_dynamic_module(
|
||||
image_processor_auto_map, pretrained_model_name_or_path, **kwargs
|
||||
)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
else:
|
||||
image_processor_class = image_processor_class_from_name(image_processor_class)
|
||||
|
||||
|
@ -259,6 +259,7 @@ class AutoProcessor:
|
||||
processor_class = get_class_from_dynamic_module(
|
||||
processor_auto_map, pretrained_model_name_or_path, **kwargs
|
||||
)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
else:
|
||||
processor_class = processor_class_from_name(processor_class)
|
||||
|
||||
|
@ -678,6 +678,7 @@ class AutoTokenizer:
|
||||
else:
|
||||
class_ref = tokenizer_auto_map[0]
|
||||
tokenizer_class = get_class_from_dynamic_module(class_ref, pretrained_model_name_or_path, **kwargs)
|
||||
_ = kwargs.pop("code_revision", None)
|
||||
|
||||
elif use_fast and not config_tokenizer_class.endswith("Fast"):
|
||||
tokenizer_class_candidate = f"{config_tokenizer_class}Fast"
|
||||
|
Loading…
Reference in New Issue
Block a user