diff --git a/docs/source/en/main_classes/executorch.md b/docs/source/en/main_classes/executorch.md
index 28e0a445e79..3178085c913 100644
--- a/docs/source/en/main_classes/executorch.md
+++ b/docs/source/en/main_classes/executorch.md
@@ -27,7 +27,7 @@ ExecuTorch introduces well defined entry points to perform model, device, and/or
 
 An integration point is being developed to ensure that 🤗 Transformers can be exported using `torch.export`. The goal of this integration is not only to enable export but also to ensure that the exported artifact can be further lowered and optimized to run efficiently in `ExecuTorch`, particularly for mobile and edge use cases.
 
-[[autodoc]] integrations.executorch.TorchExportableModuleWithStaticCache
+[[autodoc]] TorchExportableModuleWithStaticCache
     - forward
 
-[[autodoc]] integrations.executorch.convert_and_export_with_cache
+[[autodoc]] convert_and_export_with_cache
diff --git a/docs/source/en/main_classes/text_generation.md b/docs/source/en/main_classes/text_generation.md
index e2c5ce9c0ba..574e4c75a6a 100644
--- a/docs/source/en/main_classes/text_generation.md
+++ b/docs/source/en/main_classes/text_generation.md
@@ -45,17 +45,17 @@ like token streaming.
 
 ## GenerationMixin
 
-[[autodoc]] generation.GenerationMixin
+[[autodoc]] GenerationMixin
 	- generate
 	- compute_transition_scores
 
 ## TFGenerationMixin
 
-[[autodoc]] generation.TFGenerationMixin
+[[autodoc]] TFGenerationMixin
 	- generate
 	- compute_transition_scores
 
 ## FlaxGenerationMixin
 
-[[autodoc]] generation.FlaxGenerationMixin
+[[autodoc]] FlaxGenerationMixin
 	- generate
diff --git a/src/transformers/integrations/executorch.py b/src/transformers/integrations/executorch.py
index c0adff386f6..258017f1418 100644
--- a/src/transformers/integrations/executorch.py
+++ b/src/transformers/integrations/executorch.py
@@ -12,11 +12,15 @@
 
 import torch
 
-from transformers import (
-    PreTrainedModel,
-    StaticCache,
-)
-from transformers.pytorch_utils import is_torch_greater_or_equal_than_2_3
+from ..utils.import_utils import is_torch_available
+
+
+if is_torch_available():
+    from transformers import (
+        PreTrainedModel,
+        StaticCache,
+    )
+    from transformers.pytorch_utils import is_torch_greater_or_equal_than_2_3
 
 
 class TorchExportableModuleWithStaticCache(torch.nn.Module):
diff --git a/src/transformers/utils/import_utils.py b/src/transformers/utils/import_utils.py
index 519755489a3..fbc248824a4 100755
--- a/src/transformers/utils/import_utils.py
+++ b/src/transformers/utils/import_utils.py
@@ -1751,9 +1751,7 @@ class _LazyModule(ModuleType):
     def __getattr__(self, name: str) -> Any:
         if name in self._objects:
             return self._objects[name]
-        if name in self._modules:
-            value = self._get_module(name)
-        elif name in self._object_missing_backend.keys():
+        if name in self._object_missing_backend.keys():
             missing_backends = self._object_missing_backend[name]
 
             class Placeholder(metaclass=DummyObject):
@@ -1769,6 +1767,8 @@ class _LazyModule(ModuleType):
         elif name in self._class_to_module.keys():
             module = self._get_module(self._class_to_module[name])
             value = getattr(module, name)
+        elif name in self._modules:
+            value = self._get_module(name)
         else:
             raise AttributeError(f"module {self.__name__} has no attribute {name}")
 
diff --git a/utils/check_repo.py b/utils/check_repo.py
index 3ecbd79eca4..98f96bcc78a 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -1089,18 +1089,34 @@ def check_public_method_exists(documented_methods_map):
             for submodule_name in nested_submodules:
                 if submodule_name == "transformers":
                     continue
-                submodule = getattr(submodule, submodule_name)
+
+                try:
+                    submodule = getattr(submodule, submodule_name)
+                except AttributeError:
+                    failures.append(f"Could not parse {submodule_name}. Are the required dependencies installed?")
+                    continue
+
         class_name = nested_path[-1]
-        obj_class = getattr(submodule, class_name)
+
+        try:
+            obj_class = getattr(submodule, class_name)
+        except AttributeError:
+            failures.append(f"Could not parse {submodule_name}. Are the required dependencies installed?")
+            continue
+
         # Checks that all explicitly documented methods are defined in the class
         for method in methods:
             if method == "all":  # Special keyword to document all public methods
                continue
-            if not hasattr(obj_class, method):
-                failures.append(
-                    "The following public method is explicitly documented but not defined in the corresponding "
-                    f"class. class: {obj}, method: {method}"
-                )
+            try:
+                if not hasattr(obj_class, method):
+                    failures.append(
+                        "The following public method is explicitly documented but not defined in the corresponding "
+                        f"class. class: {obj}, method: {method}. If the method is defined, this error can be due to "
+                        f"lacking dependencies."
+                    )
+            except ImportError:
+                pass
    if len(failures) > 0:
        raise Exception("\n".join(failures))
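
A note on the `_LazyModule.__getattr__` hunk: moving the `self._modules` branch below the `self._class_to_module` branch means a name registered both as a submodule and as a re-exported object now resolves to the object rather than the module. The snippet below is a minimal toy sketch of that resolution order, not the transformers implementation; the registry contents are invented for illustration.

    from types import ModuleType
    from typing import Any


    class ToyLazyModule(ModuleType):
        # Illustration-only stand-in for transformers' _LazyModule.
        def __init__(self, name: str, modules: set, class_to_module: dict):
            super().__init__(name)
            self._modules = modules                  # names of lazy submodules
            self._class_to_module = class_to_module  # object name -> owning submodule

        def _get_module(self, module_name: str) -> str:
            # The real class calls importlib.import_module(); a string suffices here.
            return f"<module {self.__name__}.{module_name}>"

        def __getattr__(self, name: str) -> Any:
            # Re-exported objects are resolved first ...
            if name in self._class_to_module:
                module = self._get_module(self._class_to_module[name])
                return f"<object {name} from {module}>"
            # ... and plain submodule access is now only the fallback.
            if name in self._modules:
                return self._get_module(name)
            raise AttributeError(f"module {self.__name__} has no attribute {name}")


    lazy = ToyLazyModule(
        "toy_transformers",
        modules={"executorch"},
        class_to_module={"convert_and_export_with_cache": "executorch"},
    )
    print(lazy.convert_and_export_with_cache)  # resolves as an object
    print(lazy.executorch)                     # submodules stay reachable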
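
The `try`/`except ImportError` around `hasattr` in `check_repo.py` is needed because `hasattr` only swallows `AttributeError`: when an attribute is resolved lazily and the backing import fails, the `ImportError` escapes. A self-contained repro sketch (toy class, not the transformers code):

    class LazilyResolved:
        def __getattr__(self, name):
            # Mimics a lazy attribute whose backing import is missing.
            raise ImportError("No module named 'some_backend'")


    obj = LazilyResolved()
    try:
        hasattr(obj, "forward")  # hasattr only catches AttributeError
    except ImportError as err:
        print(f"leaked through hasattr: {err}")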
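
Finally, the `[[autodoc]]` changes document `TorchExportableModuleWithStaticCache` and `convert_and_export_with_cache` by their short names, which only resolves once the lazy-module fallback above is in place. A hedged usage sketch of the export helper follows; the model checkpoint and the static-cache generation settings are assumptions for illustration, not something this diff pins down.

    from transformers import AutoModelForCausalLM, GenerationConfig
    from transformers.integrations.executorch import convert_and_export_with_cache

    # Assumed setup: TorchExportableModuleWithStaticCache expects a model whose
    # generation config requests a static KV cache.
    model = AutoModelForCausalLM.from_pretrained("hf-internal-testing/tiny-random-gpt2")
    model.generation_config = GenerationConfig(
        use_cache=True,
        cache_implementation="static",
        cache_config={"batch_size": 1, "max_cache_len": 32},
    )

    exported_program = convert_and_export_with_cache(model)
    print(type(exported_program))  # a torch.export ExportedProgram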