Mirror of https://github.com/huggingface/transformers.git
Fix flax failures (#33912)
* Few fixes here and there
* Remove typos
* Remove typos
parent e878eaa9fc
commit f052e94bcc
```diff
@@ -27,7 +27,7 @@ ExecuTorch introduces well defined entry points to perform model, device, and/or
 
 An integration point is being developed to ensure that 🤗 Transformers can be exported using `torch.export`. The goal of this integration is not only to enable export but also to ensure that the exported artifact can be further lowered and optimized to run efficiently in `ExecuTorch`, particularly for mobile and edge use cases.
 
-[[autodoc]] integrations.executorch.TorchExportableModuleWithStaticCache
+[[autodoc]] TorchExportableModuleWithStaticCache
     - forward
 
-[[autodoc]] integrations.executorch.convert_and_export_with_cache
+[[autodoc]] convert_and_export_with_cache
```
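The shortened `[[autodoc]]` paths only resolve if the two ExecuTorch objects are importable straight from the package root, which is what the import changes further down arrange. A quick sanity check, assuming a PyTorch install (the top-level re-export is implied by the diff rather than stated in it):

```python
# Hypothetical check: both names should now resolve from the package root,
# while the actual definitions stay in transformers.integrations.executorch.
from transformers import (
    TorchExportableModuleWithStaticCache,
    convert_and_export_with_cache,
)

print(TorchExportableModuleWithStaticCache.__module__)
print(convert_and_export_with_cache.__module__)
```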
```diff
@@ -45,17 +45,17 @@ like token streaming.
 
 ## GenerationMixin
 
-[[autodoc]] generation.GenerationMixin
+[[autodoc]] GenerationMixin
    - generate
    - compute_transition_scores
 
 ## TFGenerationMixin
 
-[[autodoc]] generation.TFGenerationMixin
+[[autodoc]] TFGenerationMixin
    - generate
    - compute_transition_scores
 
 ## FlaxGenerationMixin
 
-[[autodoc]] generation.FlaxGenerationMixin
+[[autodoc]] FlaxGenerationMixin
    - generate
```
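The same shortening applies to the three generation mixins. Only `GenerationMixin` is usable without an extra backend; the TF and Flax variants need tensorflow and flax respectively, and when those are missing the lazy module hands back a dummy placeholder (see the `_LazyModule` hunks below). A minimal check for the PyTorch mixin:

```python
from transformers import GenerationMixin

# Both methods documented above exist on the class itself.
assert hasattr(GenerationMixin, "generate")
assert hasattr(GenerationMixin, "compute_transition_scores")
```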
```diff
@@ -12,11 +12,15 @@
 
 import torch
 
-from transformers import (
-    PreTrainedModel,
-    StaticCache,
-)
-from transformers.pytorch_utils import is_torch_greater_or_equal_than_2_3
+from ..utils.import_utils import is_torch_available
+
+
+if is_torch_available():
+    from transformers import (
+        PreTrainedModel,
+        StaticCache,
+    )
+    from transformers.pytorch_utils import is_torch_greater_or_equal_than_2_3
 
 
 class TorchExportableModuleWithStaticCache(torch.nn.Module):
```
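Before this hunk, the module pulled in torch-only symbols at import time (`transformers.pytorch_utils` imports `torch` directly), so merely importing it on a flax-only CI job raised. The fix wraps the torch-dependent imports in an `is_torch_available()` guard. A standalone sketch of the pattern, with the hypothetical `heavy_dep` standing in for torch:

```python
# Minimal sketch of the soft-dependency guard (hypothetical backend name).
import importlib.util


def is_heavy_dep_available() -> bool:
    # transformers uses its own import_utils helpers; find_spec is the
    # stdlib way to test for a package without importing it.
    return importlib.util.find_spec("heavy_dep") is not None


if is_heavy_dep_available():
    import heavy_dep  # only executed when the backend is installed

    def exported_fn(x):
        return heavy_dep.do_work(x)
else:
    def exported_fn(x):
        raise ImportError("exported_fn requires `heavy_dep`; please install it.")
```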
```diff
@@ -1751,9 +1751,7 @@ class _LazyModule(ModuleType):
     def __getattr__(self, name: str) -> Any:
         if name in self._objects:
             return self._objects[name]
-        if name in self._modules:
-            value = self._get_module(name)
-        elif name in self._object_missing_backend.keys():
+        if name in self._object_missing_backend.keys():
             missing_backends = self._object_missing_backend[name]
 
             class Placeholder(metaclass=DummyObject):
```
```diff
@@ -1769,6 +1767,8 @@ class _LazyModule(ModuleType):
         elif name in self._class_to_module.keys():
             module = self._get_module(self._class_to_module[name])
             value = getattr(module, name)
+        elif name in self._modules:
+            value = self._get_module(name)
         else:
             raise AttributeError(f"module {self.__name__} has no attribute {name}")
 
```
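Taken together, the two `__getattr__` hunks reorder the lookup: a name whose backend is missing is intercepted first and mapped to the dummy `Placeholder`, while the plain submodule lookup drops to a fallback after class resolution. Previously `self._modules` was checked before `self._object_missing_backend`, so a backend-gated name shadowed by a submodule could be resolved as a module instead of a placeholder. A condensed sketch of the resulting order (attribute names mirror `_LazyModule`; `_make_placeholder` is a hypothetical stand-in for the `Placeholder` construction shown in the diff):

```python
from types import ModuleType
from typing import Any


class LazyModuleSketch(ModuleType):
    """Condensed model of _LazyModule.__getattr__ after this commit."""

    def __getattr__(self, name: str) -> Any:
        if name in self._objects:                  # eagerly registered objects
            return self._objects[name]
        if name in self._object_missing_backend:   # missing backend wins first
            value = self._make_placeholder(name)   # dummy that raises on use
        elif name in self._class_to_module:        # lazily import the owning module
            module = self._get_module(self._class_to_module[name])
            value = getattr(module, name)
        elif name in self._modules:                # bare submodule, now the fallback
            value = self._get_module(name)
        else:
            raise AttributeError(f"module {self.__name__} has no attribute {name}")
        setattr(self, name, value)                 # cache so the next access is direct
        return value
```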
```diff
@@ -1089,18 +1089,34 @@ def check_public_method_exists(documented_methods_map):
             for submodule_name in nested_submodules:
                 if submodule_name == "transformers":
                     continue
-                submodule = getattr(submodule, submodule_name)
+
+                try:
+                    submodule = getattr(submodule, submodule_name)
+                except AttributeError:
+                    failures.append(f"Could not parse {submodule_name}. Are the required dependencies installed?")
+                    continue
 
         class_name = nested_path[-1]
-        obj_class = getattr(submodule, class_name)
+
+        try:
+            obj_class = getattr(submodule, class_name)
+        except AttributeError:
+            failures.append(f"Could not parse {submodule_name}. Are the required dependencies installed?")
+            continue
 
         # Checks that all explicitly documented methods are defined in the class
         for method in methods:
             if method == "all":  # Special keyword to document all public methods
                 continue
-            if not hasattr(obj_class, method):
-                failures.append(
-                    "The following public method is explicitly documented but not defined in the corresponding "
-                    f"class. class: {obj}, method: {method}"
-                )
+            try:
+                if not hasattr(obj_class, method):
+                    failures.append(
+                        "The following public method is explicitly documented but not defined in the corresponding "
+                        f"class. class: {obj}, method: {method}. If the method is defined, this error can be due to "
+                        f"lacking dependencies."
+                    )
+            except ImportError:
+                pass
 
     if len(failures) > 0:
         raise Exception("\n".join(failures))
```
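The pattern added to `check_public_method_exists`, in isolation: every `getattr` hop along a documented dotted path can now fail when a backend is missing, so each hop is wrapped and the failure recorded instead of aborting the whole repo check. Note that `hasattr` only swallows `AttributeError`, so an `ImportError` raised by a dummy placeholder has to be caught explicitly, as the last hunk does. A standalone sketch with hypothetical inputs, using stdlib modules so it runs anywhere:

```python
import importlib


def resolve_documented_object(dotted_path: str, failures: list[str]):
    """Resolve a dotted path like "transformers.FlaxGenerationMixin",
    recording failures instead of raising (mirrors the change above)."""
    parts = dotted_path.split(".")
    obj = importlib.import_module(parts[0])
    for name in parts[1:]:
        try:
            obj = getattr(obj, name)
        except AttributeError:
            failures.append(f"Could not parse {name}. Are the required dependencies installed?")
            return None
    return obj


failures: list[str] = []
resolve_documented_object("json.JSONDecoder", failures)  # resolves fine
resolve_documented_object("json.NoSuchClass", failures)  # recorded, not raised
print(failures)
```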