remove "PretrainedConfig" annotations

This commit is contained in:
qubvel 2025-07-02 16:43:59 +00:00
parent bf1192d982
commit 3afd9aceca
7 changed files with 8 additions and 11 deletions

View File

@@ -2616,7 +2616,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, PushToHubMixin, PeftAdapterMi
return config
@classmethod
def _check_and_enable_sdpa(cls, config, hard_check_only: bool = False) -> PretrainedConfig:
def _check_and_enable_sdpa(cls, config, hard_check_only: bool = False):
"""
Checks the availability of SDPA for a given model.

View File

@@ -131,7 +131,7 @@ class ClvpEncoderConfig(PretrainedConfig):
@classmethod
def from_pretrained(
cls, pretrained_model_name_or_path: Union[str, os.PathLike], config_type: str = "text_config", **kwargs
) -> "PretrainedConfig":
):
cls._set_token_in_kwargs(kwargs)
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

View File

@@ -15,7 +15,7 @@
"""PyTorch Falcon model."""
import math
from typing import TYPE_CHECKING, Optional, Union
from typing import Optional, Union
import torch
import torch.utils.checkpoint
@@ -47,9 +47,6 @@ from ...utils import (
from .configuration_falcon import FalconConfig
if TYPE_CHECKING:
from ...configuration_utils import PretrainedConfig
if is_flash_attn_available():
from ...modeling_flash_attention_utils import _flash_attention_forward
@@ -688,7 +685,7 @@ class FalconPreTrainedModel(PreTrainedModel):
# Adapted from transformers.modeling_utils.PreTrainedModel._check_and_enable_sdpa
@classmethod
def _check_and_enable_sdpa(cls, config, hard_check_only: bool = False) -> "PretrainedConfig":
def _check_and_enable_sdpa(cls, config, hard_check_only: bool = False):
_is_bettertransformer = getattr(cls, "use_bettertransformer", False)
if _is_bettertransformer:
return config

View File

@@ -1074,7 +1074,7 @@ class Qwen2_5OmniConfig(PretrainedConfig):
super().__init__(**kwargs)
def get_text_config(self, decoder=False) -> "PretrainedConfig":
def get_text_config(self, decoder=False):
"""
Returns the config that is meant to be used with text IO. On most models, it is the original config instance
itself. On specific composite models, it is under a set of valid names.

View File

@@ -1114,7 +1114,7 @@ class Qwen2_5OmniConfig(PretrainedConfig):
super().__init__(**kwargs)
def get_text_config(self, decoder=False) -> "PretrainedConfig":
def get_text_config(self, decoder=False):
"""
Returns the config that is meant to be used with text IO. On most models, it is the original config instance
itself. On specific composite models, it is under a set of valid names.

View File

@@ -324,7 +324,7 @@ class T5GemmaConfig(PretrainedConfig):
setattr(self.decoder, key, value)
super().__setattr__(key, value)
def get_text_config(self, decoder=False) -> "PretrainedConfig":
def get_text_config(self, decoder=False):
# Always return self, regardless of the decoder option.
del decoder
return self

View File

@@ -213,7 +213,7 @@ class T5GemmaConfig(PretrainedConfig):
setattr(self.decoder, key, value)
super().__setattr__(key, value)
def get_text_config(self, decoder=False) -> "PretrainedConfig":
def get_text_config(self, decoder=False):
# Always return self, regardless of the decoder option.
del decoder
return self