Remove type annotation in Siglip Attention Module (#38503)

* Remove type annotation

* Remove print statement
Author: Yaswanth Gali (committed by GitHub)
Date: 2025-06-02 21:21:07 +05:30
Parent: afb35a10ed
Commit: 1094dd34f7
3 changed files with 2 additions and 3 deletions


@@ -370,7 +370,7 @@ def eager_attention_forward(
 class SiglipAttention(nn.Module):
     """Multi-headed attention from 'Attention Is All You Need' paper"""
-    def __init__(self, config: Union[SiglipVisionConfig, SiglipTextConfig]):
+    def __init__(self, config):
         super().__init__()
         self.config = config
         self.embed_dim = config.hidden_size

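The annotation removal is behavior-neutral: Python does not enforce annotations at runtime, so dropping the Union[SiglipVisionConfig, SiglipTextConfig] hint (here and in the matching Siglip2Attention hunk below) leaves construction unchanged. A minimal sketch, assuming the standard transformers import paths for these classes:

from transformers import SiglipVisionConfig
from transformers.models.siglip.modeling_siglip import SiglipAttention

# Construction is identical before and after the change, since
# the removed annotation was informational only.
config = SiglipVisionConfig()   # defaults include hidden_size=768
attn = SiglipAttention(config)
assert attn.embed_dim == config.hidden_size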

@@ -264,7 +264,7 @@ def eager_attention_forward(
 class Siglip2Attention(nn.Module):
     """Multi-headed attention from 'Attention Is All You Need' paper"""
-    def __init__(self, config: Union[Siglip2VisionConfig, Siglip2TextConfig]):
+    def __init__(self, config):
         super().__init__()
         self.config = config
         self.embed_dim = config.hidden_size


@@ -739,7 +739,6 @@ class ModelTesterMixin:
         model = model_class(config)
         model.to(torch_device)
         model.eval()
-        print(model_class)
         with torch.no_grad():
             first = model(**self._prepare_for_class(inputs_dict, model_class))[0]
             second = model(**self._prepare_for_class(inputs_dict, model_class))[0]
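The removed print(model_class) was leftover debug output in the determinism test, which runs the same inputs through the model twice under torch.no_grad() and compares the outputs. A simplified sketch of that pattern (a hypothetical helper, not the exact ModelTesterMixin code):

import torch

def check_determinism(model, inputs):
    # Two forward passes over identical inputs in eval mode should
    # produce identical outputs (no dropout or other randomness).
    model.eval()
    with torch.no_grad():
        first = model(**inputs)[0]
        second = model(**inputs)[0]
    torch.testing.assert_close(first, second)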