Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-03 12:50:06 +06:00
Remove type annotation in Siglip Attention Module (#38503)
* Remove type annotation
* remove print statement
This commit is contained in:
parent afb35a10ed
commit 1094dd34f7
@@ -370,7 +370,7 @@ def eager_attention_forward(
 class SiglipAttention(nn.Module):
     """Multi-headed attention from 'Attention Is All You Need' paper"""

-    def __init__(self, config: Union[SiglipVisionConfig, SiglipTextConfig]):
+    def __init__(self, config):
         super().__init__()
         self.config = config
         self.embed_dim = config.hidden_size
@@ -264,7 +264,7 @@ def eager_attention_forward(
 class Siglip2Attention(nn.Module):
     """Multi-headed attention from 'Attention Is All You Need' paper"""

-    def __init__(self, config: Union[Siglip2VisionConfig, Siglip2TextConfig]):
+    def __init__(self, config):
         super().__init__()
         self.config = config
         self.embed_dim = config.hidden_size
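Both hunks make the same change: the Union[...] annotation on config is dropped, so the initializer no longer names the vision/text config classes and simply reads attributes off whatever config object it is given. Below is a minimal, self-contained sketch of the initializer pattern these modules follow; embed_dim/hidden_size appears in the diff itself, while the remaining config fields and the four projection layers are assumptions based on the standard multi-head attention layout, not the exact Siglip source:

    import torch.nn as nn


    class AttentionSketch(nn.Module):
        """Multi-headed attention from 'Attention Is All You Need' paper"""

        def __init__(self, config):  # untyped, as in the "+" side of the diff
            super().__init__()
            self.config = config
            self.embed_dim = config.hidden_size  # the line shown in both hunks
            # Assumed standard config fields; any attribute-bearing object works.
            self.num_heads = config.num_attention_heads
            self.head_dim = self.embed_dim // self.num_heads
            self.scale = self.head_dim**-0.5
            # One linear projection each for query, key, value, and output.
            self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)
            self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
            self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
            self.out_proj = nn.Linear(self.embed_dim, self.embed_dim)

Since the body only duck-types the config, removing the annotation changes no behavior: any object exposing these attributes, for example a types.SimpleNamespace(hidden_size=64, num_attention_heads=8) in a quick test, can drive the module.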
@@ -739,7 +739,6 @@ class ModelTesterMixin:
         model = model_class(config)
         model.to(torch_device)
         model.eval()
-        print(model_class)
         with torch.no_grad():
             first = model(**self._prepare_for_class(inputs_dict, model_class))[0]
             second = model(**self._prepare_for_class(inputs_dict, model_class))[0]
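The removed print(model_class) was debug output left in the common determinism test, which runs the same batch through the model twice in eval mode and expects identical outputs. A minimal sketch of that check, using a stand-in nn.Linear and random inputs rather than the actual ModelTesterMixin fixtures:

    import torch
    import torch.nn as nn

    # Stand-ins for the model_class/config/inputs fixtures used by the test.
    model = nn.Linear(8, 4)
    model.eval()  # disable dropout etc. so repeated forward passes agree
    inputs = torch.randn(2, 8)

    with torch.no_grad():
        first = model(inputs)
        second = model(inputs)

    # The real test compares the outputs numerically; an elementwise
    # closeness check captures the same intent in this sketch.
    assert torch.allclose(first, second)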