Mirror of https://github.com/huggingface/transformers.git
Fix Tensor + Embedding error in some cases when using SiglipVisionModel (#33994)
Fix Tensor + Embedding error in some cases

Co-authored-by: kaitolucifer <kaito.o@ghelia.com>
parent 9b4b0c07db
commit e782e95e34
@@ -283,7 +283,7 @@ class SiglipVisionEmbeddings(nn.Module):
         num_positions = self.position_embedding.weight.shape[0]

         # always interpolate when tracing to ensure the exported model works for dynamic input shapes
         if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
-            return self.position_embedding
+            return self.position_embedding(self.position_ids)

         patch_pos_embed = self.position_embedding.weight.unsqueeze(0)
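Why the one-line change matters (a minimal sketch, not the transformers source itself): self.position_embedding in SiglipVisionEmbeddings is an nn.Embedding module, and the value returned by interpolate_pos_encoding is added to the patch embeddings in forward. Returning the module itself therefore triggered the reported "Tensor + Embedding" TypeError, whereas calling it with self.position_ids returns a plain tensor that broadcasts over the batch. The sizes below are illustrative, not taken from the model config.

import torch
import torch.nn as nn

# Illustrative sizes; the real model derives these from its config.
num_positions, dim = 196, 768
position_embedding = nn.Embedding(num_positions, dim)
position_ids = torch.arange(num_positions).unsqueeze(0)  # (1, num_positions)
patch_embeddings = torch.randn(2, num_positions, dim)    # (batch, num_patches, dim)

# Before the fix: the early-return path handed back the nn.Embedding module,
# so the addition performed by the caller failed.
try:
    _ = patch_embeddings + position_embedding
except TypeError as err:
    print(err)  # unsupported operand type(s) for +: 'Tensor' and 'Embedding'

# After the fix: indexing the embedding with position_ids yields a tensor of
# shape (1, num_positions, dim), which broadcasts over the batch dimension.
out = patch_embeddings + position_embedding(position_ids)
print(out.shape)  # torch.Size([2, 196, 768])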