Fix std initialization in Idefics variants (#37100)

* Nit 😅

* Another one

* fix

* run ci

* revert change
Yaswanth Gali 2025-04-01 12:48:54 +05:30 committed by GitHub
parent 786d9c5ed9
commit 4b13a02920
3 changed files with 6 additions and 6 deletions

@@ -604,9 +604,9 @@ class Idefics2PreTrainedModel(PreTrainedModel):
     def _init_weights(self, module):
         std = (
-            self.config.text_config.initializer_range
+            self.config.initializer_range
             if hasattr(self.config, "initializer_range")
-            else self.config.text_config.initializer_range
+            else self.config.get_text_config().initializer_range
         )
         if hasattr(module, "class_embedding"):

@@ -625,9 +625,9 @@ class Idefics3PreTrainedModel(PreTrainedModel):
     # Copied from transformers.models.idefics2.modeling_idefics2.Idefics2PreTrainedModel._init_weights
     def _init_weights(self, module):
         std = (
-            self.config.text_config.initializer_range
+            self.config.initializer_range
             if hasattr(self.config, "initializer_range")
-            else self.config.text_config.initializer_range
+            else self.config.get_text_config().initializer_range
        )
         if hasattr(module, "class_embedding"):

@@ -85,9 +85,9 @@ class SmolVLMPreTrainedModel(PreTrainedModel):
     def _init_weights(self, module):
         std = (
-            self.config.text_config.initializer_range
+            self.config.initializer_range
             if hasattr(self.config, "initializer_range")
-            else self.config.text_config.initializer_range
+            else self.config.get_text_config().initializer_range
         )
         if hasattr(module, "class_embedding"):
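
In all three models the previous expression read initializer_range from self.config.text_config in both branches, so the hasattr check on the top-level config had no effect. The patched expression prefers the composite config's own initializer_range and only falls back to the text sub-config via get_text_config(). Below is a minimal sketch of that selection logic; DummyTextConfig, DummyCompositeConfig, and DummyFlatConfig are illustrative stand-ins, not the actual Idefics or transformers config classes.

# Sketch of the fixed std-selection logic using hypothetical config classes.

class DummyTextConfig:
    initializer_range = 0.02


class DummyCompositeConfig:
    # No top-level initializer_range; only the text sub-config defines one.
    def __init__(self):
        self.text_config = DummyTextConfig()

    def get_text_config(self):
        return self.text_config


class DummyFlatConfig:
    # A config that does define initializer_range at the top level.
    initializer_range = 0.05


def pick_std(config):
    # Mirrors the patched expression in _init_weights: use the config's own
    # value when present, otherwise read it from the text sub-config returned
    # by get_text_config().
    return (
        config.initializer_range
        if hasattr(config, "initializer_range")
        else config.get_text_config().initializer_range
    )


print(pick_std(DummyCompositeConfig()))  # 0.02, falls back to the text sub-config
print(pick_std(DummyFlatConfig()))       # 0.05, the top-level value wins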