Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
fix

commit e3c82cb7ed
parent c278e1cb1c
@@ -1029,7 +1029,7 @@ class SamVisionNeck(nn.Module):
 
 
 class SamVisionEncoder(PreTrainedModel):
-    _can_record_outputs = {"hidden_states": (SamVisionLayer, 0), "attentions": (SamVisionAttention, 1)}
+    _can_record_outputs = {"hidden_states": SamVisionLayer, "attentions": SamVisionAttention}
 
     def __init__(self, config: SamVisionConfig):
         super().__init__(config)
@@ -300,9 +300,9 @@ class Starcoder2PreTrainedModel(PreTrainedModel):
     _supports_quantized_cache = True
     _supports_static_cache = True
     _supports_attention_backend = True
-    _can_record_outputs: dict[str, tuple[nn.Module, int]] = {
-        "hidden_states": (Starcoder2DecoderLayer, 0),
-        "attentions": (Starcoder2Attention, 1),
+    _can_record_outputs = {
+        "hidden_states": Starcoder2DecoderLayer,
+        "attentions": Starcoder2Attention,
     }
 
     def _init_weights(self, module):
@@ -550,8 +550,8 @@ def make_default_2d_attention_mask(
 
 class T5GemmaEncoder(T5GemmaPreTrainedModel):
     _can_record_outputs = {
-        "attentions": (T5GemmaSelfAttention, 1),
-        "hidden_states": (T5GemmaEncoderLayer, 1),
+        "attentions": T5GemmaSelfAttention,
+        "hidden_states": T5GemmaEncoderLayer,
     }
 
     def __init__(self, config):
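
All three hunks make the same change: the values in _can_record_outputs go from (ModuleClass, index) tuples to bare module classes. Below is a minimal, self-contained sketch of the idea behind such a mapping, built on plain PyTorch forward hooks. It is not the transformers implementation; TinyLayer, TinyEncoder, and run_and_record are hypothetical names used only for illustration.

import torch
from torch import nn


class TinyLayer(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.proj = nn.Linear(dim, dim)

    def forward(self, x):
        return torch.relu(self.proj(x))


class TinyEncoder(nn.Module):
    # Same shape as the new-style attribute in this commit: name -> module class.
    _can_record_outputs = {"hidden_states": TinyLayer}

    def __init__(self, dim=8, depth=3):
        super().__init__()
        self.layers = nn.ModuleList(TinyLayer(dim) for _ in range(depth))

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x


def run_and_record(model, x):
    # Register a forward hook on every submodule whose class matches an
    # entry in _can_record_outputs, run the model once, then remove the hooks.
    recorded = {name: [] for name in model._can_record_outputs}
    handles = []
    for name, module_cls in model._can_record_outputs.items():
        for module in model.modules():
            if isinstance(module, module_cls):
                # With a bare class the whole module output is recorded; the
                # old (cls, index) tuple form would pick output[index] instead.
                handles.append(module.register_forward_hook(
                    lambda mod, args, out, name=name: recorded[name].append(out)
                ))
    try:
        result = model(x)
    finally:
        for handle in handles:
            handle.remove()
    return result, recorded


encoder = TinyEncoder()
out, recorded = run_and_record(encoder, torch.randn(2, 8))
print(len(recorded["hidden_states"]))  # 3: one recorded tensor per TinyLayer

Under this reading, dropping the index is a simplification: the recorder keeps each matched module's full output rather than selecting one element of an output tuple.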