mirror of
https://github.com/huggingface/transformers.git
synced 2025-08-02 11:11:05 +06:00
fix: jamba cache fails to use torch.nn.module (#32894)
Co-authored-by: Gal Cohen <galc@ai21.com>
parent: 65f4bc99f9
commit: 01c4fc455b
@@ -210,6 +210,7 @@ class HybridMambaAttentionDynamicCache(DynamicCache):
     """
 
     def __init__(self, config, batch_size, dtype=torch.float16, device=None):
+        super().__init__()
         self.dtype = dtype
         self.layers_block_type = config.layers_block_type
         self.has_previous_state = False  # only used by mamba
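The one-line fix adds a super().__init__() call at the top of the cache's __init__. Below is a minimal, self-contained sketch of why that call matters when the base class is, or derives from, torch.nn.Module; the class names BrokenCache and FixedCache are hypothetical and not part of transformers. Skipping the call leaves the module's internal bookkeeping (_parameters, _buffers, _modules) uninitialized, so any nn.Module machinery invoked later raises an AttributeError.

# Minimal sketch (hypothetical class names, not the transformers code):
# a class whose base derives from torch.nn.Module must call
# super().__init__() before it can be used as a module.
import torch
import torch.nn as nn

class BrokenCache(nn.Module):
    # Skips super().__init__(), so nn.Module's internal state
    # (_parameters, _buffers, _modules) is never created.
    def __init__(self, dtype=torch.float16):
        self.dtype = dtype  # a plain attribute assignment still succeeds here

class FixedCache(nn.Module):
    # Calls super().__init__() first, mirroring the change in this commit.
    def __init__(self, dtype=torch.float16):
        super().__init__()
        self.dtype = dtype

broken = BrokenCache()
try:
    list(broken.parameters())  # any nn.Module machinery now fails
except AttributeError as err:
    print("broken:", err)  # complains about missing internal module state

fixed = FixedCache()
print("fixed:", list(fixed.parameters()))  # -> [] as expected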