mirror of https://github.com/huggingface/transformers.git
fix: jamba cache fails to use torch.nn.module (#32894)
Co-authored-by: Gal Cohen <galc@ai21.com>
parent 65f4bc99f9
commit 01c4fc455b
@@ -210,6 +210,7 @@ class HybridMambaAttentionDynamicCache(DynamicCache):
     """

     def __init__(self, config, batch_size, dtype=torch.float16, device=None):
+        super().__init__()
         self.dtype = dtype
         self.layers_block_type = config.layers_block_type
         self.has_previous_state = False  # only used by mamba
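
Why the one-line fix matters: DynamicCache subclasses torch.nn.Module, and nn.Module's attribute machinery depends on internal registries (_parameters, _buffers, _modules) that are only created by super().__init__(). Skipping that call leaves the instance half-constructed, and any nn.Module functionality that touches it raises an AttributeError. A minimal sketch of the failure mode follows; BrokenCache, FixedCache, and the proj attribute are illustrative names, not code from the Jamba model.

import torch
import torch.nn as nn

class BrokenCache(nn.Module):
    def __init__(self):
        # Missing super().__init__(): the nn.Module registries
        # (_parameters, _buffers, _modules) are never created.
        self.dtype = torch.float16  # plain attributes still work

cache = BrokenCache()
try:
    # Any nn.Module machinery now fails, e.g. registering a submodule:
    cache.proj = nn.Linear(2, 2)
except AttributeError as err:
    print(err)  # "cannot assign module before Module.__init__() call"

class FixedCache(nn.Module):
    def __init__(self):
        super().__init__()  # the one-line fix applied in this commit
        self.dtype = torch.float16

FixedCache().proj = nn.Linear(2, 2)  # works: registries exist

In the Jamba cache the symptom was the same class of error, surfacing as soon as nn.Module behavior was exercised on the cache instance, which is what the commit title ("jamba cache fails to use torch.nn.module") refers to.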