Repository: https://github.com/huggingface/transformers.git
fix: replace asserts by error (#13894)
Parent: f099249cf1
Commit: 7af7d7ce05
@@ -172,7 +172,8 @@ class FlaxMultiHeadSelfAttention(nn.Module):
         self.dim = self.config.dim
         self.dropout = nn.Dropout(rate=self.config.attention_dropout)

-        assert self.dim % self.n_heads == 0, f"Hidden size {self.dim} not dividable by number of heads {self.n_heads}"
+        if not (self.dim % self.n_heads == 0):
+            raise ValueError(f"Hidden size {self.dim} not dividable by number of heads {self.n_heads}")

         self.q_lin = nn.Dense(
             self.dim,
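The change follows the general pattern of this PR: replace `assert` statements with explicit exceptions, since asserts are silently stripped when Python runs with the `-O` flag. Below is a minimal, hedged sketch of that pattern in a Flax-style module; the module name `ToyAttention` and its fields are illustrative assumptions, not the actual transformers implementation.

```python
import flax.linen as nn


class ToyAttention(nn.Module):
    """Illustrative module showing the assert-to-ValueError pattern.

    This is a sketch under assumed names, not the real
    FlaxMultiHeadSelfAttention from transformers.
    """

    dim: int
    n_heads: int

    def setup(self):
        # Raise a ValueError instead of asserting, so the configuration
        # check cannot be disabled by running Python with -O.
        if self.dim % self.n_heads != 0:
            raise ValueError(
                f"Hidden size {self.dim} not dividable by number of heads {self.n_heads}"
            )
        self.q_lin = nn.Dense(self.dim)
```

The error message is kept identical to the old assert message, so only the failure mode changes (an `AssertionError` that could vanish under `-O` becomes an unconditional `ValueError`).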