Remove DT_DOUBLE from the T5 graph (#17891)
parent 6aae59d0b5
commit babd7b1a92
@@ -268,7 +268,7 @@ class TFT5Attention(tf.keras.layers.Layer):
         max_exact = num_buckets // 2
         is_small = tf.math.less(relative_position, max_exact)
         relative_position_if_large = max_exact + tf.cast(
-            tf.math.log(relative_position / max_exact)
+            tf.math.log(tf.cast(relative_position, tf.float32) / tf.cast(max_exact, tf.float32))
             / math.log(max_distance / max_exact)
             * (num_buckets - max_exact),
             dtype=relative_position.dtype,
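Why the change matters: TensorFlow's true division promotes int32 operands to float64, so the original log term inserted a DT_DOUBLE op into the graph; casting both operands to float32 first keeps the bucket computation in single precision, which is what the commit title refers to. A minimal sketch (not part of the commit) reusing the names relative_position and max_exact from the hunk above to show the dtype difference:

import tensorflow as tf

relative_position = tf.constant([1, 4, 8], dtype=tf.int32)
max_exact = 16

# Old expression: int32 / int goes through tf.truediv, which promotes int32
# operands to float64, so the log runs as DT_DOUBLE.
old_term = tf.math.log(relative_position / max_exact)
print(old_term.dtype)  # float64

# New expression: cast both operands to float32 first, so no DT_DOUBLE op is created.
new_term = tf.math.log(tf.cast(relative_position, tf.float32) / tf.cast(max_exact, tf.float32))
print(new_term.dtype)  # float32

When this method is traced into a graph, the same promotion shows up as float64 (DT_DOUBLE) nodes, even though the final tf.cast converts the result back to relative_position.dtype.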