fix bias keyword argument in TFDebertaEmbeddings (#17940)
commit 3a064bd4dd
parent 569b679adb
@@ -729,7 +729,7 @@ class TFDebertaEmbeddings(tf.keras.layers.Layer):
         self.position_biased_input = getattr(config, "position_biased_input", True)
         self.initializer_range = config.initializer_range
         if self.embedding_size != config.hidden_size:
-            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, bias=False)
+            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, use_bias=False)
         self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
         self.dropout = TFDebertaStableDropout(config.hidden_dropout_prob, name="dropout")
@@ -827,7 +827,7 @@ class TFDebertaV2Embeddings(tf.keras.layers.Layer):
         self.position_biased_input = getattr(config, "position_biased_input", True)
         self.initializer_range = config.initializer_range
         if self.embedding_size != config.hidden_size:
-            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, bias=False)
+            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, use_bias=False)
         self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
         self.dropout = TFDebertaV2StableDropout(config.hidden_dropout_prob, name="dropout")
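For context on why this one-word change matters: tf.keras.layers.Dense takes a use_bias argument, not bias, so the pre-fix call failed at layer construction whenever embedding_size != config.hidden_size caused embed_proj to be built. A minimal sketch, assuming TF 2.x; the hidden size 768 is a placeholder, not a value from this diff:

import tensorflow as tf

# Keras Dense has no `bias` keyword, so the pre-fix call is rejected as soon
# as the projection branch constructs the layer.
try:
    tf.keras.layers.Dense(768, bias=False)  # the pre-fix call
except (TypeError, ValueError) as err:  # TypeError in Keras 2, ValueError in Keras 3
    print(err)

# The supported flag is `use_bias`; this builds the projection without a bias vector.
embed_proj = tf.keras.layers.Dense(768, use_bias=False)
print(embed_proj.get_config()["use_bias"])  # False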