fix bias keyword argument in TFDebertaEmbeddings (#17940)

Wissam Antoun 2022-07-01 15:48:43 +02:00 committed by GitHub
parent 569b679adb
commit 3a064bd4dd
2 changed files with 2 additions and 2 deletions

@@ -729,7 +729,7 @@ class TFDebertaEmbeddings(tf.keras.layers.Layer):
         self.position_biased_input = getattr(config, "position_biased_input", True)
         self.initializer_range = config.initializer_range
         if self.embedding_size != config.hidden_size:
-            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, bias=False)
+            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, use_bias=False)
         self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
         self.dropout = TFDebertaStableDropout(config.hidden_dropout_prob, name="dropout")

@@ -827,7 +827,7 @@ class TFDebertaV2Embeddings(tf.keras.layers.Layer):
         self.position_biased_input = getattr(config, "position_biased_input", True)
         self.initializer_range = config.initializer_range
         if self.embedding_size != config.hidden_size:
-            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, bias=False)
+            self.embed_proj = tf.keras.layers.Dense(config.hidden_size, use_bias=False)
         self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
         self.dropout = TFDebertaV2StableDropout(config.hidden_dropout_prob, name="dropout")
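For context: `tf.keras.layers.Dense` has no `bias` keyword; the flag that toggles the bias vector is `use_bias`, so the old call would typically fail with a TypeError ("Keyword argument not understood: 'bias'") whenever `embedding_size` differed from `hidden_size` and the projection layer was actually constructed. The sketch below is a minimal standalone illustration of the corrected call, not taken from the model itself; the sizes are made-up placeholders standing in for `config.embedding_size` and `config.hidden_size`.

    import tensorflow as tf

    # Illustrative sizes only; in the model these come from the DeBERTa config.
    embedding_size, hidden_size = 512, 768

    # Dense(..., bias=False) is rejected by Keras because the constructor's
    # flag is named `use_bias`; this is the corrected, bias-free projection.
    embed_proj = tf.keras.layers.Dense(hidden_size, use_bias=False)

    # (batch, seq_len, embedding_size) -> (batch, seq_len, hidden_size), no bias added.
    embeddings = tf.random.normal((2, 16, embedding_size))
    projected = embed_proj(embeddings)
    print(projected.shape)  # (2, 16, 768)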