Remove dropout in embedding layer of OPT (#18845)

This commit is contained in:
Shijie Wu 2022-09-12 10:32:38 -04:00 committed by GitHub
parent 367026000b
commit adbf3a40de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 0 additions and 4 deletions

View File

@@ -484,8 +484,6 @@ class FlaxOPTDecoder(nn.Module):
hidden_states = inputs_embeds + positions
hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic)
hidden_state, all_hidden_states, attentions = self.layers(
hidden_states,
attention_mask,

View File

@@ -637,7 +637,6 @@ class OPTDecoder(OPTPreTrainedModel):
inputs_embeds = self.project_in(inputs_embeds)
hidden_states = inputs_embeds + pos_embeds
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
# decoder layers
all_hidden_states = () if output_hidden_states else None

View File

@@ -652,7 +652,6 @@ class TFOPTDecoder(tf.keras.layers.Layer):
inputs_embeds = self.project_in(inputs_embeds)
hidden_states = inputs_embeds + pos_embeds
hidden_states = self.dropout(hidden_states, training=training)
# decoder layers
all_hidden_states = () if output_hidden_states else None