fix: stop passing config as Layer trainable param

Lurking bugs discovered while working on other stuff.
This commit is contained in:
Gunnlaugur Thor Briem 2020-03-03 23:05:40 +00:00
parent e9e6efdc45
commit b1116fd673
2 changed files with 2 additions and 2 deletions

View File

@@ -480,7 +480,7 @@ class TFAlbertMLMHead(tf.keras.layers.Layer):
class TFAlbertMainLayer(tf.keras.layers.Layer):
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
super().__init__(**kwargs)
self.num_hidden_layers = config.num_hidden_layers
self.embeddings = TFAlbertEmbeddings(config, name="embeddings")

View File

@@ -199,7 +199,7 @@ class TFBlock(tf.keras.layers.Layer):
class TFOpenAIGPTMainLayer(tf.keras.layers.Layer):
def __init__(self, config, *inputs, **kwargs):
super().__init__(config, *inputs, **kwargs)
super().__init__(*inputs, **kwargs)
self.output_hidden_states = config.output_hidden_states
self.output_attentions = config.output_attentions
self.num_hidden_layers = config.n_layer