mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
fix: stop passing config as the Layer's positional `trainable` param
Lurking bugs discovered while working on other stuff.
This commit is contained in:
parent
e9e6efdc45
commit
b1116fd673
@@ -480,7 +480,7 @@ class TFAlbertMLMHead(tf.keras.layers.Layer):
|
||||
|
||||
class TFAlbertMainLayer(tf.keras.layers.Layer):
|
||||
def __init__(self, config, **kwargs):
|
||||
super().__init__(config, **kwargs)
|
||||
super().__init__(**kwargs)
|
||||
self.num_hidden_layers = config.num_hidden_layers
|
||||
|
||||
self.embeddings = TFAlbertEmbeddings(config, name="embeddings")
|
||||
|
@@ -199,7 +199,7 @@ class TFBlock(tf.keras.layers.Layer):
|
||||
|
||||
class TFOpenAIGPTMainLayer(tf.keras.layers.Layer):
|
||||
def __init__(self, config, *inputs, **kwargs):
|
||||
super().__init__(config, *inputs, **kwargs)
|
||||
super().__init__(*inputs, **kwargs)
|
||||
self.output_hidden_states = config.output_hidden_states
|
||||
self.output_attentions = config.output_attentions
|
||||
self.num_hidden_layers = config.n_layer
|
||||
|
Loading…
Reference in New Issue
Block a user