fix deprecation warnings (#7033)

* fix deprecation warnings

* remove tests/test_tokenization_common.py's test_padding_to_max_length

* revert test_padding_to_max_length
Stas Bekman 2020-09-14 04:51:19 -07:00 committed by GitHub
parent 576eec98e0
commit 4d39148419
3 changed files with 3 additions and 3 deletions


@@ -127,7 +127,7 @@ def load_tf_weights_in_funnel(model, config, tf_checkpoint_path):
         skipped = False
         for m_name in name[1:]:
             if not isinstance(pointer, FunnelPositionwiseFFN) and re.fullmatch(r"layer_\d+", m_name):
-                layer_index = int(re.search("layer_(\d+)", m_name).groups()[0])
+                layer_index = int(re.search(r"layer_(\d+)", m_name).groups()[0])
                 if layer_index < config.num_hidden_layers:
                     block_idx = 0
                     while layer_index >= config.block_sizes[block_idx]:
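Background on this hunk: on the Python versions current at the time of this commit, an unrecognized escape sequence such as `\d` in a non-raw string literal emits `DeprecationWarning: invalid escape sequence` at parse time (it became a `SyntaxWarning` in Python 3.12), which is exactly what the `r` prefix silences. A minimal sketch of the behavior; `compile()` is used only to re-parse the literal at runtime so the warning is observable:

```python
import warnings

# Parse a source fragment containing the non-raw literal "layer_(\d+)".
# The outer r-string keeps the backslash intact so compile() sees \d.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r'"layer_(\d+)"', "<demo>", "eval")

print(caught[0].category.__name__, "-", caught[0].message)
# DeprecationWarning - invalid escape sequence \d   (SyntaxWarning on 3.12+)

# The raw-string form parses cleanly: \d reaches the regex engine verbatim.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r'r"layer_(\d+)"', "<demo>", "eval")

print(len(caught))  # 0 -- no warning recorded
```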


@@ -699,7 +699,7 @@ class TFConv1D(tf.keras.layers.Layer):
 class TFSharedEmbeddings(tf.keras.layers.Layer):
-    """
+    r"""
     Construct shared token embeddings.
     The weights of the embedding layer is usually shared with the weights of the linear decoder when doing
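Same root cause as the Funnel hunk: with a plain `"""` literal, any backslash markup in the docstring is parsed as string escapes, so a sequence like `\s` warns (invalid escape) while a valid one like `\f` is silently decoded as a form feed. A sketch with a stand-in class and a made-up docstring body (the real `TFSharedEmbeddings` docstring is longer):

```python
# Hypothetical class standing in for TFSharedEmbeddings; the point is the
# r""" prefix, which keeps every backslash in the docstring literal.
class SharedEmbeddingsSketch:
    r"""Construct shared token embeddings.

    With the r-prefix, markup like :math:`\frac{1}{\sqrt{d}}` is stored
    verbatim: \s would otherwise trigger the invalid-escape warning and
    \f would be silently decoded as a form-feed character.
    """

assert "\\sqrt" in SharedEmbeddingsSketch.__doc__  # backslashes preserved
```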


@@ -156,7 +156,7 @@ class TokenizerTesterMixin:
         tokenizers = self.get_tokenizers()
         for tokenizer in tokenizers:
             with self.subTest(f"{tokenizer.__class__.__name__}"):
-                self.assertNotEqual(tokenizer.max_len, 42)
+                self.assertNotEqual(tokenizer.model_max_length, 42)

         # Now let's start the test
         tokenizers = self.get_tokenizers()
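On transformers releases around this commit, `max_len` is kept on tokenizers only for backward compatibility: reading it emits a `FutureWarning` pointing to `model_max_length` and returns the same value, hence the test update. A quick check (assumes `transformers` is installed and can fetch the `gpt2` vocabulary):

```python
import warnings

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

# The replacement attribute reads cleanly.
print(tokenizer.model_max_length)  # 1024 for gpt2

# The deprecated attribute still works on these releases, but warns;
# it returns the same value as model_max_length.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    legacy = tokenizer.max_len

assert legacy == tokenizer.model_max_length
print(caught[0].category.__name__)  # FutureWarning
```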