Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 18:22:34 +06:00)
fix num_special_tokens in GPT 2 test
commit 44e9ddd7fe
parent cad88e19de
@@ -432,6 +432,8 @@ class GPT2PreTrainedModel(nn.Module):
         kwargs.pop('cache_dir', None)
         from_tf = kwargs.get('from_tf', False)
         kwargs.pop('from_tf', None)
+        num_special_tokens = kwargs.get('num_special_tokens', None)
+        kwargs.pop('num_special_tokens', None)
 
         if pretrained_model_name_or_path in PRETRAINED_MODEL_ARCHIVE_MAP:
             archive_file = PRETRAINED_MODEL_ARCHIVE_MAP[pretrained_model_name_or_path]
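For context, the two added lines follow the same pattern as the surrounding cache_dir/from_tf handling: each loader-only keyword is read and then removed from kwargs, so it is consumed inside from_pretrained rather than being forwarded on with the remaining keyword arguments. The sketch below is only an illustration of that pattern in isolation; split_loader_kwargs is a hypothetical helper, not part of the library's API.

# Minimal, self-contained sketch (hypothetical helper, not library code):
# loader-only options are read with .get() and then .pop()-ed, so whatever
# remains in `kwargs` can be forwarded elsewhere untouched.
def split_loader_kwargs(**kwargs):
    cache_dir = kwargs.get('cache_dir', None)
    kwargs.pop('cache_dir', None)
    from_tf = kwargs.get('from_tf', False)
    kwargs.pop('from_tf', None)
    num_special_tokens = kwargs.get('num_special_tokens', None)
    kwargs.pop('num_special_tokens', None)
    # `kwargs` now holds only the options meant for downstream construction.
    return cache_dir, from_tf, num_special_tokens, kwargs


if __name__ == '__main__':
    cache_dir, from_tf, num_special_tokens, rest = split_loader_kwargs(
        cache_dir='/tmp/cache', num_special_tokens=2, n_ctx=1024)
    print(num_special_tokens)  # 2
    print(rest)                # {'n_ctx': 1024}

As a side note, dict.pop('num_special_tokens', None) alone would both return the value and remove the key; the separate get/pop calls in the hunk are equivalent and simply mirror the style of the neighboring cache_dir and from_tf lines.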