mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
Merge pull request #1075 from abhishekraok/modeling_utils_config_None
reraise EnvironmentError in modeling_utils.py
This commit is contained in:
commit
3f20dd7186
@@ -166,7 +166,7 @@ class PretrainedConfig(object):
|
||||
# redirect to the cache, if necessary
|
||||
try:
|
||||
resolved_config_file = cached_path(config_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies)
|
||||
except EnvironmentError:
|
||||
except EnvironmentError as e:
|
||||
if pretrained_model_name_or_path in cls.pretrained_config_archive_map:
|
||||
logger.error(
|
||||
"Couldn't reach server at '{}' to download pretrained model configuration file.".format(
|
||||
@@ -179,7 +179,7 @@ class PretrainedConfig(object):
|
||||
pretrained_model_name_or_path,
|
||||
', '.join(cls.pretrained_config_archive_map.keys()),
|
||||
config_file))
|
||||
return None
|
||||
raise e
|
||||
if resolved_config_file == config_file:
|
||||
logger.info("loading configuration file {}".format(config_file))
|
||||
else:
|
||||
@@ -473,7 +473,7 @@ class PreTrainedModel(nn.Module):
|
||||
# redirect to the cache, if necessary
|
||||
try:
|
||||
resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies)
|
||||
except EnvironmentError:
|
||||
except EnvironmentError as e:
|
||||
if pretrained_model_name_or_path in cls.pretrained_model_archive_map:
|
||||
logger.error(
|
||||
"Couldn't reach server at '{}' to download pretrained weights.".format(
|
||||
@@ -486,7 +486,7 @@ class PreTrainedModel(nn.Module):
|
||||
pretrained_model_name_or_path,
|
||||
', '.join(cls.pretrained_model_archive_map.keys()),
|
||||
archive_file))
|
||||
return None
|
||||
raise e
|
||||
if resolved_archive_file == archive_file:
|
||||
logger.info("loading weights file {}".format(archive_file))
|
||||
else:
|
||||
|
@@ -293,7 +293,7 @@ class PreTrainedTokenizer(object):
|
||||
resolved_vocab_files[file_id] = None
|
||||
else:
|
||||
resolved_vocab_files[file_id] = cached_path(file_path, cache_dir=cache_dir, force_download=force_download, proxies=proxies)
|
||||
except EnvironmentError:
|
||||
except EnvironmentError as e:
|
||||
if pretrained_model_name_or_path in s3_models:
|
||||
logger.error("Couldn't reach server to download vocabulary.")
|
||||
else:
|
||||
@@ -303,7 +303,7 @@ class PreTrainedTokenizer(object):
|
||||
"at this path or url.".format(
|
||||
pretrained_model_name_or_path, ', '.join(s3_models),
|
||||
pretrained_model_name_or_path, str(vocab_files.keys())))
|
||||
return None
|
||||
raise e
|
||||
|
||||
for file_id, file_path in vocab_files.items():
|
||||
if file_path == resolved_vocab_files[file_id]:
|
||||
|
Loading…
Reference in New Issue
Block a user