Fix token in example template (#25351)

fix

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
This commit is contained in:
Yih-Dar 2023-08-08 12:00:31 +02:00 committed by GitHub
parent 01ab39b65f
commit 5744482abc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -115,11 +115,13 @@ class ModelArguments:
         default="main",
         metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
     )
-    token: bool = field(
-        default=False,
+    token: str = field(
+        default=None,
         metadata={
-            "help": "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
-            "with private models)."
+            "help": (
+                "The token to use as HTTP bearer authorization for remote files. If not specified, will use the token "
+                "generated when running `huggingface-cli login` (stored in `~/.huggingface`)."
+            )
         },
     )
     trust_remote_code: bool = field(
@@ -299,7 +301,7 @@ def main():
     config_kwargs = {
         "cache_dir": model_args.cache_dir,
         "revision": model_args.model_revision,
-        "token": True if model_args.token else None,
+        "token": model_args.token,
         "trust_remote_code": model_args.trust_remote_code,
     }
     if model_args.config_name:
@@ -314,7 +316,7 @@ def main():
         "cache_dir": model_args.cache_dir,
         "use_fast": model_args.use_fast_tokenizer,
         "revision": model_args.model_revision,
-        "token": True if model_args.token else None,
+        "token": model_args.token,
         "trust_remote_code": model_args.trust_remote_code,
     }
     if model_args.tokenizer_name:
@ -334,7 +336,7 @@ def main():
config=config, config=config,
cache_dir=model_args.cache_dir, cache_dir=model_args.cache_dir,
revision=model_args.model_revision, revision=model_args.model_revision,
token=True if model_args.token else None, token=model_args.token,
trust_remote_code=model_args.trust_remote_code, trust_remote_code=model_args.trust_remote_code,
) )
else: else:
@ -349,7 +351,7 @@ def main():
finetuning_task=data_args.task_name, finetuning_task=data_args.task_name,
cache_dir=model_args.cache_dir, cache_dir=model_args.cache_dir,
revision=model_args.model_revision, revision=model_args.model_revision,
token=True if model_args.token else None, token=model_args.token,
trust_remote_code=model_args.trust_remote_code, trust_remote_code=model_args.trust_remote_code,
) )
tokenizer = AutoTokenizer.from_pretrained( tokenizer = AutoTokenizer.from_pretrained(
@ -357,7 +359,7 @@ def main():
cache_dir=model_args.cache_dir, cache_dir=model_args.cache_dir,
use_fast=model_args.use_fast_tokenizer, use_fast=model_args.use_fast_tokenizer,
revision=model_args.model_revision, revision=model_args.model_revision,
token=True if model_args.token else None, token=model_args.token,
trust_remote_code=model_args.trust_remote_code, trust_remote_code=model_args.trust_remote_code,
) )
model = AutoModelForSequenceClassification.from_pretrained( model = AutoModelForSequenceClassification.from_pretrained(
@ -366,7 +368,7 @@ def main():
config=config, config=config,
cache_dir=model_args.cache_dir, cache_dir=model_args.cache_dir,
revision=model_args.model_revision, revision=model_args.model_revision,
token=True if model_args.token else None, token=model_args.token,
trust_remote_code=model_args.trust_remote_code, trust_remote_code=model_args.trust_remote_code,
) )
{% endif %} {% endif %}