Stop passing None to compile() in TF examples (#29597)

* Fix examples to stop passing None to compile(), rework example invocation for run_text_classification.py

* Add Amy's fix
Matt 2024-03-12 12:22:29 +00:00 committed by GitHub
parent 73efe896df
commit 81ec8028f9
8 changed files with 12 additions and 8 deletions
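
For readers skimming the diffs below: every file follows the same pattern, shown here as a minimal sketch (illustrative only, not code lifted from the examples; the checkpoint name, learning-rate settings and `do_train` flag are placeholders). When training is skipped, `compile()` now receives a throwaway optimizer string instead of `None`; it is never stepped because `fit()` is never called.

```python
from transformers import TFAutoModelForSequenceClassification, create_optimizer

# Placeholder checkpoint, purely for illustration.
model = TFAutoModelForSequenceClassification.from_pretrained("distilbert-base-uncased")

do_train = False  # e.g. the user only passed --do_eval / --do_predict
if do_train:
    # A real optimizer is only built when fit() will actually run.
    optimizer, lr_schedule = create_optimizer(
        init_lr=5e-5, num_train_steps=1000, num_warmup_steps=100
    )
else:
    # Just write anything because we won't be using it: compile() gets a valid
    # optimizer, but it is never stepped without a call to fit().
    optimizer = "sgd"

model.compile(optimizer=optimizer, metrics=["accuracy"])
```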

View File

@@ -509,7 +509,7 @@ def main():
             collate_fn=collate_fn,
         ).with_options(dataset_options)
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     if training_args.do_eval:
         eval_dataset = model.prepare_tf_dataset(

View File

@@ -482,7 +482,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     # Transformers models compute the right loss for their task by default when labels are passed, and will
     # use this for training unless you specify your own loss function in compile().
     model.compile(optimizer=optimizer, metrics=["accuracy"], jit_compile=training_args.xla)
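
The two comment lines kept in the hunk above explain why none of these scripts pass a `loss` to `compile()`. A hedged illustration of that behaviour (toy data and a placeholder checkpoint, not code from the repo): as soon as the input batches carry a `labels` entry, the model computes its own task loss during `fit()`.

```python
import tensorflow as tf
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

checkpoint = "distilbert-base-uncased"  # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = TFAutoModelForSequenceClassification.from_pretrained(checkpoint)

# Tiny toy batch; the real scripts build this with model.prepare_tf_dataset().
features = dict(tokenizer(["great movie", "terrible movie"], padding=True, return_tensors="np"))
features["labels"] = [1, 0]
dataset = tf.data.Dataset.from_tensor_slices(features).batch(2)

model.compile(optimizer="adam")  # no loss= argument
model.fit(dataset, epochs=1)  # the model's built-in task loss is used
```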

View File

@@ -706,7 +706,8 @@ def main():
         model.compile(optimizer=optimizer, jit_compile=training_args.xla, metrics=["accuracy"])
     else:
-        model.compile(optimizer=None, jit_compile=training_args.xla, metrics=["accuracy"])
+        # Optimizer doesn't matter as it won't be used anyway
+        model.compile(optimizer="sgd", jit_compile=training_args.xla, metrics=["accuracy"])
         training_dataset = None
     if training_args.do_eval:

View File

@@ -621,7 +621,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     # endregion

View File

@@ -75,7 +75,10 @@ python run_text_classification.py \
 --train_file training_data.json \
 --validation_file validation_data.json \
 --output_dir output/ \
---test_file data_to_predict.json
+--test_file data_to_predict.json \
+--do_train \
+--do_eval \
+--do_predict
 ```
 ## run_glue.py

View File

@@ -477,7 +477,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = "adam"  # Just write anything because we won't be using it
+        optimizer = "sgd"  # Just write anything because we won't be using it
     if is_regression:
         metrics = []
     else:

View File

@@ -526,7 +526,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just use any default
     if is_regression:
         metrics = []
     else:

View File

@@ -584,7 +584,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     # endregion
     # region Metric and postprocessing