Stop passing None to compile() in TF examples (#29597)
* Fix examples to stop passing None to compile(), rework example invocation for run_text_classification.py

* Add Amy's fix
parent 73efe896df
commit 81ec8028f9
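Every hunk below applies the same pattern: when the script will not train, compile() is given a throwaway optimizer identifier such as "sgd" instead of None, since the optimizer is never actually stepped during evaluation or prediction. A minimal sketch of that pattern, using a hypothetical plain Keras model and a do_train flag standing in for the scripts' training_args:

```python
import tensorflow as tf

# Hypothetical stand-ins for the example scripts' model and training_args.
model = tf.keras.Sequential([tf.keras.layers.Dense(2, activation="softmax")])
do_train = False  # evaluation/prediction only

if do_train:
    optimizer = tf.keras.optimizers.Adam(learning_rate=5e-5)
else:
    # Just write anything because we won't be using it
    optimizer = "sgd"

# Keras resolves the string "sgd" to a real optimizer instance, so compile()
# succeeds either way; None is not a documented value for the optimizer argument.
model.compile(optimizer=optimizer, loss="sparse_categorical_crossentropy", metrics=["accuracy"])
```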
@@ -509,7 +509,7 @@ def main():
             collate_fn=collate_fn,
         ).with_options(dataset_options)
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it

     if training_args.do_eval:
         eval_dataset = model.prepare_tf_dataset(
@@ -482,7 +482,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     # Transformers models compute the right loss for their task by default when labels are passed, and will
     # use this for training unless you specify your own loss function in compile().
     model.compile(optimizer=optimizer, metrics=["accuracy"], jit_compile=training_args.xla)
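The two comment lines in this hunk describe real behavior of the TF Transformers models: when compile() is called without a loss argument and the inputs contain labels, the model's built-in task loss is used for training. A short sketch under that assumption (the checkpoint name is a placeholder):

```python
from transformers import TFAutoModelForSequenceClassification

# Placeholder checkpoint; any TF sequence-classification head behaves the same way.
model = TFAutoModelForSequenceClassification.from_pretrained("distilbert-base-uncased")

# No loss is passed here, so the model falls back to its internal task loss
# whenever the input dicts include a "labels" key; metrics still go in compile().
model.compile(optimizer="adam", metrics=["accuracy"])
```

Calling model.fit() on a tf.data dataset of tokenized dicts that include labels would then train against that internal loss, which is why these scripts only ever need to swap the optimizer argument.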
@@ -706,7 +706,8 @@ def main():
         model.compile(optimizer=optimizer, jit_compile=training_args.xla, metrics=["accuracy"])

     else:
-        model.compile(optimizer=None, jit_compile=training_args.xla, metrics=["accuracy"])
+        # Optimizer doesn't matter as it won't be used anyway
+        model.compile(optimizer="sgd", jit_compile=training_args.xla, metrics=["accuracy"])
         training_dataset = None

     if training_args.do_eval:
@@ -621,7 +621,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it

     # endregion

@@ -75,7 +75,10 @@ python run_text_classification.py \
 --train_file training_data.json \
 --validation_file validation_data.json \
 --output_dir output/ \
---test_file data_to_predict.json
+--test_file data_to_predict.json \
+--do_train \
+--do_eval \
+--do_predict
 ```

 ## run_glue.py
@@ -477,7 +477,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = "adam"  # Just write anything because we won't be using it
+        optimizer = "sgd"  # Just write anything because we won't be using it
     if is_regression:
         metrics = []
     else:
@@ -526,7 +526,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just use any default
     if is_regression:
         metrics = []
     else:
@@ -584,7 +584,7 @@ def main():
             adam_global_clipnorm=training_args.max_grad_norm,
         )
     else:
-        optimizer = None
+        optimizer = "sgd"  # Just write anything because we won't be using it
     # endregion

     # region Metric and postprocessing