Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-04 13:20:12 +06:00
examples/run_summarization_no_trainer: fixed incorrect param to hasattr (#18720)
* fixed incorrect param to hasattr
* simplified condition checks
* code cleanup
parent 6667b0d7bf
commit c55d6e4e10
@@ -573,12 +573,9 @@ def main():
     args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch)
 
     # Figure out how many steps we should save the Accelerator states
-    if hasattr(args.checkpointing_steps, "isdigit"):
-        checkpointing_steps = args.checkpointing_steps
-        if args.checkpointing_steps.isdigit():
-            checkpointing_steps = int(args.checkpointing_steps)
-    else:
-        checkpointing_steps = None
+    checkpointing_steps = args.checkpointing_steps
+    if checkpointing_steps is not None and checkpointing_steps.isdigit():
+        checkpointing_steps = int(checkpointing_steps)
 
     # We need to initialize the trackers we use, and also store our configuration.
     # The trackers initializes automatically on the main process.
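For context on the bug, a minimal sketch (not from the commit; `raw` is a hypothetical stand-in for the parsed `--checkpointing_steps` value, which the script leaves as `None` or a string such as "500" or "epoch"): `hasattr(x, "isdigit")` is true for every string, so the old guard only distinguished strings from `None` in a roundabout way rather than testing for a numeric string, which is what the explicit `is not None` check now expresses directly.

```python
# Minimal sketch, not from the commit: `raw` stands in for the parsed
# --checkpointing_steps value (assumed to be None or a string like "500"/"epoch").
for raw in (None, "500", "epoch"):
    # Old guard: hasattr() is True for ANY string, so this only
    # distinguished strings from None -- it never tested for digits.
    old_guard = hasattr(raw, "isdigit")

    # New logic from the diff: check for None explicitly, then
    # convert only genuinely numeric strings to int.
    checkpointing_steps = raw
    if checkpointing_steps is not None and checkpointing_steps.isdigit():
        checkpointing_steps = int(checkpointing_steps)

    print(f"{raw!r}: old_guard={old_guard}, result={checkpointing_steps!r}")
```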