feat: log LR in CLM fine-tune script

11b 2022-12-27 13:21:00 -03:00
parent 96b41dee60
commit e99277ec52
1 changed file with 1 addition and 0 deletions

@@ -600,6 +600,7 @@ def main():
                     train_perplexity = float("inf")
                 writer.add_scalar("Train/Perplexity (Step)", train_perplexity, global_step)
                 writer.add_scalar("Train/Loss (Step)", loss, global_step)
+                writer.add_scalar("Train/Learning Rate (Step)", lr_scheduler.get_last_lr()[-1], global_step)
                 if args.output_dir is not None and args.checkpointing_steps is not None:
                     if args.checkpointing_steps != "epoch" and completed_steps % int(args.checkpointing_steps) == 0:
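For reference, a minimal sketch of the logging pattern this commit adds, using torch.utils.tensorboard and a standard PyTorch scheduler. The model, optimizer, scheduler, and training loop below are illustrative stand-ins, not taken from the actual script; only the three writer.add_scalar calls mirror the diff.

import math

import torch
from torch.optim import AdamW
from torch.optim.lr_scheduler import LinearLR
from torch.utils.tensorboard import SummaryWriter

# Stand-in components (assumptions; the real script fine-tunes a causal LM).
model = torch.nn.Linear(10, 2)
optimizer = AdamW(model.parameters(), lr=5e-5)
lr_scheduler = LinearLR(optimizer, start_factor=1.0, end_factor=0.1, total_iters=100)
writer = SummaryWriter()

for global_step in range(100):
    loss = model(torch.randn(4, 10)).pow(2).mean()  # dummy loss
    loss.backward()
    optimizer.step()
    lr_scheduler.step()
    optimizer.zero_grad()

    # exp(loss) can overflow for large losses; fall back to inf,
    # matching the context lines in the diff above.
    try:
        train_perplexity = math.exp(loss.item())
    except OverflowError:
        train_perplexity = float("inf")

    writer.add_scalar("Train/Perplexity (Step)", train_perplexity, global_step)
    writer.add_scalar("Train/Loss (Step)", loss.item(), global_step)
    # get_last_lr() returns one learning rate per parameter group;
    # [-1] picks the last group, as the added line does.
    writer.add_scalar("Train/Learning Rate (Step)", lr_scheduler.get_last_lr()[-1], global_step)

writer.close()

Reading lr_scheduler.get_last_lr() rather than optimizer.param_groups[0]["lr"] keeps the logged value consistent with what the scheduler reports after its most recent step.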