Log lr for param groups (#159)

This commit is contained in:
Ayush Chaurasia
2023-01-09 04:57:14 +05:30
committed by GitHub
parent e79ea1666c
commit 9a2f67b3b4
2 changed files with 4 additions and 2 deletions

View File

@@ -317,7 +317,8 @@ class BaseTrainer:
self.run_callbacks("on_train_batch_end")
lr = {f"lr{ir}": x['lr'] for ir, x in enumerate(self.optimizer.param_groups)} # for loggers
self.lr = {f"lr/pg{ir}": x['lr'] for ir, x in enumerate(self.optimizer.param_groups)} # for loggers
self.scheduler.step()
self.run_callbacks("on_train_epoch_end")
@@ -328,7 +329,7 @@ class BaseTrainer:
final_epoch = (epoch + 1 == self.epochs)
if self.args.val or final_epoch:
self.metrics, self.fitness = self.validate()
self.save_metrics(metrics={**self.label_loss_items(self.tloss), **self.metrics, **lr})
self.save_metrics(metrics={**self.label_loss_items(self.tloss), **self.metrics, **self.lr})
# Save model
if self.args.save or (epoch + 1 == self.epochs):