You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
28 lines
681 B
28 lines
681 B
2 years ago
|
from torch.utils.tensorboard import SummaryWriter
|
||
|
|
||
|
writer = None # TensorBoard SummaryWriter instance
|
||
|
|
||
|
|
||
|
def _log_scalars(scalars, step=0):
|
||
|
for k, v in scalars.items():
|
||
|
writer.add_scalar(k, v, step)
|
||
|
|
||
|
|
||
2 years ago
|
def on_pretrain_routine_start(trainer):
|
||
2 years ago
|
global writer
|
||
|
writer = SummaryWriter(str(trainer.save_dir))
|
||
|
|
||
|
|
||
|
def on_batch_end(trainer):
|
||
2 years ago
|
_log_scalars(trainer.label_loss_items(trainer.tloss, prefix="train"), trainer.epoch + 1)
|
||
2 years ago
|
|
||
|
|
||
2 years ago
|
def on_fit_epoch_end(trainer):
|
||
2 years ago
|
_log_scalars(trainer.metrics, trainer.epoch + 1)
|
||
2 years ago
|
|
||
|
|
||
2 years ago
|
callbacks = {
|
||
|
"on_pretrain_routine_start": on_pretrain_routine_start,
|
||
2 years ago
|
"on_fit_epoch_end": on_fit_epoch_end,
|
||
2 years ago
|
"on_batch_end": on_batch_end}
|