Add flops, num_params, inference speed logging and best.pt logging (#84)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Author: Ayush Chaurasia
Date: 2022-12-24 00:44:21 +05:30
Committed by: GitHub
Parent: f0fff8c13e
Commit: ae2443c210
3 changed files with 47 additions and 14 deletions

@@ -1,3 +1,5 @@
+from ultralytics.yolo.utils.torch_utils import get_flops, get_num_params
 try:
     import clearml
     from clearml import Task
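
For context, a rough sketch of what parameter- and FLOPs-counting helpers like these typically compute. This is an illustration only, not the ultralytics implementation; it assumes the optional `thop` profiler is installed and uses placeholder function names.

from copy import deepcopy

import torch


def count_params(model):
    # Total number of learnable parameters in the model.
    return sum(p.numel() for p in model.parameters())


def estimate_gflops(model, imgsz=640):
    # Approximate GFLOPs for one forward pass at imgsz x imgsz, assuming the
    # optional `thop` profiler is available; falls back to 0.0 otherwise.
    try:
        import thop
        p = next(model.parameters())
        im = torch.empty(1, 3, imgsz, imgsz, device=p.device)
        macs, _ = thop.profile(deepcopy(model), inputs=(im,), verbose=False)
        return macs * 2 / 1e9  # MACs -> FLOPs, reported in GFLOPs
    except Exception:
        return 0.0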
@@ -38,8 +40,23 @@ def on_val_end(trainer):
     _log_scalers(val_loss_dict, "val", trainer.epoch)
     _log_scalers(metrics, "metrics", trainer.epoch)
+    if trainer.epoch == 0:
+        infer_speed = trainer.validator.speed[1]
+        model_info = {
+            "inference_speed": infer_speed,
+            "flops@640": get_flops(trainer.model),
+            "params": get_num_params(trainer.model)}
+        _log_scalers(model_info, "model")
+def on_train_end(trainer):
+    task = Task.current_task()
+    if task:
+        task.update_output_model(model_path=str(trainer.best), model_name='Best Model', auto_delete_file=False)
 callbacks = {
     "before_train": before_train,
     "on_val_end": on_val_end,
-    "on_batch_end": on_batch_end,}
+    "on_batch_end": on_batch_end,
+    "on_train_end": on_train_end}