Move loss calculation to head (#2874)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Laughing-q <1185102784@qq.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: Laughing <61612323+Laughing-q@users.noreply.github.com>
Author: Ayush Chaurasia
Date:   2023-05-30 22:54:30 +05:30
Committed by: GitHub
Parent: 7f077f7654
Commit: facb7861cf

9 changed files with 417 additions and 348 deletions

@@ -286,7 +286,7 @@ class Results(SimpleClass):
                     seg = masks[j].xyn[0].copy().reshape(-1)  # reversed mask.xyn, (n,2) to (n*2)
                     line = (c, *seg)
                 if kpts is not None:
-                    kpt = (kpts[j][:, :2] / d.orig_shape[[1, 0]]).reshape(-1).tolist()
+                    kpt = (kpts[j][:, :2].cpu() / d.orig_shape[[1, 0]]).reshape(-1).tolist()
                     line += (*kpt, )
                 line += (conf, ) * save_conf + (() if id is None else (id, ))
                 texts.append(('%g ' * len(line)).rstrip() % line)
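The added `.cpu()` call moves the keypoints off the GPU before dividing by `d.orig_shape`, a numpy array, which would otherwise fail with a device-mismatch error. A minimal sketch of the fixed normalization, with hypothetical shapes (17 keypoints, a 480x640 image):

```python
import numpy as np
import torch

device = 'cuda' if torch.cuda.is_available() else 'cpu'
kpts_j = torch.rand(17, 3, device=device)  # hypothetical (n, 3) keypoints: x, y, conf
orig_shape = np.array([480, 640])          # (h, w) of the source image

# .cpu() brings the tensor to the CPU before dividing by the numpy array;
# orig_shape[[1, 0]] reorders (h, w) to (w, h) so x is scaled by width
# and y by height, yielding coordinates normalized to [0, 1].
kpt = (kpts_j[:, :2].cpu() / orig_shape[[1, 0]]).reshape(-1).tolist()
print(len(kpt))  # 34 values: x0 y0 x1 y1 ...
```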

@@ -325,8 +325,7 @@ class BaseTrainer:
                 # Forward
                 with torch.cuda.amp.autocast(self.amp):
                     batch = self.preprocess_batch(batch)
-                    preds = self.model(batch['img'])
-                    self.loss, self.loss_items = self.criterion(preds, batch)
+                    self.loss, self.loss_items = self.model(batch)
                     if RANK != -1:
                         self.loss *= world_size
                     self.tloss = (self.tloss * i + self.loss_items) / (i + 1) if self.tloss is not None \
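After this change the trainer no longer calls a `criterion()` hook; passing the batch dict straight into the model yields `(loss, loss_items)`. A minimal sketch of the dispatch the model is now expected to implement; the linear layer and MSE criterion here are placeholders, not the real YOLO head or loss:

```python
import torch
import torch.nn as nn

class ModelWithLoss(nn.Module):
    """Sketch of a model that computes its own loss when forward()
    receives a batch dict, and plain predictions otherwise."""

    def __init__(self):
        super().__init__()
        self.net = nn.Linear(4, 2)  # placeholder for backbone + head
        self.criterion = None       # built lazily on first loss call

    def init_criterion(self):
        return nn.MSELoss()  # placeholder for the task-specific loss

    def loss(self, batch, preds=None):
        """Return (total_loss, loss_items), matching what the trainer unpacks."""
        if self.criterion is None:
            self.criterion = self.init_criterion()
        preds = self.predict(batch['img']) if preds is None else preds
        total = self.criterion(preds, batch['target'])
        return total, total.detach()

    def predict(self, x):
        return self.net(x)

    def forward(self, x):
        if isinstance(x, dict):  # training path: batch dict in, loss out
            return self.loss(x)
        return self.predict(x)   # inference path: image tensor in, preds out

# Usage mirroring the trainer's forward step:
model = ModelWithLoss()
batch = {'img': torch.rand(8, 4), 'target': torch.rand(8, 2)}
loss, loss_items = model(batch)
loss.backward()
```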
@@ -496,12 +495,6 @@ class BaseTrainer:
         """Build dataset"""
         raise NotImplementedError('build_dataset function not implemented in trainer')

-    def criterion(self, preds, batch):
-        """
-        Returns loss and individual loss items as Tensor.
-        """
-        raise NotImplementedError('criterion function not implemented in trainer')
-
     def label_loss_items(self, loss_items=None, prefix='train'):
         """
         Returns a loss dict with labelled training loss items tensor
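The `criterion()` stub is deleted, but `label_loss_items()` stays, since the trainer still needs named loss columns for logging. A hedged sketch of what a task-specific override might look like, assuming a three-item (box, cls, dfl) loss ordering for detection; the names are an assumption, not taken from this diff:

```python
def label_loss_items(self, loss_items=None, prefix='train'):
    """Sketch: map a loss-items tensor to labelled floats for logging.
    The (box, cls, dfl) names are an assumed detection-task ordering."""
    keys = [f'{prefix}/{name}' for name in ('box_loss', 'cls_loss', 'dfl_loss')]
    if loss_items is None:
        return keys  # validation asks for just the column names
    return dict(zip(keys, (round(float(x), 5) for x in loss_items)))
```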

@@ -157,12 +157,12 @@ class BaseValidator:
             # Inference
             with dt[1]:
-                preds = model(batch['img'])
+                preds = model(batch['img'], augment=self.args.augment)

             # Loss
             with dt[2]:
                 if self.training:
-                    self.loss += trainer.criterion(preds, batch)[1]
+                    self.loss += model.loss(batch, preds)[1]

             # Postprocess
             with dt[3]:
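During validation inside a training run, the loss is now accumulated through the same model-owned `loss()` method rather than the trainer's `criterion()`, reusing the predictions already computed in the inference step. A condensed sketch of that loop, assuming a model following the `loss(batch, preds)` convention shown earlier; the profiling contexts (`dt[...]`) and postprocessing are omitted:

```python
def validate(model, dataloader, training=True):
    """Sketch of the validator's inference + loss steps."""
    loss = 0.0
    for batch in dataloader:
        preds = model.predict(batch['img'])      # inference
        if training:
            loss += model.loss(batch, preds)[1]  # reuse preds; [1] = loss items
    return loss / max(len(dataloader), 1)        # mean loss items over batches
```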