ultralytics 8.0.100 add Mosaic9() augmentation (#2605)

Co-authored-by: Ayush Chaurasia <ayush.chaurarsia@gmail.com>
Co-authored-by: Tommy in Tongji <36354458+TommyZihao@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: BIGBOSS-FOX <47949596+BIGBOSS-FOX@users.noreply.github.com>
Co-authored-by: xbkaishui <xxkaishui@gmail.com>
Glenn Jocher
2023-05-14 20:43:35 +02:00
committed by GitHub
parent db1c5885d5
commit dce4efce48
23 changed files with 351 additions and 64 deletions


@@ -201,15 +201,16 @@ class YOLO:
         self.model.load(weights)
         return self

-    def info(self, verbose=True):
+    def info(self, detailed=False, verbose=True):
         """
         Logs model info.

         Args:
+            detailed (bool): Show detailed information about model.
             verbose (bool): Controls verbosity.
         """
         self._check_is_pytorch_model()
-        self.model.info(verbose=verbose)
+        return self.model.info(detailed=detailed, verbose=verbose)

     def fuse(self):
         """Fuse PyTorch Conv2d and BatchNorm2d layers."""


@@ -190,17 +190,6 @@ class BaseTrainer:
         else:
             self._do_train(world_size)

-    def _pre_caching_dataset(self):
-        """
-        Caching dataset before training to avoid NCCL timeout.
-        Must be done before DDP initialization.
-        See https://github.com/ultralytics/ultralytics/pull/2549 for details.
-        """
-        if RANK in (-1, 0):
-            LOGGER.info('Pre-caching dataset to avoid NCCL timeout')
-            self.get_dataloader(self.trainset, batch_size=1, rank=RANK, mode='train')
-            self.get_dataloader(self.testset, batch_size=1, rank=-1, mode='val')
-
     def _setup_ddp(self, world_size):
         """Initializes and sets the DistributedDataParallel parameters for training."""
         torch.cuda.set_device(RANK)
@@ -274,7 +263,6 @@ class BaseTrainer:
     def _do_train(self, world_size=1):
         """Train completed, evaluate and plot if specified by arguments."""
         if world_size > 1:
-            self._pre_caching_dataset()
             self._setup_ddp(world_size)
         self._setup_train(world_size)
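For context, the removed _pre_caching_dataset() only ran on the multi-GPU path (world_size > 1), immediately before _setup_ddp(). A minimal sketch of a run that exercises that path, assuming two visible GPUs and using 'yolov8n.pt' and 'coco128.yaml' only as example arguments:

    from ultralytics import YOLO

    model = YOLO('yolov8n.pt')  # example weights file
    # device=[0, 1] requests two GPUs, so world_size > 1 and _setup_ddp()
    # now runs directly, without the pre-caching step removed above.
    model.train(data='coco128.yaml', epochs=1, device=[0, 1])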