ultralytics 8.0.57 Comet, AMP, Classify, Docker updates (#1601)

Co-authored-by: Laughing <61612323+Laughing-q@users.noreply.github.com>
Co-authored-by: Ayush Chaurasia <ayush.chaurarsia@gmail.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Author: Glenn Jocher
Date: 2023-03-24 19:07:06 +01:00
Committed by: GitHub
Parent: 28e48be5b6
Commit: ef03e6732a
10 changed files with 46 additions and 39 deletions

View File

@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
-__version__ = '8.0.56'
+__version__ = '8.0.57'
 
 from ultralytics.yolo.engine.model import YOLO
 from ultralytics.yolo.utils.checks import check_yolo as checks
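A quick way to confirm the bumped version is installed is to import the package and run the bundled environment check; `checks` is just the `check_yolo` alias exported above. A minimal sketch:

```python
import ultralytics

print(ultralytics.__version__)  # expect '8.0.57' after this release
ultralytics.checks()            # check_yolo alias: prints a short version/hardware summary
```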

View File

@@ -1,8 +1,8 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 # Default training settings and hyperparameters for medium-augmentation COCO training
 
-task: detect  # inference task, i.e. detect, segment, classify
-mode: train  # YOLO mode, i.e. train, val, predict, export
+task: detect  # YOLO task, i.e. detect, segment, classify, pose
+mode: train  # YOLO mode, i.e. train, val, predict, export, track, benchmark
 
 # Train settings -------------------------------------------------------------------------------------------------------
 model:  # path to model file, i.e. yolov8n.pt, yolov8n.yaml
@@ -30,6 +30,7 @@ rect: False  # support rectangular training if mode='train', support rectangular
 cos_lr: False  # use cosine learning rate scheduler
 close_mosaic: 10  # disable mosaic augmentation for final 10 epochs
 resume: False  # resume training from last checkpoint
+amp: True  # Automatic Mixed Precision (AMP) training, choices=[True, False], True runs AMP check
 # Segmentation
 overlap_mask: True  # masks should overlap during training (segment train only)
 mask_ratio: 4  # mask downsample ratio (segment train only)
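Since `amp` is now a first-class train setting, it can be overridden like any other hyperparameter. A minimal sketch (the dataset YAML and epoch count are placeholders):

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
# amp=True (default) runs the AMP sanity check and trains with mixed precision;
# amp=False skips the check and trains in full precision.
model.train(data='coco128.yaml', epochs=3, amp=False)
```

The same override works from the CLI, e.g. `yolo detect train model=yolov8n.pt data=coco128.yaml amp=False`.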

View File

@@ -207,12 +207,20 @@ def check_det_dataset(dataset, autodownload=True):
     data = yaml_load(data, append_filename=True)  # dictionary
 
     # Checks
-    for k in 'train', 'val', 'names':
+    for k in 'train', 'val':
         if k not in data:
             raise SyntaxError(
-                emojis(f"{dataset} '{k}:' key missing ❌.\n'train', 'val' and 'names' are required in all data YAMLs."))
+                emojis(f"{dataset} '{k}:' key missing ❌.\n'train' and 'val' are required in all data YAMLs."))
+    if 'names' not in data and 'nc' not in data:
+        raise SyntaxError(emojis(f"{dataset} key missing ❌.\n either 'names' or 'nc' are required in all data YAMLs."))
+    if 'names' in data and 'nc' in data and len(data['names']) != data['nc']:
+        raise SyntaxError(emojis(f"{dataset} 'names' length {len(data['names'])} and 'nc: {data['nc']}' must match."))
+    if 'names' not in data:
+        data['names'] = [f'class_{i}' for i in range(data['nc'])]
+    else:
+        data['nc'] = len(data['names'])
     data['names'] = check_class_names(data['names'])
     data['nc'] = len(data['names'])
 
     # Resolve paths
     path = Path(extract_dir or data.get('path') or Path(data.get('yaml_file', '')).parent)  # dataset root
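The practical effect is that a dataset YAML may now declare either `names` or `nc`, and the missing one is derived. A standalone sketch of the reconciliation (simplified; the library function additionally normalizes names via `check_class_names`):

```python
def resolve_names(data: dict) -> dict:
    """Mirror of the new names/nc reconciliation in check_det_dataset (simplified)."""
    if 'names' not in data and 'nc' not in data:
        raise SyntaxError("either 'names' or 'nc' is required in a data YAML")
    if 'names' in data and 'nc' in data and len(data['names']) != data['nc']:
        raise SyntaxError(f"'names' length {len(data['names'])} and 'nc: {data['nc']}' must match")
    if 'names' not in data:
        data['names'] = [f'class_{i}' for i in range(data['nc'])]  # auto-generate placeholder names
    else:
        data['nc'] = len(data['names'])  # derive nc from the names list
    return data


print(resolve_names({'nc': 3}))                      # {'nc': 3, 'names': ['class_0', 'class_1', 'class_2']}
print(resolve_names({'names': ['person', 'car']}))   # {'names': ['person', 'car'], 'nc': 2}
```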

View File

@@ -142,6 +142,7 @@ class YOLO:
         self.task = task or guess_model_task(weights)
         self.ckpt_path = weights
         self.overrides['model'] = weights
+        self.overrides['task'] = self.task
 
     def _check_is_pytorch_model(self):
         """

View File

@@ -203,8 +203,8 @@ class BaseTrainer:
         self.model = self.model.to(self.device)
         self.set_model_attributes()
         # Check AMP
-        self.amp = torch.tensor(True).to(self.device)
-        if RANK in (-1, 0):  # Single-GPU and DDP
+        self.amp = torch.tensor(self.args.amp).to(self.device)  # True or False
+        if self.amp and RANK in (-1, 0):  # Single-GPU and DDP
             callbacks_backup = callbacks.default_callbacks.copy()  # backup callbacks as check_amp() resets them
             self.amp = torch.tensor(check_amp(self.model), device=self.device)
             callbacks.default_callbacks = callbacks_backup  # restore callbacks
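A standalone sketch of the new gating (the rank and check function are stand-ins, not the trainer's actual attributes): the `check_amp` run now only happens when the user left `amp=True`, and only on the main process.

```python
import torch


def resolve_amp(requested_amp: bool, device: str, rank: int, amp_check) -> torch.Tensor:
    """Simplified version of the BaseTrainer AMP gating added in this commit."""
    amp = torch.tensor(requested_amp).to(device)        # start from the user's amp setting
    if amp and rank in (-1, 0):                         # single-GPU (-1) or DDP rank 0 only
        amp = torch.tensor(amp_check(), device=device)  # may downgrade to False if AMP looks unsafe
    return amp


print(resolve_amp(False, 'cpu', -1, lambda: True))   # tensor(False): check skipped entirely
print(resolve_amp(True, 'cpu', -1, lambda: False))   # tensor(False): check ran and disabled AMP
```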

View File

@@ -14,6 +14,7 @@ except (ImportError, AssertionError):
 def on_pretrain_routine_start(trainer):
     try:
         experiment = comet_ml.Experiment(project_name=trainer.args.project or 'YOLOv8')
+        experiment.set_name(trainer.args.name)
         experiment.log_parameters(vars(trainer.args))
     except Exception as e:
         LOGGER.warning(f'WARNING ⚠️ Comet installed but not initialized correctly, not logging this run. {e}')
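With the callback now calling `experiment.set_name`, the run name chosen at train time shows up as the Comet experiment name. A usage sketch, assuming `comet_ml` is installed and `COMET_API_KEY` is configured:

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
# 'project' maps to the Comet project (falling back to 'YOLOv8'),
# and 'name' now also becomes the Comet experiment name.
model.train(data='coco128.yaml', epochs=3, project='yolo-experiments', name='baseline')
```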