Start export implementation (#110)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
@@ -1,4 +1,5 @@
import contextlib
import inspect
import logging.config
import os
import platform
@@ -13,6 +14,7 @@ import pandas as pd
# Constants
FILE = Path(__file__).resolve()
ROOT = FILE.parents[2]  # YOLO
DEFAULT_CONFIG = ROOT / "yolo/configs/default.yaml"
RANK = int(os.getenv('RANK', -1))
DATASETS_DIR = Path(os.getenv('YOLOv5_DATASETS_DIR', ROOT.parent / 'datasets'))  # global datasets directory
NUM_THREADS = min(8, max(1, os.cpu_count() - 1))  # number of YOLOv5 multiprocessing threads
@@ -98,6 +100,12 @@ def is_writeable(dir, test=False):
        return False


def get_default_args(func):
    # Get func() default arguments
    signature = inspect.signature(func)
    return {k: v.default for k, v in signature.parameters.items() if v.default is not inspect.Parameter.empty}


def user_config_dir(dir='Ultralytics', env_var='YOLOV5_CONFIG_DIR'):
    # Return path of user configuration directory. Prefer environment variable if exists. Make dir if required.
    env = os.getenv(env_var)
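Note on the new get_default_args() helper above: it inspects a function signature and keeps only the parameters that declare defaults. A minimal, self-contained sketch (the example function is made up purely for illustration):

import inspect


def get_default_args(func):
    # Return {name: default} for every parameter of func that declares a default value
    signature = inspect.signature(func)
    return {k: v.default for k, v in signature.parameters.items() if v.default is not inspect.Parameter.empty}


def example(model, imgsz=640, half=False):  # hypothetical function, used only for this demo
    pass


print(get_default_args(example))  # {'imgsz': 640, 'half': False}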
@@ -13,6 +13,7 @@ import torch

from ultralytics.yolo.utils import (AUTOINSTALL, FONT, LOGGER, ROOT, USER_CONFIG_DIR, TryExcept, colorstr, emojis,
                                    is_docker, is_notebook)
from ultralytics.yolo.utils.ops import make_divisible


def is_ascii(s=''):
@@ -21,6 +22,18 @@ def is_ascii(s=''):
    return len(s.encode().decode('ascii', 'ignore')) == len(s)


def check_imgsz(imgsz, s=32, floor=0):
    # Verify image size is a multiple of stride s in each dimension
    if isinstance(imgsz, int):  # integer i.e. img_size=640
        new_size = max(make_divisible(imgsz, int(s)), floor)
    else:  # list i.e. img_size=[640, 480]
        imgsz = list(imgsz)  # convert to list if tuple
        new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz]
    if new_size != imgsz:
        LOGGER.warning(f'WARNING ⚠️ --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}')
    return new_size


def check_version(current="0.0.0", minimum="0.0.0", name="version ", pinned=False, hard=False, verbose=False):
    # Check version vs. required version
    current, minimum = (pkg.parse_version(x) for x in (current, minimum))
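Note on check_imgsz() above: it rounds image sizes up to the nearest multiple of the model stride via make_divisible(). A stand-alone sketch of the behaviour, with make_divisible re-implemented here only so the example runs on its own:

import math


def make_divisible(x, divisor):
    # Round x up to the nearest multiple of divisor (mirrors ultralytics.yolo.utils.ops.make_divisible)
    return math.ceil(x / divisor) * divisor


def check_imgsz(imgsz, s=32, floor=0):
    # Verify image size is a multiple of stride s in each dimension
    if isinstance(imgsz, int):
        return max(make_divisible(imgsz, int(s)), floor)
    return [max(make_divisible(x, int(s)), floor) for x in imgsz]


print(check_imgsz(640))          # 640, already a multiple of 32
print(check_imgsz([641, 480]))   # [672, 480], rounded up (the real helper also logs a warning)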
@@ -93,7 +106,7 @@ def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), insta
            LOGGER.warning(f'{prefix} ❌ {e}')


def check_suffix(file='yolov5s.pt', suffix=('.pt',), msg=''):
def check_suffix(file='yolov8n.pt', suffix=('.pt',), msg=''):
    # Check file(s) for acceptable suffix
    if file and suffix:
        if isinstance(suffix, str):
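For context, check_suffix() only changes its default weights name here (yolov5s.pt -> yolov8n.pt); the body is otherwise untouched by this hunk. A rough sketch of how the check works, reconstructed from the YOLOv5-era helper and shown purely for illustration:

from pathlib import Path


def check_suffix(file='yolov8n.pt', suffix=('.pt',), msg=''):
    # Check file(s) for acceptable suffix; raises AssertionError on mismatch
    if file and suffix:
        if isinstance(suffix, str):
            suffix = (suffix,)
        for f in (file if isinstance(file, (list, tuple)) else [file]):
            s = Path(f).suffix.lower()  # file suffix
            if len(s):
                assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}"


check_suffix('yolov8n.pt', '.pt')    # passes silently
# check_suffix('model.onnx', '.pt')  # would raise AssertionError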
@@ -1,28 +0,0 @@
from pathlib import Path
from typing import Dict, Union

from omegaconf import DictConfig, OmegaConf

from ultralytics.yolo.utils.configs.hydra_patch import check_config_mismatch


def get_config(config: Union[str, DictConfig], overrides: Union[str, Dict] = {}):
    """
    Accepts yaml file name or DictConfig containing experiment configuration.
    Returns training args namespace
    :param overrides: Overrides str or Dict
    :param config: Optional file name or DictConfig object
    """
    if isinstance(config, (str, Path)):
        config = OmegaConf.load(config)
    elif isinstance(config, Dict):
        config = OmegaConf.create(config)
    # override
    if isinstance(overrides, str):
        overrides = OmegaConf.load(overrides)
    elif isinstance(overrides, Dict):
        overrides = OmegaConf.create(overrides)

    check_config_mismatch(dict(overrides).keys(), dict(config).keys())

    return OmegaConf.merge(config, overrides)
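The removed get_config() above boiled down to loading two OmegaConf objects and merging them, with overrides taking precedence. A minimal sketch of that merge (keys and values invented for the example):

from omegaconf import OmegaConf

# Base config and overrides merged the way the deleted get_config() did
base = OmegaConf.create({'epochs': 100, 'imgsz': 640, 'optimizer': 'SGD'})
overrides = OmegaConf.create({'epochs': 3, 'imgsz': 320})
cfg = OmegaConf.merge(base, overrides)  # override values win
print(cfg.epochs, cfg.imgsz, cfg.optimizer)  # 3 320 SGD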
@@ -1,102 +0,0 @@
# YOLO 🚀 by Ultralytics, GPL-3.0 license
# Default training settings and hyperparameters for medium-augmentation COCO training

# Task and Mode
task: "classify" # choices=['detect', 'segment', 'classify', 'init'] # init is a special case
mode: "train" # choice=['train', 'val', 'predict']

# Train settings -------------------------------------------------------------------------------------------------------
model: null # i.e. yolov5s.pt, yolo.yaml
data: null # i.e. coco128.yaml
epochs: 100
batch_size: 16
imgsz: 640
nosave: False
cache: False # True/ram, disk or False
device: '' # cuda device, i.e. 0 or 0,1,2,3 or cpu
workers: 8
project: null
name: null
exist_ok: False
pretrained: False
optimizer: 'SGD' # choices=['SGD', 'Adam', 'AdamW', 'RMSProp']
verbose: False
seed: 0
deterministic: True
local_rank: -1
single_cls: False # train multi-class data as single-class
image_weights: False # use weighted image selection for training
rect: False # support rectangular training
cos_lr: False # use cosine LR scheduler
close_mosaic: 10 # disable mosaic for final 10 epochs
# Segmentation
overlap_mask: True # masks overlap
mask_ratio: 4 # mask downsample ratio
# Classification
dropout: False # use dropout
resume: False


# Val/Test settings ----------------------------------------------------------------------------------------------------
noval: False
save_json: False
save_hybrid: False
conf_thres: 0.001
iou_thres: 0.7
max_det: 300
half: True
dnn: False # use OpenCV DNN for ONNX inference
plots: True

# Prediction settings:
source: "ultralytics/assets/"
view_img: False
save_txt: False
save_conf: False
save_crop: False
hide_labels: False # hide labels
hide_conf: False
vid_stride: 1 # video frame-rate stride
line_thickness: 3 # bounding box thickness (pixels)
update: False # Update all models
visualize: False
augment: False
agnostic_nms: False # class-agnostic NMS
retina_masks: False

# Hyperparameters ------------------------------------------------------------------------------------------------------
lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3)
lrf: 0.01 # final OneCycleLR learning rate (lr0 * lrf)
momentum: 0.937 # SGD momentum/Adam beta1
weight_decay: 0.0005 # optimizer weight decay 5e-4
warmup_epochs: 3.0 # warmup epochs (fractions ok)
warmup_momentum: 0.8 # warmup initial momentum
warmup_bias_lr: 0.1 # warmup initial bias lr
box: 7.5 # box loss gain
cls: 0.5 # cls loss gain (scale with pixels)
dfl: 1.5 # dfl loss gain
fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5)
label_smoothing: 0.0
nbs: 64 # nominal batch size
hsv_h: 0.015 # image HSV-Hue augmentation (fraction)
hsv_s: 0.7 # image HSV-Saturation augmentation (fraction)
hsv_v: 0.4 # image HSV-Value augmentation (fraction)
degrees: 0.0 # image rotation (+/- deg)
translate: 0.1 # image translation (+/- fraction)
scale: 0.5 # image scale (+/- gain)
shear: 0.0 # image shear (+/- deg)
perspective: 0.0 # image perspective (+/- fraction), range 0-0.001
flipud: 0.0 # image flip up-down (probability)
fliplr: 0.5 # image flip left-right (probability)
mosaic: 1.0 # image mosaic (probability)
mixup: 0.0 # image mixup (probability)
copy_paste: 0.0 # segment copy-paste (probability)

# For debugging. Don't change
v5loader: True

# Hydra configs --------------------------------------------------------------------------------------------------------
hydra:
  output_subdir: null # disable hydra directory creation
  run:
    dir: .
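The default.yaml removed above is a plain OmegaConf/Hydra config. A small sketch of how such a file is loaded and overridden with dotlist-style arguments (the path is the repo-relative location referenced earlier and may differ in your checkout):

from omegaconf import OmegaConf

cfg = OmegaConf.load('yolo/configs/default.yaml')  # assumed repo-relative path
cfg = OmegaConf.merge(cfg, OmegaConf.from_dotlist(['task=detect', 'epochs=3', 'imgsz=320']))
print(cfg.task, cfg.epochs, cfg.imgsz)  # detect 3 320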
@@ -1,79 +0,0 @@
import sys
from difflib import get_close_matches
from textwrap import dedent

import hydra
from hydra.errors import ConfigCompositionException
from omegaconf import OmegaConf, open_dict
from omegaconf.errors import ConfigAttributeError, ConfigKeyError, OmegaConfBaseException

from ultralytics.yolo.utils import LOGGER, colorstr


def override_config(overrides, cfg):
    override_keys = [override.key_or_group for override in overrides]
    check_config_mismatch(override_keys, cfg.keys())
    for override in overrides:
        if override.package is not None:
            raise ConfigCompositionException(f"Override {override.input_line} looks like a config group"
                                             f" override, but config group '{override.key_or_group}' does not"
                                             " exist.")

        key = override.key_or_group
        value = override.value()
        try:
            if override.is_delete():
                config_val = OmegaConf.select(cfg, key, throw_on_missing=False)
                if config_val is None:
                    raise ConfigCompositionException(f"Could not delete from config. '{override.key_or_group}'"
                                                     " does not exist.")
                elif value is not None and value != config_val:
                    raise ConfigCompositionException("Could not delete from config. The value of"
                                                     f" '{override.key_or_group}' is {config_val} and not"
                                                     f" {value}.")

                last_dot = key.rfind(".")
                with open_dict(cfg):
                    if last_dot == -1:
                        del cfg[key]
                    else:
                        node = OmegaConf.select(cfg, key[0:last_dot])
                        del node[key[last_dot + 1:]]

            elif override.is_add():
                if OmegaConf.select(cfg, key, throw_on_missing=False) is None or isinstance(value, (dict, list)):
                    OmegaConf.update(cfg, key, value, merge=True, force_add=True)
                else:
                    assert override.input_line is not None
                    raise ConfigCompositionException(
                        dedent(f"""\
                        Could not append to config. An item is already at '{override.key_or_group}'.
                        Either remove + prefix: '{override.input_line[1:]}'
                        Or add a second + to add or override '{override.key_or_group}': '+{override.input_line}'
                        """))
            elif override.is_force_add():
                OmegaConf.update(cfg, key, value, merge=True, force_add=True)
            else:
                try:
                    OmegaConf.update(cfg, key, value, merge=True)
                except (ConfigAttributeError, ConfigKeyError) as ex:
                    raise ConfigCompositionException(f"Could not override '{override.key_or_group}'."
                                                     f"\nTo append to your config use +{override.input_line}") from ex
        except OmegaConfBaseException as ex:
            raise ConfigCompositionException(f"Error merging override {override.input_line}").with_traceback(
                sys.exc_info()[2]) from ex


def check_config_mismatch(overrides, cfg):
    mismatched = []
    for option in overrides:
        if option not in cfg and 'hydra.' not in option:
            mismatched.append(option)

    for option in mismatched:
        LOGGER.info(f"{colorstr(option)} is not a valid key. Similar keys: {get_close_matches(option, cfg, 3, 0.6)}")
    if mismatched:
        exit()


hydra._internal.config_loader_impl.ConfigLoaderImpl._apply_overrides_to_config = override_config
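The removed check_config_mismatch() leaned on difflib.get_close_matches to suggest valid keys when an override is misspelled. A self-contained sketch of that idea (config keys invented for the example):

from difflib import get_close_matches

cfg_keys = ['epochs', 'imgsz', 'batch_size', 'optimizer']  # example config keys
overrides = ['epoch', 'imgsz']  # 'epoch' is a typo

for option in overrides:
    if option not in cfg_keys and 'hydra.' not in option:
        # suggest up to 3 keys with similarity >= 0.6, as the deleted helper did
        print(f"{option} is not a valid key. Similar keys: {get_close_matches(option, cfg_keys, 3, 0.6)}")
# -> epoch is not a valid key. Similar keys: ['epochs']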
@@ -49,7 +49,7 @@ def attempt_download(file, repo='ultralytics/yolov5', release='v6.2'):
    # Attempt file download from GitHub release assets if not found locally. release = 'latest', 'v6.2', etc.

    def github_assets(repository, version='latest'):
        # Return GitHub repo tag and assets (i.e. ['yolov5s.pt', 'yolov5m.pt', ...])
        # Return GitHub repo tag and assets (i.e. ['yolov8n.pt', 'yolov5m.pt', ...])
        if version != 'latest':
            version = f'tags/{version}'  # i.e. tags/v6.2
        response = requests.get(f'https://api.github.com/repos/{repository}/releases/{version}').json()  # github api
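For reference, github_assets() queries the public GitHub releases API. A stand-alone version of that call, assuming the requests package and network access; the return statement is inferred from the YOLOv5-era helper and kept only as a sketch:

import requests


def github_assets(repository, version='latest'):
    # Return GitHub repo tag and asset names, e.g. ('v6.2', ['yolov5n.pt', 'yolov5s.pt', ...])
    if version != 'latest':
        version = f'tags/{version}'  # i.e. tags/v6.2
    response = requests.get(f'https://api.github.com/repos/{repository}/releases/{version}').json()
    return response['tag_name'], [x['name'] for x in response['assets']]


tag, assets = github_assets('ultralytics/yolov5', 'v6.2')
print(tag, assets[:3])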
@@ -1,6 +1,7 @@
import contextlib
import glob
import os
import urllib
from datetime import datetime
from pathlib import Path
from zipfile import ZipFile
@@ -43,7 +44,7 @@ def increment_path(path, exist_ok=False, sep='', mkdir=False):
    return path


def save_yaml(file='data.yaml', data=None):
def yaml_save(file='data.yaml', data=None):
    # Single-line safe yaml saving
    with open(file, 'w') as f:
        yaml.safe_dump({k: str(v) if isinstance(v, Path) else v for k, v in data.items()}, f, sort_keys=False)
@@ -52,7 +53,7 @@ def save_yaml(file='data.yaml', data=None):
def yaml_load(file='data.yaml'):
    # Single-line safe yaml loading
    with open(file, errors='ignore') as f:
        return yaml.safe_load(f)
        return {**yaml.safe_load(f), 'yaml_file': file}  # add YAML filename to dict and return


def unzip_file(file, path=None, exclude=('.DS_Store', '__MACOSX')):
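A quick round trip through the renamed yaml_save()/yaml_load() pair, showing the 'yaml_file' key that yaml_load() now injects (the temporary file name is arbitrary):

from pathlib import Path

import yaml


def yaml_save(file='data.yaml', data=None):
    # Single-line safe yaml saving; Path values are stringified first
    with open(file, 'w') as f:
        yaml.safe_dump({k: str(v) if isinstance(v, Path) else v for k, v in data.items()}, f, sort_keys=False)


def yaml_load(file='data.yaml'):
    # Single-line safe yaml loading; the source filename is appended under 'yaml_file'
    with open(file, errors='ignore') as f:
        return {**yaml.safe_load(f), 'yaml_file': file}


yaml_save('example.yaml', {'epochs': 3, 'save_dir': Path('runs/train')})
print(yaml_load('example.yaml'))  # {'epochs': 3, 'save_dir': 'runs/train', 'yaml_file': 'example.yaml'}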
@@ -77,6 +78,24 @@ def file_date(path=__file__):
    return f'{t.year}-{t.month}-{t.day}'


def file_size(path):
    # Return file/dir size (MB)
    mb = 1 << 20  # bytes to MiB (1024 ** 2)
    path = Path(path)
    if path.is_file():
        return path.stat().st_size / mb
    elif path.is_dir():
        return sum(f.stat().st_size for f in path.glob('**/*') if f.is_file()) / mb
    else:
        return 0.0


def url2file(url):
    # Convert URL to filename, i.e. https://url.com/file.txt?auth -> file.txt
    url = str(Path(url)).replace(':/', '://')  # Pathlib turns :// -> :/
    return Path(urllib.parse.unquote(url)).name.split('?')[0]  # '%2F' to '/', split https://url.com/file.txt?auth


def get_latest_run(search_dir='.'):
    # Return path to most recent 'last.pt' in /runs (i.e. to --resume from)
    last_list = glob.glob(f'{search_dir}/**/last*.pt', recursive=True)
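The new url2file() strips query strings and URL-encoding from a download link; a short sketch (the URL is made up):

import urllib.parse
from pathlib import Path


def url2file(url):
    # Convert URL to filename, i.e. https://url.com/file.txt?auth -> file.txt
    url = str(Path(url)).replace(':/', '://')  # Pathlib turns :// -> :/
    return Path(urllib.parse.unquote(url)).name.split('?')[0]


print(url2file('https://example.com/models/yolov8n.pt?token=abc'))  # yolov8n.pt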
@@ -135,7 +135,7 @@ def non_max_suppression(
    for xi, x in enumerate(prediction):  # image index, image inference
        # Apply constraints
        # x[((x[:, 2:4] < min_wh) | (x[:, 2:4] > max_wh)).any(1), 4] = 0  # width-height
        x = x.T[xc[xi]]  # confidence
        x = x.transpose(0, -1)[xc[xi]]  # confidence

        # Cat apriori labels if autolabelling
        if labels and len(labels[xi]):
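On the x.T -> x.transpose(0, -1) change: the two agree on the 2-D per-image tensor handled here, but Tensor.T is deprecated in recent PyTorch for tensors with more than two dimensions, whereas transpose(0, -1) always just swaps the first and last axes. A small sketch with arbitrary shapes:

import torch

x = torch.zeros(84, 8400)        # 2-D per-image predictions
print(x.T.shape)                 # torch.Size([8400, 84])
print(x.transpose(0, -1).shape)  # torch.Size([8400, 84]), identical result

y = torch.zeros(4, 84, 8400)     # higher-rank tensor: .T would warn, transpose(0, -1) stays explicit
print(y.transpose(0, -1).shape)  # torch.Size([8400, 84, 4])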
@@ -135,8 +135,8 @@ def model_info(model, verbose=False, imgsz=640):

    flops = get_flops(model, imgsz)
    fs = f', {flops:.1f} GFLOPs' if flops else ''
    name = Path(model.yaml_file).stem.replace('yolov5', 'YOLOv5') if hasattr(model, 'yaml_file') else 'Model'
    LOGGER.info(f"{name} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")
    m = Path(getattr(model, 'yaml_file', '') or model.yaml.get('yaml_file', '')).stem.replace('yolo', 'YOLO') or 'Model'
    LOGGER.info(f"{m} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")


def get_num_params(model):
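The new model name derivation falls back from a model.yaml_file attribute to a 'yaml_file' key inside model.yaml and upper-cases the 'yolo' prefix. A sketch of just that expression, with a dummy object standing in for the model:

from pathlib import Path


class Dummy:
    # stand-in for a model whose config dict records its source YAML
    yaml = {'yaml_file': 'yolov8n.yaml'}


model = Dummy()
m = Path(getattr(model, 'yaml_file', '') or model.yaml.get('yaml_file', '')).stem.replace('yolo', 'YOLO') or 'Model'
print(m)  # YOLOv8n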