ultralytics 8.0.21 Windows, segments, YAML fixes (#655)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: corey-nm <109536191+corey-nm@users.noreply.github.com>
Glenn Jocher
2023-01-26 23:07:27 +01:00
committed by GitHub
parent dc9502c700
commit 6c44ce21d9
16 changed files with 147 additions and 146 deletions

View File

@@ -5,6 +5,7 @@ import inspect
import logging.config
import os
import platform
import re
import subprocess
import sys
import tempfile
@@ -113,12 +114,66 @@ class IterableSimpleNamespace(SimpleNamespace):
return getattr(self, key, default)
def yaml_save(file='data.yaml', data=None):
"""
Save YAML data to a file.
Args:
file (str, optional): File name. Default is 'data.yaml'.
data (dict, optional): Data to save in YAML format. Default is None.
Returns:
None: Data is saved to the specified file.
"""
file = Path(file)
if not file.parent.exists():
# Create parent directories if they don't exist
file.parent.mkdir(parents=True, exist_ok=True)
with open(file, 'w') as f:
# Dump data to file in YAML format, converting Path objects to strings
yaml.safe_dump({k: str(v) if isinstance(v, Path) else v for k, v in data.items()}, f, sort_keys=False)
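
A minimal usage sketch for the relocated yaml_save helper (not part of this diff; the import path assumes these helpers live in ultralytics.yolo.utils, and the file name and dict contents are illustrative):

from pathlib import Path
from ultralytics.yolo.utils import yaml_save

# Parent directories are created if missing, and Path values are converted to str before dumping
yaml_save('runs/exp/args.yaml', {'epochs': 100, 'data': Path('coco128.yaml'), 'device': 0})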
def yaml_load(file='data.yaml', append_filename=False):
"""
Load YAML data from a file.
Args:
file (str, optional): File name. Default is 'data.yaml'.
append_filename (bool): Add the YAML filename to the YAML dictionary. Default is False.
Returns:
dict: YAML data and file name.
"""
with open(file, errors='ignore', encoding='utf-8') as f:
# Add YAML filename to dict and return
s = f.read() # string
if not s.isprintable(): # remove special characters
s = re.sub(r'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]+', '', s)
return {**yaml.safe_load(s), 'yaml_file': str(file)} if append_filename else yaml.safe_load(s)
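
A hedged usage sketch of yaml_load with the append_filename behavior (same import-path assumption as above; the file name is illustrative):

from ultralytics.yolo.utils import yaml_load

cfg = yaml_load('runs/exp/args.yaml', append_filename=True)  # plain dict plus a 'yaml_file' key
print(cfg['yaml_file'])  # 'runs/exp/args.yaml'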
def yaml_print(yaml_file: Union[str, Path, dict]) -> None:
"""
Pretty prints a yaml file or a yaml-formatted dictionary.
Args:
yaml_file: The file path of the yaml file or a yaml-formatted dictionary.
Returns:
None
"""
yaml_dict = yaml_load(yaml_file) if isinstance(yaml_file, (str, Path)) else yaml_file
dump = yaml.dump(yaml_dict, default_flow_style=False)
LOGGER.info(f"Printing '{colorstr('bold', 'black', yaml_file)}'\n\n{dump}")
# Default configuration
with open(DEFAULT_CFG_PATH, errors='ignore') as f:
DEFAULT_CFG_DICT = yaml.safe_load(f)
for k, v in DEFAULT_CFG_DICT.items():
if isinstance(v, str) and v.lower() == 'none':
DEFAULT_CFG_DICT[k] = None
DEFAULT_CFG_DICT = yaml_load(DEFAULT_CFG_PATH)
for k, v in DEFAULT_CFG_DICT.items():
if isinstance(v, str) and v.lower() == 'none':
DEFAULT_CFG_DICT[k] = None
DEFAULT_CFG_KEYS = DEFAULT_CFG_DICT.keys()
DEFAULT_CFG = IterableSimpleNamespace(**DEFAULT_CFG_DICT)
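A sketch of how the resulting objects can be consumed (key name is illustrative; DEFAULT_CFG exposes the get() defined on IterableSimpleNamespace above, and both names are assumed to be exported from ultralytics.yolo.utils):

from ultralytics.yolo.utils import DEFAULT_CFG, DEFAULT_CFG_DICT

print(DEFAULT_CFG.get('epochs'))       # attribute-backed lookup with an optional default
print(DEFAULT_CFG_DICT.get('epochs'))  # same value from the plain dict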
@@ -393,58 +448,6 @@ def threaded(func):
return wrapper
def yaml_save(file='data.yaml', data=None):
"""
Save YAML data to a file.
Args:
file (str, optional): File name. Default is 'data.yaml'.
data (dict, optional): Data to save in YAML format. Default is None.
Returns:
None: Data is saved to the specified file.
"""
file = Path(file)
if not file.parent.exists():
# Create parent directories if they don't exist
file.parent.mkdir(parents=True, exist_ok=True)
with open(file, 'w') as f:
# Dump data to file in YAML format, converting Path objects to strings
yaml.safe_dump({k: str(v) if isinstance(v, Path) else v for k, v in data.items()}, f, sort_keys=False)
def yaml_load(file='data.yaml', append_filename=False):
"""
Load YAML data from a file.
Args:
file (str, optional): File name. Default is 'data.yaml'.
append_filename (bool): Add the YAML filename to the YAML dictionary. Default is False.
Returns:
dict: YAML data and file name.
"""
with open(file, errors='ignore') as f:
# Add YAML filename to dict and return
return {**yaml.safe_load(f), 'yaml_file': str(file)} if append_filename else yaml.safe_load(f)
def yaml_print(yaml_file: Union[str, Path, dict]) -> None:
"""
Pretty prints a yaml file or a yaml-formatted dictionary.
Args:
yaml_file: The file path of the yaml file or a yaml-formatted dictionary.
Returns:
None
"""
yaml_dict = yaml_load(yaml_file) if isinstance(yaml_file, (str, Path)) else yaml_file
dump = yaml.dump(yaml_dict, default_flow_style=False)
LOGGER.info(f"Printing '{colorstr('bold', 'black', yaml_file)}'\n\n{dump}")
def set_sentry():
"""
Initialize the Sentry SDK for error tracking and reporting if pytest is not currently running.

View File

@@ -207,9 +207,9 @@ def check_file(file, suffix=''):
# Search/download file (if necessary) and return path
check_suffix(file, suffix) # optional
file = str(file) # convert to str()
if Path(file).is_file() or not file: # exists
if not file or ('://' not in file and Path(file).is_file()): # exists ('://' check required in Windows Python<3.10)
return file
elif file.startswith(('http:/', 'https:/')): # download
elif file.lower().startswith(('https://', 'http://', 'rtsp://', 'rtmp://')): # download
url = file # warning: Pathlib turns :// -> :/
file = Path(urllib.parse.unquote(file).split('?')[0]).name # '%2F' to '/', split https://url.com/file.txt?auth
if Path(file).is_file():
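
The local-file branch of the new condition, as a standalone sketch; is_local_file is a hypothetical helper, and the exact failure mode on older Windows Pythons is only what the inline comment above claims:

from pathlib import Path

def is_local_file(f: str) -> bool:
    # URLs are skipped before touching pathlib: on Windows with Python < 3.10,
    # stat-ing a string containing '://' can error instead of returning False
    return '://' not in f and Path(f).is_file()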
@@ -276,7 +276,7 @@ def git_describe(path=ROOT): # path must be a directory
try:
assert (Path(path) / '.git').is_dir()
return check_output(f'git -C {path} describe --tags --long --always', shell=True).decode()[:-1]
except Exception:
except AssertionError:
return ''

View File

@@ -104,7 +104,7 @@ def download(url, dir=Path.cwd(), unzip=True, delete=True, curl=False, threads=1
def download_one(url, dir):
# Download 1 file
success = True
if Path(url).is_file():
if '://' not in str(url) and Path(url).is_file(): # exists ('://' check required in Windows Python<3.10)
f = Path(url) # filename
else: # does not exist
f = dir / Path(url).name
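A hedged usage sketch of download() with the signature shown in this hunk, assuming this file is ultralytics/yolo/utils/downloads.py (URL and directory are illustrative):

from pathlib import Path
from ultralytics.yolo.utils.downloads import download

# downloads to datasets/coco128.zip, unzips, then deletes the archive (delete=True by default)
download('https://ultralytics.com/assets/coco128.zip', dir=Path('datasets'))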

View File

@@ -17,11 +17,8 @@ import torch.nn as nn
import torch.nn.functional as F
from torch.nn.parallel import DistributedDataParallel as DDP
import ultralytics
from ultralytics.yolo.utils import DEFAULT_CFG_DICT, DEFAULT_CFG_KEYS, LOGGER
from ultralytics.yolo.utils.checks import git_describe
from .checks import check_version
from ultralytics.yolo.utils.checks import check_version
LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1)) # https://pytorch.org/docs/stable/elastic/run.html
RANK = int(os.getenv('RANK', -1))
@@ -60,8 +57,8 @@ def DDP_model(model):
def select_device(device='', batch=0, newline=False):
# device = None or 'cpu' or 0 or '0' or '0,1,2,3'
ver = git_describe() or ultralytics.__version__ # git commit or pip package version
s = f'Ultralytics YOLOv{ver} 🚀 Python-{platform.python_version()} torch-{torch.__version__} '
from ultralytics import __version__
s = f'Ultralytics YOLOv{__version__} 🚀 Python-{platform.python_version()} torch-{torch.__version__} '
device = str(device).lower()
for remove in 'cuda:', 'none', '(', ')', '[', ']', "'", ' ':
device = device.replace(remove, '') # to string, 'cuda:0' -> '0' and '(0, 1)' -> '0,1'
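A short usage sketch of select_device after the import change, assuming this file is ultralytics/yolo/utils/torch_utils.py (device strings are illustrative):

from ultralytics.yolo.utils.torch_utils import select_device

device = select_device('cpu')  # returns a torch.device
device = select_device('0')    # first CUDA device, if one is available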
@@ -247,6 +244,7 @@ class ModelEMA:
""" Updated Exponential Moving Average (EMA) from https://github.com/rwightman/pytorch-image-models
Keeps a moving average of everything in the model state_dict (parameters and buffers)
For EMA details see https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage
To disable EMA set the `enabled` attribute to `False`.
"""
def __init__(self, model, decay=0.9999, tau=2000, updates=0):
@@ -256,22 +254,25 @@
self.decay = lambda x: decay * (1 - math.exp(-x / tau)) # decay exponential ramp (to help early epochs)
for p in self.ema.parameters():
p.requires_grad_(False)
self.enabled = True
def update(self, model):
# Update EMA parameters
self.updates += 1
d = self.decay(self.updates)
if self.enabled:
self.updates += 1
d = self.decay(self.updates)
msd = de_parallel(model).state_dict() # model state_dict
for k, v in self.ema.state_dict().items():
if v.dtype.is_floating_point: # true for FP16 and FP32
v *= d
v += (1 - d) * msd[k].detach()
# assert v.dtype == msd[k].dtype == torch.float32, f'{k}: EMA {v.dtype} and model {msd[k].dtype} must be FP32'
msd = de_parallel(model).state_dict() # model state_dict
for k, v in self.ema.state_dict().items():
if v.dtype.is_floating_point: # true for FP16 and FP32
v *= d
v += (1 - d) * msd[k].detach()
# assert v.dtype == msd[k].dtype == torch.float32, f'{k}: EMA {v.dtype}, model {msd[k].dtype}'
def update_attr(self, model, include=(), exclude=('process_group', 'reducer')):
# Update EMA attributes
copy_attr(self.ema, model, include, exclude)
if self.enabled:
copy_attr(self.ema, model, include, exclude)
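
A minimal sketch of the new enabled flag (the nn.Linear stand-in model is illustrative; ModelEMA is assumed importable from ultralytics.yolo.utils.torch_utils):

import torch.nn as nn
from ultralytics.yolo.utils.torch_utils import ModelEMA

model = nn.Linear(4, 2)  # stand-in for a real detection model
ema = ModelEMA(model)
ema.enabled = False      # update() and update_attr() become no-ops
ema.update(model)        # does not advance self.updates while disabled
ema.enabled = True
ema.update(model)        # blends model weights into the EMA copy again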
def strip_optimizer(f='best.pt', s=''):
@@ -285,8 +286,8 @@ def strip_optimizer(f='best.pt', s=''):
strip_optimizer(f)
Args:
f (str): file path to model state to strip the optimizer from. Default is 'best.pt'.
s (str): file path to save the model with stripped optimizer to. Default is ''. If not provided, the original file will be overwritten.
f (str): file path to model to strip the optimizer from. Default is 'best.pt'.
s (str): file path to save the model with stripped optimizer to. If not provided, 'f' will be overwritten.
Returns:
None
@@ -364,12 +365,12 @@ class EarlyStopping:
Early stopping class that stops training when a specified number of epochs have passed without improvement.
"""
def __init__(self, patience=30):
def __init__(self, patience=50):
"""
Initialize early stopping object
Args:
patience (int, optional): Number of epochs to wait after fitness stops improving before stopping. Default is 30.
patience (int, optional): Number of epochs to wait after fitness stops improving before stopping.
"""
self.best_fitness = 0.0 # i.e. mAP
self.best_epoch = 0
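A hedged sketch of the widened patience default; the __call__ usage is not shown in this hunk and is assumed to follow the class's established convention (epoch index and fitness value are illustrative):

from ultralytics.yolo.utils.torch_utils import EarlyStopping

stopper = EarlyStopping()               # patience now defaults to 50 epochs
stop = stopper(epoch=10, fitness=0.52)  # True once fitness has not improved for 'patience' epochs
if stop:
    print('early stopping triggered')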