ultralytics 8.0.51
add assets and CI actions (#1296)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Paul Kehrer <paulhkehrer@gmail.com>
@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
-__version__ = '8.0.50'
+__version__ = '8.0.51'
 
 from ultralytics.yolo.engine.model import YOLO
 from ultralytics.yolo.utils.checks import check_yolo as checks
Binary file not shown (image). Before: 476 KiB | After: 134 KiB
Binary file not shown (image). Before: 165 KiB | After: 49 KiB
@@ -14,7 +14,7 @@ import torch
 import torch.nn as nn
 from PIL import Image
 
-from ultralytics.yolo.utils import LOGGER, ROOT, yaml_load
+from ultralytics.yolo.utils import LINUX, LOGGER, ROOT, yaml_load
 from ultralytics.yolo.utils.checks import check_requirements, check_suffix, check_version, check_yaml
 from ultralytics.yolo.utils.downloads import attempt_download_asset, is_url
 from ultralytics.yolo.utils.ops import xywh2xyxy
@@ -143,7 +143,12 @@ class AutoBackend(nn.Module):
             metadata = w.parent / 'metadata.yaml'
         elif engine:  # TensorRT
             LOGGER.info(f'Loading {w} for TensorRT inference...')
-            import tensorrt as trt  # https://developer.nvidia.com/nvidia-tensorrt-download
+            try:
+                import tensorrt as trt  # noqa https://developer.nvidia.com/nvidia-tensorrt-download
+            except ImportError:
+                if LINUX:
+                    check_requirements('nvidia-tensorrt', cmds='-U --index-url https://pypi.ngc.nvidia.com')
+                import tensorrt as trt  # noqa
             check_version(trt.__version__, '7.0.0', hard=True)  # require tensorrt>=7.0.0
             if device.type == 'cpu':
                 device = torch.device('cuda:0')
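Note: the new try/except makes the TensorRT import self-healing on Linux — import, and only install on failure. A minimal sketch of the same lazy-install pattern, using a hypothetical ensure_import helper (not part of this commit or the ultralytics API):

import importlib
import subprocess
import sys


def ensure_import(module, package=None, index_url=None):
    # Import a module, pip-installing it on ImportError (hypothetical helper)
    try:
        return importlib.import_module(module)
    except ImportError:
        cmd = [sys.executable, '-m', 'pip', 'install', package or module]
        if index_url:
            cmd += ['--index-url', index_url]
        subprocess.check_call(cmd)
        return importlib.import_module(module)


# Usage mirroring the diff (assumes a Linux host that can reach NVIDIA's package index):
# trt = ensure_import('tensorrt', package='nvidia-tensorrt', index_url='https://pypi.ngc.nvidia.com')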
@@ -230,7 +235,7 @@ class AutoBackend(nn.Module):
         elif paddle:  # PaddlePaddle
             LOGGER.info(f'Loading {w} for PaddlePaddle inference...')
             check_requirements('paddlepaddle-gpu' if cuda else 'paddlepaddle')
-            import paddle.inference as pdi
+            import paddle.inference as pdi  # noqa
             w = Path(w)
             if not w.is_file():  # if not *.pdmodel
                 w = next(w.rglob('*.pdmodel'))  # get *.pdmodel file from *_paddle_model dir
@@ -260,11 +265,16 @@ class AutoBackend(nn.Module):
         if isinstance(metadata, (str, Path)) and Path(metadata).exists():
             metadata = yaml_load(metadata)
         if metadata:
-            stride = int(metadata['stride'])
+            for k, v in metadata.items():
+                if k in ('stride', 'batch'):
+                    metadata[k] = int(v)
+                elif k in ('imgsz', 'names') and isinstance(v, str):
+                    metadata[k] = eval(v)
+            stride = metadata['stride']
             task = metadata['task']
-            batch = int(metadata['batch'])
-            imgsz = eval(metadata['imgsz']) if isinstance(metadata['imgsz'], str) else metadata['imgsz']
-            names = eval(metadata['names']) if isinstance(metadata['names'], str) else metadata['names']
+            batch = metadata['batch']
+            imgsz = metadata['imgsz']
+            names = metadata['names']
         elif not (pt or triton or nn_module):
             LOGGER.warning(f"WARNING ⚠️ Metadata not found for 'model={weights}'")
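Note: the loop replaces four per-key casts with one normalization pass over the whole metadata dict. A self-contained sketch of the behavior on a sample dict (values invented for illustration, not from a real metadata.yaml):

metadata = {'stride': '32', 'batch': '1', 'imgsz': '[640, 640]', 'names': "{0: 'person'}", 'task': 'detect'}
for k, v in metadata.items():
    if k in ('stride', 'batch'):
        metadata[k] = int(v)
    elif k in ('imgsz', 'names') and isinstance(v, str):
        metadata[k] = eval(v)  # parses the string repr written at export time
print(metadata)  # stride/batch are now ints, imgsz/names parsed back into list/dict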
@@ -285,7 +295,7 @@ class AutoBackend(nn.Module):
             visualize (bool): whether to visualize the output predictions, defaults to False
 
         Returns:
-            (tuple): Tuple containing the raw output tensor, and the processed output for visualization (if visualize=True)
+            (tuple): Tuple containing the raw output tensor, and processed output for visualization (if visualize=True)
         """
         b, ch, h, w = im.shape  # batch, channel, height, width
         if self.fp16 and im.dtype != torch.float16:
@@ -67,7 +67,8 @@ class ConvTranspose(nn.Module):
 
 
 class DFL(nn.Module):
-    # Integral module of Distribution Focal Loss (DFL) proposed in Generalized Focal Loss https://ieeexplore.ieee.org/document/9792391
+    # Integral module of Distribution Focal Loss (DFL)
+    # Proposed in Generalized Focal Loss https://ieeexplore.ieee.org/document/9792391
     def __init__(self, c1=16):
         super().__init__()
         self.conv = nn.Conv2d(c1, 1, 1, bias=False).requires_grad_(False)
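Note: the comment split above only rewraps an over-long line; the module itself is unchanged. For readers new to DFL, its frozen 1x1 conv computes the expectation of a discrete distribution over c1 bins. A minimal numeric sketch of that integral (standalone illustration, not the module's exact forward):

import torch

c1 = 16
logits = torch.randn(c1)                                   # one predicted box-side distribution
probs = logits.softmax(0)                                  # probabilities over bins 0..c1-1
expected = (probs * torch.arange(c1, dtype=torch.float)).sum()
print(float(expected))  # soft box offset in [0, c1-1]; what the frozen conv computes per side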
@@ -8,7 +8,9 @@ import thop
 import torch
 import torch.nn as nn
 
-from ultralytics.nn.modules import *  # noqa: F403
+from ultralytics.nn.modules import (C1, C2, C3, C3TR, SPP, SPPF, Bottleneck, BottleneckCSP, C2f, C3Ghost, C3x, Classify,
+                                    Concat, Conv, ConvTranspose, Detect, DWConv, DWConvTranspose2d, Ensemble, Focus,
+                                    GhostBottleneck, GhostConv, Segment)
 from ultralytics.yolo.utils import DEFAULT_CFG_DICT, DEFAULT_CFG_KEYS, LOGGER, RANK, colorstr, emojis, yaml_load
 from ultralytics.yolo.utils.checks import check_requirements, check_yaml
 from ultralytics.yolo.utils.torch_utils import (fuse_conv_and_bn, fuse_deconv_and_bn, initialize_weights,
@@ -324,9 +326,9 @@ class ClassificationModel(BaseModel):
 
 def torch_safe_load(weight):
     """
-    This function attempts to load a PyTorch model with the torch.load() function. If a ModuleNotFoundError is raised, it
-    catches the error, logs a warning message, and attempts to install the missing module via the check_requirements()
-    function. After installation, the function again attempts to load the model using torch.load().
+    This function attempts to load a PyTorch model with the torch.load() function. If a ModuleNotFoundError is raised,
+    it catches the error, logs a warning message, and attempts to install the missing module via the
+    check_requirements() function. After installation, the function again attempts to load the model using torch.load().
 
     Args:
         weight (str): The file path of the PyTorch model.
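Note: the hunk above only rewraps the docstring; the retry-on-missing-module behavior it describes is unchanged. A minimal sketch of that pattern under the same assumptions (the lambda stands in for the real check_requirements helper):

import torch


def torch_safe_load_sketch(weight, check_requirements=lambda pkg: None):
    # Try torch.load; on ModuleNotFoundError, install the missing module and retry once
    try:
        return torch.load(weight, map_location='cpu')
    except ModuleNotFoundError as e:
        check_requirements(e.name)  # e.name is the missing module, e.g. 'omegaconf'
        return torch.load(weight, map_location='cpu')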
@@ -40,7 +40,10 @@ def linear_assignment(cost_matrix, thresh):
     if cost_matrix.size == 0:
         return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1]))
     matches, unmatched_a, unmatched_b = [], [], []
+
+    # TODO: investigate scipy.optimize.linear_sum_assignment() for lap.lapjv()
     cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
+
+    matches.extend([ix, mx] for ix, mx in enumerate(x) if mx >= 0)
     unmatched_a = np.where(x < 0)[0]
     unmatched_b = np.where(y < 0)[0]
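Note: the matches.extend line collapses the usual for/if/append loop into one generator expression. A toy equivalence check without numpy (values invented; mx < 0 means "unmatched"):

x = [2, -1, 0]  # row i is assigned to column x[i]
matches = []
matches.extend([ix, mx] for ix, mx in enumerate(x) if mx >= 0)
unmatched_a = [ix for ix, mx in enumerate(x) if mx < 0]
print(matches, unmatched_a)  # [[0, 2], [2, 0]] [1]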
@@ -327,4 +327,4 @@ def copy_default_cfg():
 
 if __name__ == '__main__':
     # entrypoint(debug='yolo predict model=yolov8n.pt')
-    entrypoint(debug='yolo train model=yolov8n-seg.pt')
+    entrypoint(debug='')
@@ -210,8 +210,7 @@ def check_det_dataset(dataset, autodownload=True):
     for k in 'train', 'val', 'names':
         if k not in data:
             raise SyntaxError(
-                emojis(f"{dataset} '{k}:' key missing ❌.\n"
-                       f"'train', 'val' and 'names' are required in data.yaml files."))
+                emojis(f"{dataset} '{k}:' key missing ❌.\n'train', 'val' and 'names' are required in all data YAMLs."))
     data['names'] = check_class_names(data['names'])
     data['nc'] = len(data['names'])
@@ -236,11 +235,11 @@ def check_det_dataset(dataset, autodownload=True):
     if val:
         val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])]  # val path
         if not all(x.exists() for x in val):
-            msg = f"\nDataset '{dataset}' not found ⚠️, missing paths %s" % [str(x) for x in val if not x.exists()]
+            m = f"\nDataset '{dataset}' images not found ⚠️, missing paths %s" % [str(x) for x in val if not x.exists()]
             if s and autodownload:
-                LOGGER.warning(msg)
+                LOGGER.warning(m)
             else:
-                raise FileNotFoundError(msg)
+                raise FileNotFoundError(m)
             t = time.time()
             if s.startswith('http') and s.endswith('.zip'):  # URL
                 safe_download(url=s, dir=DATASETS_DIR, delete=True)
@@ -69,7 +69,7 @@ from ultralytics.yolo.data.dataloaders.stream_loaders import LoadImages
 from ultralytics.yolo.data.utils import IMAGENET_MEAN, IMAGENET_STD, check_det_dataset
 from ultralytics.yolo.utils import (DEFAULT_CFG, LINUX, LOGGER, MACOS, __version__, callbacks, colorstr,
                                     get_default_args, yaml_save)
-from ultralytics.yolo.utils.checks import check_imgsz, check_requirements, check_version, check_yaml
+from ultralytics.yolo.utils.checks import check_imgsz, check_requirements, check_version
 from ultralytics.yolo.utils.files import file_size
 from ultralytics.yolo.utils.ops import Profile
 from ultralytics.yolo.utils.torch_utils import get_latest_opset, select_device, smart_inference_mode
@@ -601,7 +601,7 @@ class Exporter:
             if n >= n_images:
                 break
 
-        dataset = LoadImages(check_det_dataset(check_yaml(self.args.data))['train'], imgsz=imgsz, auto=False)
+        dataset = LoadImages(check_det_dataset(self.args.data)['train'], imgsz=imgsz, auto=False)
         converter.representative_dataset = lambda: representative_dataset_gen(dataset, n_images=100)
         converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
         converter.target_spec.supported_types = []
@@ -10,7 +10,7 @@ from ultralytics.yolo.cfg import get_cfg
 from ultralytics.yolo.engine.exporter import Exporter
 from ultralytics.yolo.utils import (DEFAULT_CFG, DEFAULT_CFG_DICT, DEFAULT_CFG_KEYS, LOGGER, ONLINE, RANK, ROOT,
                                     callbacks, is_git_dir, is_pip_package, yaml_load)
-from ultralytics.yolo.utils.checks import check_file, check_imgsz, check_pip_update, check_yaml
+from ultralytics.yolo.utils.checks import check_file, check_imgsz, check_pip_update_available, check_yaml
 from ultralytics.yolo.utils.downloads import GITHUB_ASSET_STEMS
 from ultralytics.yolo.utils.torch_utils import smart_inference_mode
@@ -158,7 +158,7 @@ class YOLO:
         Inform user of ultralytics package update availability
         """
         if ONLINE and is_pip_package():
-            check_pip_update()
+            check_pip_update_available()
 
     def reset(self):
         """
@@ -263,8 +263,11 @@ class Boxes:
         return self.boxes.__str__()
 
     def __repr__(self):
-        return (f'Ultralytics YOLO {self.__class__.__name__}\n' + f'type: {type(self.boxes)}\n' +
-                f'shape: {self.boxes.shape}\n' + f'dtype: {self.boxes.dtype}\n + {self.boxes.__repr__()}')
+        return (f'{self.__class__.__module__}.{self.__class__.__name__}\n'
+                f'type: {self.boxes.__class__.__module__}.{self.boxes.__class__.__name__}\n'
+                f'shape: {self.boxes.shape}\n'
+                f'dtype: {self.boxes.dtype}\n'
+                f'{self.boxes.__repr__()}')
 
     def __getitem__(self, idx):
         return Boxes(self.boxes[idx], self.orig_shape)
@@ -339,8 +342,11 @@ class Masks:
         return self.masks.__str__()
 
     def __repr__(self):
-        return (f'Ultralytics YOLO {self.__class__.__name__}\n' + f'type: {type(self.masks)}\n' +
-                f'shape: {self.masks.shape}\n' + f'dtype: {self.masks.dtype}\n + {self.masks.__repr__()}')
+        return (f'{self.__class__.__module__}.{self.__class__.__name__}\n'
+                f'type: {self.masks.__class__.__module__}.{self.masks.__class__.__name__}\n'
+                f'shape: {self.masks.shape}\n'
+                f'dtype: {self.masks.dtype}\n'
+                f'{self.masks.__repr__()}')
 
     def __getitem__(self, idx):
         return Masks(self.masks[idx], self.orig_shape)
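Note: both __repr__ rewrites swap the hard-coded 'Ultralytics YOLO' prefix for fully qualified names, and also fix a bug in the old string, where a misplaced quote embedded a literal '\n + ' instead of concatenating the tensor repr. A quick sketch of the new output shape on a toy class (not the real Boxes/Masks):

import torch


class Toy:
    def __init__(self, boxes):
        self.boxes = boxes

    def __repr__(self):
        return (f'{self.__class__.__module__}.{self.__class__.__name__}\n'
                f'type: {self.boxes.__class__.__module__}.{self.boxes.__class__.__name__}\n'
                f'shape: {self.boxes.shape}\n'
                f'dtype: {self.boxes.dtype}\n'
                f'{self.boxes.__repr__()}')


print(Toy(torch.zeros(2, 4)))  # __main__.Toy / type: torch.Tensor / shape / dtype / tensor repr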
@@ -126,6 +126,37 @@ class IterableSimpleNamespace(SimpleNamespace):
         return getattr(self, key, default)
 
 
+def set_logging(name=LOGGING_NAME, verbose=True):
+    # sets up logging for the given name
+    rank = int(os.getenv('RANK', -1))  # rank in world for Multi-GPU trainings
+    level = logging.INFO if verbose and rank in {-1, 0} else logging.ERROR
+    logging.config.dictConfig({
+        'version': 1,
+        'disable_existing_loggers': False,
+        'formatters': {
+            name: {
+                'format': '%(message)s'}},
+        'handlers': {
+            name: {
+                'class': 'logging.StreamHandler',
+                'formatter': name,
+                'level': level}},
+        'loggers': {
+            name: {
+                'level': level,
+                'handlers': [name],
+                'propagate': False}}})
+
+
+# Set logger
+set_logging(LOGGING_NAME, verbose=VERBOSE)  # run before defining LOGGER
+LOGGER = logging.getLogger(LOGGING_NAME)  # define globally (used in train.py, val.py, detect.py, etc.)
+if WINDOWS:  # emoji-safe logging
+    info_fn, warning_fn = LOGGER.info, LOGGER.warning
+    setattr(LOGGER, info_fn.__name__, lambda x: info_fn(emojis(x)))
+    setattr(LOGGER, warning_fn.__name__, lambda x: warning_fn(emojis(x)))
+
+
 def yaml_save(file='data.yaml', data=None):
     """
     Save YAML data to a file.
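Note: set_logging and the LOGGER setup move up here so the module defines its logger before anything uses it (the matching removal appears further down). A minimal standalone sketch of the same dictConfig pattern, with an invented logger name:

import logging.config

NAME = 'demo'
logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {NAME: {'format': '%(message)s'}},
    'handlers': {NAME: {'class': 'logging.StreamHandler', 'formatter': NAME, 'level': 'INFO'}},
    'loggers': {NAME: {'level': 'INFO', 'handlers': [NAME], 'propagate': False}}})
logging.getLogger(NAME).info('hello')  # prints: hello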
@@ -163,10 +194,13 @@ def yaml_load(file='data.yaml', append_filename=False):
         dict: YAML data and file name.
     """
     with open(file, errors='ignore', encoding='utf-8') as f:
-        # Add YAML filename to dict and return
         s = f.read()  # string
-        if not s.isprintable():  # remove special characters
+
+        # Remove special characters
+        if not s.isprintable():
             s = re.sub(r'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]+', '', s)
+
+        # Add YAML filename to dict and return
         return {**yaml.safe_load(s), 'yaml_file': str(file)} if append_filename else yaml.safe_load(s)
@@ -448,41 +482,6 @@ def colorstr(*input):
     return ''.join(colors[x] for x in args) + f'{string}' + colors['end']
 
 
-def remove_ansi_codes(string):
-    """
-    Remove ANSI escape sequences from a string.
-
-    Args:
-        string (str): The input string that may contain ANSI escape sequences.
-
-    Returns:
-        str: The input string with ANSI escape sequences removed.
-    """
-    return re.sub(r'\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]', '', string)
-
-
-def set_logging(name=LOGGING_NAME, verbose=True):
-    # sets up logging for the given name
-    rank = int(os.getenv('RANK', -1))  # rank in world for Multi-GPU trainings
-    level = logging.INFO if verbose and rank in {-1, 0} else logging.ERROR
-    logging.config.dictConfig({
-        'version': 1,
-        'disable_existing_loggers': False,
-        'formatters': {
-            name: {
-                'format': '%(message)s'}},
-        'handlers': {
-            name: {
-                'class': 'logging.StreamHandler',
-                'formatter': name,
-                'level': level}},
-        'loggers': {
-            name: {
-                'level': level,
-                'handlers': [name],
-                'propagate': False}}})
-
-
 class TryExcept(contextlib.ContextDecorator):
     # YOLOv8 TryExcept class. Usage: @TryExcept() decorator or 'with TryExcept():' context manager
     def __init__(self, msg='', verbose=True):
@@ -609,13 +608,6 @@ def set_settings(kwargs, file=USER_CONFIG_DIR / 'settings.yaml'):
 
 # Run below code on yolo/utils init ------------------------------------------------------------------------------------
 
-# Set logger
-set_logging(LOGGING_NAME, verbose=VERBOSE)  # run before defining LOGGER
-LOGGER = logging.getLogger(LOGGING_NAME)  # define globally (used in train.py, val.py, detect.py, etc.)
-if WINDOWS:
-    for fn in LOGGER.info, LOGGER.warning:
-        setattr(LOGGER, fn.__name__, lambda x: fn(emojis(x)))  # emoji safe logging
-
 # Check first-install steps
 PREFIX = colorstr('Ultralytics: ')
 SETTINGS = get_settings()
@@ -134,12 +134,20 @@ def check_latest_pypi_version(package_name='ultralytics'):
     return None
 
 
-def check_pip_update():
+def check_pip_update_available():
+    """
+    Checks if a new version of the ultralytics package is available on PyPI.
+
+    Returns:
+        bool: True if an update is available, False otherwise.
+    """
     from ultralytics import __version__
     latest = check_latest_pypi_version()
-    if pkg.parse_version(__version__) < pkg.parse_version(latest):
+    if pkg.parse_version(__version__) < pkg.parse_version(latest):  # update is available
         LOGGER.info(f'New https://pypi.org/project/ultralytics/{latest} available 😃 '
                     f"Update with 'pip install -U ultralytics'")
+        return True
+    return False
 
 
 def check_font(font='Arial.ttf'):
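Note: the rename to check_pip_update_available gains a docstring and an explicit bool return, so callers can branch on the result instead of relying on the log line. A sketch of the underlying version comparison, assuming pkg is pkg_resources as elsewhere in checks.py (packaging's version.parse behaves the same):

import pkg_resources as pkg

current, latest = '8.0.50', '8.0.51'
update_available = pkg.parse_version(current) < pkg.parse_version(latest)
print(update_available)  # True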
@@ -9,7 +9,6 @@ import matplotlib
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
-import seaborn as sn
 import torch
 from PIL import Image, ImageDraw, ImageFont
 from PIL import __version__ as pil_version
@@ -161,6 +160,8 @@ class Annotator:
 
 @TryExcept()  # known issue https://github.com/ultralytics/yolov5/issues/5395
 def plot_labels(boxes, cls, names=(), save_dir=Path('')):
+    import seaborn as sn
+
     # plot dataset labels
     LOGGER.info(f"Plotting labels to {save_dir / 'labels.jpg'}... ")
     b = boxes.transpose()  # classes, boxes
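Note: moving `import seaborn as sn` from module level into plot_labels defers a heavy dependency until plotting is actually requested, which shortens `import ultralytics` time. The pattern in isolation (plot_histogram is a hypothetical function for illustration):

def plot_histogram(values):
    import seaborn as sn  # deferred: only paid when plotting is requested
    import matplotlib.pyplot as plt

    sn.histplot(values)
    plt.show()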