ultralytics 8.0.37 add TFLite metadata in AutoBackend (#953)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Ayush Chaurasia <ayush.chaurarsia@gmail.com>
Co-authored-by: Yonghye Kwon <developer.0hye@gmail.com>
Co-authored-by: Aarni Koskela <akx@iki.fi>
This commit is contained in:
Glenn Jocher
2023-02-14 14:28:23 +04:00
committed by GitHub
parent 20fe708f31
commit bdc6cd4d8b
18 changed files with 86 additions and 46 deletions

View File

@@ -1,3 +1,5 @@
# Ultralytics YOLO 🚀, GPL-3.0 license
from . import v8
__all__ = ["v8"]

View File

@@ -142,7 +142,7 @@ def check_cfg_mismatch(base: Dict, custom: Dict, e=None):
string = ''
for x in mismatched:
matches = get_close_matches(x, base) # key list
matches = [f"{k}={DEFAULT_CFG_DICT[k]}" if DEFAULT_CFG_DICT[k] is not None else k for k in matches] # k=v
matches = [f"{k}={DEFAULT_CFG_DICT[k]}" if DEFAULT_CFG_DICT.get(k) is not None else k for k in matches]
match_str = f"Similar arguments are i.e. {matches}." if matches else ''
string += f"'{colorstr('red', 'bold', x)}' is not a valid YOLO argument. {match_str}\n"
raise SyntaxError(string + CLI_HELP_MSG) from e
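The switch from DEFAULT_CFG_DICT[k] to DEFAULT_CFG_DICT.get(k) guards against suggested keys that exist in the user's config but have no entry in the defaults dict. A minimal sketch of the failure mode, using hypothetical keys:

from difflib import get_close_matches

DEFAULT_CFG_DICT = {'imgsz': 640, 'epochs': 100}          # assumed defaults
base = {'imgsz': 640, 'epochs': 100, 'save_dir': 'runs'}  # hypothetical config with an extra key

matches = get_close_matches('save_dirs', base)  # -> ['save_dir']
# old: DEFAULT_CFG_DICT[k]      raises KeyError, because 'save_dir' is not a default key
# new: DEFAULT_CFG_DICT.get(k)  returns None, so the bare key is suggested instead
matches = [f"{k}={DEFAULT_CFG_DICT[k]}" if DEFAULT_CFG_DICT.get(k) is not None else k for k in matches]
print(matches)  # ['save_dir']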

View File

@@ -4,3 +4,13 @@ from .base import BaseDataset
from .build import build_classification_dataloader, build_dataloader, load_inference_source
from .dataset import ClassificationDataset, SemanticDataset, YOLODataset
from .dataset_wrappers import MixAndRectDataset
__all__ = [
"BaseDataset",
"ClassificationDataset",
"MixAndRectDataset",
"SemanticDataset",
"YOLODataset",
"build_classification_dataloader",
"build_dataloader",
"load_inference_source",]

View File

@@ -73,7 +73,7 @@ from ultralytics.yolo.utils import DEFAULT_CFG, LOGGER, __version__, callbacks,
from ultralytics.yolo.utils.checks import check_imgsz, check_requirements, check_version, check_yaml
from ultralytics.yolo.utils.files import file_size
from ultralytics.yolo.utils.ops import Profile
from ultralytics.yolo.utils.torch_utils import select_device, smart_inference_mode, get_latest_opset
from ultralytics.yolo.utils.torch_utils import get_latest_opset, select_device, smart_inference_mode
MACOS = platform.system() == 'Darwin' # macOS environment
@@ -508,7 +508,7 @@ class Exporter:
onnx = self.file.with_suffix('.onnx')
# Export to TF SavedModel
subprocess.run(f'onnx2tf -i {onnx} --output_signaturedefs -o {f}', shell=True)
subprocess.run(f'onnx2tf -i {onnx} -o {f} --non_verbose', shell=True)
# Add TFLite metadata
for tflite_file in Path(f).rglob('*.tflite'):
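A minimal sketch of the TF SavedModel step above, with illustrative paths and a hypothetical stand-in for the Exporter's metadata helper (the real method name is not shown in this diff):

import subprocess
from pathlib import Path

onnx = Path('yolov8n.onnx')      # assumed pre-existing ONNX export
f = Path('yolov8n_saved_model')  # SavedModel output directory

# --non_verbose replaces --output_signaturedefs in this commit
subprocess.run(f'onnx2tf -i {onnx} -o {f} --non_verbose', shell=True, check=True)

for tflite_file in f.rglob('*.tflite'):
    add_tflite_metadata(tflite_file)  # hypothetical helper that attaches the TFLite metadata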

View File

@@ -108,8 +108,8 @@ class YOLO:
Raises TypeError if model is not a PyTorch model
"""
if not isinstance(self.model, nn.Module):
raise TypeError(f"model='{self.model}' must be a PyTorch model, but is a different type. PyTorch models "
f"can be used to train, val, predict and export, i.e. "
raise TypeError(f"model='{self.model}' must be a *.pt PyTorch model, but is a different type. "
f"PyTorch models can be used to train, val, predict and export, i.e. "
f"'yolo export model=yolov8n.pt', but exported formats like ONNX, TensorRT etc. only "
f"support 'predict' and 'val' modes, i.e. 'yolo predict model=yolov8n.onnx'.")
@@ -240,7 +240,7 @@ class YOLO:
if RANK in {0, -1}:
self.model, _ = attempt_load_one_weight(str(self.trainer.best))
self.overrides = self.model.args
self.metrics_data = self.trainer.validator.metrics
self.metrics_data = self.trainer.validator.metrics
def to(self, device):
"""

View File

@@ -221,11 +221,10 @@ def is_jupyter():
Returns:
bool: True if running inside a Jupyter Notebook, False otherwise.
"""
try:
with contextlib.suppress(Exception):
from IPython import get_ipython
return get_ipython() is not None
except ImportError:
return False
return False
def is_docker() -> bool:
@@ -287,11 +286,9 @@ def is_pytest_running():
Returns:
(bool): True if pytest is running, False otherwise.
"""
try:
import sys
with contextlib.suppress(Exception):
return "pytest" in sys.modules
except ImportError:
return False
return False
def is_github_actions_ci() -> bool:
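Both helpers now use the same pattern; a minimal sketch of the contextlib.suppress form of is_pytest_running(), equivalent to the old try/except ImportError version but swallowing any other failure as well:

import contextlib
import sys

def is_pytest_running() -> bool:
    """Return True if pytest is running, False otherwise."""
    with contextlib.suppress(Exception):
        return 'pytest' in sys.modules
    return False  # reached only if the suppressed block raised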

View File

@@ -1 +1,5 @@
from .base import add_integration_callbacks, default_callbacks
__all__ = [
'add_integration_callbacks',
'default_callbacks',]

View File

@@ -17,7 +17,6 @@ import numpy as np
import pkg_resources as pkg
import psutil
import torch
from IPython import display
from matplotlib import font_manager
from ultralytics.yolo.utils import (AUTOINSTALL, LOGGER, ROOT, USER_CONFIG_DIR, TryExcept, colorstr, downloads, emojis,
@@ -292,8 +291,10 @@ def check_yolo(verbose=True):
gib = 1 << 30 # bytes per GiB
ram = psutil.virtual_memory().total
total, used, free = shutil.disk_usage("/")
display.clear_output()
s = f'({os.cpu_count()} CPUs, {ram / gib:.1f} GB RAM, {(total - free) / gib:.1f}/{total / gib:.1f} GB disk)'
with contextlib.suppress(Exception): # clear display if ipython is installed
from IPython import display
display.clear_output()
else:
s = ''
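The same lazy-import idea in isolation: IPython is no longer required at import time, and the notebook output is only cleared when it is available. A minimal sketch:

import contextlib

def clear_notebook_output():
    # no-op outside Jupyter or when IPython is not installed
    with contextlib.suppress(Exception):
        from IPython import display
        display.clear_output()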

View File

@@ -3,3 +3,5 @@
from ultralytics.yolo.v8.classify.predict import ClassificationPredictor, predict
from ultralytics.yolo.v8.classify.train import ClassificationTrainer, train
from ultralytics.yolo.v8.classify.val import ClassificationValidator, val
__all__ = ["ClassificationPredictor", "predict", "ClassificationTrainer", "train", "ClassificationValidator", "val"]

View File

@@ -3,3 +3,5 @@
from .predict import DetectionPredictor, predict
from .train import DetectionTrainer, train
from .val import DetectionValidator, val
__all__ = ["DetectionPredictor", "predict", "DetectionTrainer", "train", "DetectionValidator", "val"]

View File

@@ -3,3 +3,5 @@
from .predict import SegmentationPredictor, predict
from .train import SegmentationTrainer, train
from .val import SegmentationValidator, val
__all__ = ["SegmentationPredictor", "predict", "SegmentationTrainer", "train", "SegmentationValidator", "val"]