ultralytics 8.0.53
DDP AMP and Edge TPU fixes (#1362)
Co-authored-by: Richard Aljaste <richardaljasteabramson@gmail.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Vuong Kha Sieu <75152429+hotfur@users.noreply.github.com>
ultralytics/yolo/utils/__init__.py
@@ -126,7 +126,7 @@ class IterableSimpleNamespace(SimpleNamespace):
 def set_logging(name=LOGGING_NAME, verbose=True):
     # sets up logging for the given name
     rank = int(os.getenv('RANK', -1))  # rank in world for Multi-GPU trainings
-    level = logging.INFO if verbose and rank in {-1, 0} else logging.ERROR
+    level = logging.INFO if verbose and rank in (-1, 0) else logging.ERROR
     logging.config.dictConfig({
         'version': 1,
         'disable_existing_loggers': False,
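In each changed pair only the literal differs: the set {-1, 0} becomes the tuple (-1, 0), with identical membership-test behavior. For context, a minimal standalone sketch (not from this commit) of the rank-gated logging idea, assuming the RANK environment variable set by torch.distributed launchers:

import logging
import os

# RANK is set by torchrun/torch.distributed launchers; -1 means no DDP.
rank = int(os.getenv('RANK', -1))

# Only the single-process (-1) or DDP-master (0) rank logs at INFO;
# worker ranks drop to ERROR so messages are not duplicated per GPU.
level = logging.INFO if rank in (-1, 0) else logging.ERROR
logging.basicConfig(level=level)
logging.getLogger('demo').info('visible only on rank -1 or 0')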
ultralytics/yolo/utils/__init__.py
@@ -524,7 +524,7 @@ def set_sentry():
         return event
 
     if SETTINGS['sync'] and \
-            RANK in {-1, 0} and \
+            RANK in (-1, 0) and \
            Path(sys.argv[0]).name == 'yolo' and \
            not TESTS_RUNNING and \
            ONLINE and \
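The guard above is one backslash-continued boolean chain; Python short-circuits it left to right, so the cheap checks (settings flag, rank, entry-point name) run before anything network-dependent like ONLINE. A hedged sketch of the same pattern, with hypothetical stand-ins for the real SETTINGS/RANK/ONLINE globals:

import sys
from pathlib import Path

settings = {'sync': True}  # hypothetical stand-in for SETTINGS
rank = -1                  # hypothetical stand-in for RANK

def is_online():
    # Placeholder for the ONLINE check; only evaluated if every
    # earlier term in the chain is already True.
    return False

if settings['sync'] and \
        rank in (-1, 0) and \
        Path(sys.argv[0]).name == 'yolo' and \
        is_online():
    print('Sentry would be initialized here')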
ultralytics/yolo/utils/benchmarks.py
@@ -28,7 +28,7 @@ from pathlib import Path
 
 from ultralytics import YOLO
 from ultralytics.yolo.engine.exporter import export_formats
-from ultralytics.yolo.utils import LINUX, LOGGER, ROOT, SETTINGS
+from ultralytics.yolo.utils import LINUX, LOGGER, MACOS, ROOT, SETTINGS
 from ultralytics.yolo.utils.checks import check_yolo
 from ultralytics.yolo.utils.downloads import download
 from ultralytics.yolo.utils.files import file_size
@@ -51,6 +51,8 @@ def benchmark(model=Path(SETTINGS['weights_dir']) / 'yolov8n.pt', imgsz=160, hal
             if model.task == 'classify':
                 assert i != 11, 'paddle cls exports coming soon'
             assert i != 9 or LINUX, 'Edge TPU export only supported on Linux'
+            if i == 10:
+                assert MACOS or LINUX, 'TF.js export only supported on macOS and Linux'
             if 'cpu' in device.type:
                 assert cpu, 'inference not supported on CPU'
             if 'cuda' in device.type:
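The integer indices are rows of the export_formats() table (9 = Edge TPU, 10 = TF.js, 11 = PaddlePaddle), and LINUX/MACOS are platform-detection constants imported from ultralytics.yolo.utils. A minimal sketch of how such flags are typically derived, mirroring the new TF.js guard:

import platform

# Platform flags analogous to the LINUX/MACOS constants imported above.
LINUX = platform.system() == 'Linux'
MACOS = platform.system() == 'Darwin'  # macOS

i = 10  # row 10 of export_formats() is TF.js
if i == 10:
    assert MACOS or LINUX, 'TF.js export only supported on macOS and Linux'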
ultralytics/yolo/utils/downloads.py
@@ -118,7 +118,7 @@ def safe_download(url,
                     raise ConnectionError(f'❌ Download failure for {url}. Retry limit reached.') from e
                 LOGGER.warning(f'⚠️ Download failure, retrying {i + 1}/{retry} {url}...')
 
-    if unzip and f.exists() and f.suffix in {'.zip', '.tar', '.gz'}:
+    if unzip and f.exists() and f.suffix in ('.zip', '.tar', '.gz'):
         unzip_dir = dir or f.parent  # unzip to dir if provided else unzip in place
         LOGGER.info(f'Unzipping {f} to {unzip_dir}...')
         if f.suffix == '.zip':
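After the suffix check, safe_download dispatches on the archive type. A hedged, stdlib-only sketch of that dispatch (the real function's extraction details may differ):

import tarfile
from pathlib import Path
from zipfile import ZipFile

def unzip_file(f, dir=None):
    # Extract f to dir if provided, else in place next to the archive.
    f = Path(f)
    unzip_dir = Path(dir) if dir else f.parent
    if f.suffix == '.zip':
        ZipFile(f).extractall(path=unzip_dir)
    elif f.suffix in ('.tar', '.gz'):
        tarfile.open(f).extractall(path=unzip_dir)  # compression auto-detected
    return unzip_dir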
ultralytics/yolo/utils/torch_utils.py
@@ -33,7 +33,7 @@ TORCH_1_12 = check_version(torch.__version__, '1.12.0')
 def torch_distributed_zero_first(local_rank: int):
     # Decorator to make all processes in distributed training wait for each local_master to do something
     initialized = torch.distributed.is_available() and torch.distributed.is_initialized()
-    if initialized and local_rank not in {-1, 0}:
+    if initialized and local_rank not in (-1, 0):
         dist.barrier(device_ids=[local_rank])
     yield
     if initialized and local_rank == 0:
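Despite the 'Decorator' comment, the yield shows torch_distributed_zero_first is a context manager: non-master ranks block on the first barrier while rank 0 does one-time work, then rank 0 reaches the second barrier and releases them. A hedged usage sketch; get_dataset is a hypothetical one-time setup function:

import os

from ultralytics.yolo.utils.torch_utils import torch_distributed_zero_first

def get_dataset():
    return 'dataset'  # hypothetical placeholder for one-time setup work

LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1))

with torch_distributed_zero_first(LOCAL_RANK):
    # Rank 0 (or -1 when not distributed) runs this first; other ranks
    # wait at the barrier and then reuse the cached result.
    dataset = get_dataset()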