ultralytics 8.0.20 — CLI `yolo` simplifications, DDP and ONNX fixes (#608)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Sid Prabhakaran <s2siddhu@gmail.com>
This commit is contained in:
@@ -116,6 +116,9 @@ class IterableSimpleNamespace(SimpleNamespace):

 # Default configuration
 with open(DEFAULT_CFG_PATH, errors='ignore') as f:
     DEFAULT_CFG_DICT = yaml.safe_load(f)
+for k, v in DEFAULT_CFG_DICT.items():
+    if isinstance(v, str) and v.lower() == 'none':
+        DEFAULT_CFG_DICT[k] = None
 DEFAULT_CFG_KEYS = DEFAULT_CFG_DICT.keys()
 DEFAULT_CFG = IterableSimpleNamespace(**DEFAULT_CFG_DICT)

||||
@@ -448,13 +451,13 @@ def set_sentry():
     """

     def before_send(event, hint):
-        oss = 'colab' if is_colab() else 'kaggle' if is_kaggle() else 'jupyter' if is_jupyter() else \
-            'docker' if is_docker() else platform.system()
+        env = 'Colab' if is_colab() else 'Kaggle' if is_kaggle() else 'Jupyter' if is_jupyter() else \
+            'Docker' if is_docker() else platform.system()
         event['tags'] = {
             "sys_argv": sys.argv[0],
             "sys_argv_name": Path(sys.argv[0]).name,
             "install": 'git' if is_git_dir() else 'pip' if is_pip_package() else 'other',
-            "os": oss}
+            "os": env}
         return event

     if SETTINGS['sync'] and \
@@ -529,7 +532,7 @@ def set_settings(kwargs, file=USER_CONFIG_DIR / 'settings.yaml'):
     yaml_save(file, SETTINGS)


-# Run below code on utils init -----------------------------------------------------------------------------------------
+# Run below code on yolo/utils init ------------------------------------------------------------------------------------

 # Set logger
 set_logging(LOGGING_NAME)  # run before defining LOGGER
|
@@ -48,19 +48,19 @@ def on_train_end(trainer):


 def on_train_start(trainer):
-    traces(trainer.args, traces_sample_rate=0.0)
+    traces(trainer.args, traces_sample_rate=1.0)


 def on_val_start(validator):
-    traces(validator.args, traces_sample_rate=0.0)
+    traces(validator.args, traces_sample_rate=1.0)


 def on_predict_start(predictor):
-    traces(predictor.args, traces_sample_rate=0.0)
+    traces(predictor.args, traces_sample_rate=1.0)


 def on_export_start(exporter):
-    traces(exporter.args, traces_sample_rate=0.0)
+    traces(exporter.args, traces_sample_rate=1.0)


 callbacks = {
|
@@ -31,7 +31,7 @@ WORLD_SIZE = int(os.getenv('WORLD_SIZE', 1))
 @contextmanager
 def torch_distributed_zero_first(local_rank: int):
     # Decorator to make all processes in distributed training wait for each local_master to do something
-    initialized = torch.distributed.is_initialized()  # prevent 'Default process group has not been initialized' errors
+    initialized = torch.distributed.is_available() and torch.distributed.is_initialized()
     if initialized and local_rank not in {-1, 0}:
         dist.barrier(device_ids=[local_rank])
     yield
|
Reference in New Issue
Block a user