ultralytics 8.0.14 Hydra removal fixes and cleanup (#542)

Co-authored-by: ayush chaurasia <ayush.chaurarsia@gmail.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Kamlesh Kumar <patelkamleshpatel364@gmail.com>
Author: Glenn Jocher (committed via GitHub)
Date: 2023-01-21 21:22:40 +01:00
Parent: cc3be0e223
Commit: d9a0fba251
30 changed files with 339 additions and 301 deletions

@@ -23,7 +23,7 @@ import yaml
 # Constants
 FILE = Path(__file__).resolve()
 ROOT = FILE.parents[2]  # YOLO
-DEFAULT_CFG_PATH = ROOT / "yolo/configs/default.yaml"
+DEFAULT_CFG_PATH = ROOT / "yolo/cfg/default.yaml"
 RANK = int(os.getenv('RANK', -1))
 NUM_THREADS = min(8, max(1, os.cpu_count() - 1))  # number of YOLOv5 multiprocessing threads
 AUTOINSTALL = str(os.getenv('YOLO_AUTOINSTALL', True)).lower() == 'true'  # global auto-install mode
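
For reference, the YOLO_AUTOINSTALL toggle in the context lines above reduces to a case-insensitive string comparison, so auto-install defaults to enabled when the variable is unset. A minimal standalone sketch:

import os

# Unset -> os.getenv returns the default True; str(True).lower() == 'true'.
os.environ.pop('YOLO_AUTOINSTALL', None)
assert str(os.getenv('YOLO_AUTOINSTALL', True)).lower() == 'true'

# Any value other than a case-insensitive 'true' disables auto-install.
os.environ['YOLO_AUTOINSTALL'] = 'False'
assert str(os.getenv('YOLO_AUTOINSTALL', True)).lower() != 'true'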

@@ -26,7 +26,7 @@ def on_pretrain_routine_start(trainer):
         output_uri=True,
         reuse_last_task_id=False,
         auto_connect_frameworks={'pytorch': False})
-    task.connect(dict(trainer.args), name='General')
+    task.connect(vars(trainer.args), name='General')


 def on_train_epoch_end(trainer):

@@ -11,7 +11,7 @@ except (ModuleNotFoundError, ImportError):
 def on_pretrain_routine_start(trainer):
     experiment = comet_ml.Experiment(project_name=trainer.args.project or "YOLOv8")
-    experiment.log_parameters(dict(trainer.args))
+    experiment.log_parameters(vars(trainer.args))


 def on_train_epoch_end(trainer):
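
Both callback fixes above swap dict(trainer.args) for vars(trainer.args): with Hydra removed, trainer.args is presumably no longer a mapping-like DictConfig but a plain namespace object, which dict() cannot consume. A minimal sketch, using SimpleNamespace as a hypothetical stand-in for trainer.args:

from types import SimpleNamespace

args = SimpleNamespace(epochs=100, imgsz=640)  # hypothetical stand-in for trainer.args

print(vars(args))  # {'epochs': 100, 'imgsz': 640} -- vars() reads the object's __dict__
try:
    dict(args)  # TypeError: a plain namespace is not an iterable of key/value pairs
except TypeError as err:
    print(err)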

@@ -137,9 +137,10 @@ def model_info(model, verbose=False, imgsz=640):
                         (i, name, p.requires_grad, p.numel(), list(p.shape), p.mean(), p.std()))
     flops = get_flops(model, imgsz)
+    fused = ' (fused)' if model.is_fused() else ''
     fs = f', {flops:.1f} GFLOPs' if flops else ''
     m = Path(getattr(model, 'yaml_file', '') or model.yaml.get('yaml_file', '')).stem.replace('yolo', 'YOLO') or 'Model'
-    LOGGER.info(f"{m} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")
+    LOGGER.info(f"{m} summary{fused}: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")


 def get_num_params(model):
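
The added fused flag feeds the summary f-string, so fused models are labelled in the log line. A self-contained sketch of how that line is assembled, using a toy torch model rather than the ultralytics helpers (model.is_fused() and get_flops() are stubbed out here as assumptions):

import torch.nn as nn

model = nn.Sequential(nn.Conv2d(3, 16, 3), nn.BatchNorm2d(16), nn.ReLU())
n_p = sum(p.numel() for p in model.parameters())                      # total parameters
n_g = sum(p.numel() for p in model.parameters() if p.requires_grad)  # parameters with gradients
fused = ''  # would be ' (fused)' if model.is_fused() were True
fs = ''     # would be f', {flops:.1f} GFLOPs' if FLOPs were computed
print(f"Model summary{fused}: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")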