ultralytics 8.0.43 optimized Results class and fixes (#1069)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Alexander Duda <Alexander.Duda@me.com>
Co-authored-by: Laughing <61612323+Laughing-q@users.noreply.github.com>
Author: Glenn Jocher
Date: 2023-02-21 20:37:59 +01:00
Committed by: GitHub
Parent: f2a7a29e53
Commit: fe61018975
22 changed files with 180 additions and 102 deletions


@@ -290,13 +290,15 @@ class LoadPilAndNumpy:
         self.transforms = transforms
         self.mode = 'image'
         # generate fake paths
-        self.paths = [f'image{i}.jpg' for i in range(len(self.im0))]
+        self.paths = [getattr(im, 'filename', f'image{i}.jpg') for i, im in enumerate(self.im0)]
         self.bs = len(self.im0)

     @staticmethod
     def _single_check(im):
         assert isinstance(im, (Image.Image, np.ndarray)), f'Expected PIL/np.ndarray image type, but got {type(im)}'
         if isinstance(im, Image.Image):
+            if im.mode != 'RGB':
+                im = im.convert('RGB')
             im = np.asarray(im)[:, :, ::-1]
             im = np.ascontiguousarray(im)  # contiguous
         return im
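A minimal standalone sketch (not part of the diff) of what the patched LoadPilAndNumpy logic does for PIL inputs: non-RGB images (e.g. RGBA or grayscale) are converted to RGB before the RGB-to-BGR flip, and the fake-path generation now reuses a PIL image's filename attribute when the image was opened from disk, falling back to the generated image{i}.jpg name otherwise. The single_check helper below is hypothetical; only Pillow and NumPy are assumed.

# Hypothetical standalone sketch of the patched behaviour (assumes Pillow and NumPy only)
import numpy as np
from PIL import Image

def single_check(im):
    # Mirrors the updated _single_check: accept PIL/ndarray, force RGB, return a contiguous BGR array
    assert isinstance(im, (Image.Image, np.ndarray)), f'Expected PIL/np.ndarray image type, but got {type(im)}'
    if isinstance(im, Image.Image):
        if im.mode != 'RGB':
            im = im.convert('RGB')  # e.g. RGBA/L/P -> RGB
        im = np.asarray(im)[:, :, ::-1]  # RGB -> BGR
        im = np.ascontiguousarray(im)  # contiguous
    return im

im0 = [Image.new('RGBA', (64, 64)), np.zeros((64, 64, 3), dtype=np.uint8)]
checked = [single_check(im) for im in im0]
# In-memory PIL images and ndarrays have no 'filename' attribute, so the fallback name is used
paths = [getattr(im, 'filename', f'image{i}.jpg') for i, im in enumerate(im0)]
print([c.shape for c in checked], paths)  # [(64, 64, 3), (64, 64, 3)] ['image0.jpg', 'image1.jpg']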


@@ -1045,7 +1045,7 @@ class HUBDatasetStats():
         autodownload: Attempt to download dataset if not found locally

     Usage
-        from utils.dataloaders import HUBDatasetStats
+        from ultralytics.yolo.data.dataloaders.v5loader import HUBDatasetStats
         stats = HUBDatasetStats('coco128.yaml', autodownload=True)  # usage 1
         stats = HUBDatasetStats('path/to/coco128.zip')  # usage 2
         stats.get_json(save=False)
@@ -1055,15 +1055,15 @@ class HUBDatasetStats():
     def __init__(self, path='coco128.yaml', autodownload=False):
         # Initialize class
         zipped, data_dir, yaml_path = self._unzip(Path(path))
-        try:
-            data = yaml_load(check_yaml(yaml_path))  # data dict
-            if zipped:
-                data['path'] = data_dir
-        except Exception as e:
-            raise Exception('error/HUB/dataset_stats/yaml_load') from e
+        # try:
+        #     data = yaml_load(check_yaml(yaml_path))  # data dict
+        #     if zipped:
+        #         data['path'] = data_dir
+        # except Exception as e:
+        #     raise Exception('error/HUB/dataset_stats/yaml_load') from e

-        check_det_dataset(data, autodownload)  # download dataset if missing
-        self.hub_dir = Path(data['path'] + '-hub')
+        data = check_det_dataset(yaml_path, autodownload)  # download dataset if missing
+        self.hub_dir = Path(str(data['path']) + '-hub')
         self.im_dir = self.hub_dir / 'images'
         self.im_dir.mkdir(parents=True, exist_ok=True)  # makes /images
         self.stats = {'nc': data['nc'], 'names': list(data['names'].values())}  # statistics dictionary
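For reference, a usage sketch that follows the updated docstring and constructor above; it assumes ultralytics 8.0.43 is installed and that coco128 is available locally or can be downloaded when autodownload=True.

# Usage sketch following the docstring; not part of the diff
from ultralytics.yolo.data.dataloaders.v5loader import HUBDatasetStats

stats = HUBDatasetStats('coco128.yaml', autodownload=True)  # usage 1: from a dataset YAML
# stats = HUBDatasetStats('path/to/coco128.zip')            # usage 2: from a zipped dataset
print(stats.get_json(save=False))  # statistics dictionary seeded with 'nc' and 'names'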