ultralytics 8.0.52 reduced TAL CUDA usage and AMP check fix (#1333)
Co-authored-by: CNH5 <74132034+CNH5@users.noreply.github.com>
Co-authored-by: Huijae Lee <46982469+ZeroAct@users.noreply.github.com>
Co-authored-by: Lorenzo Mammana <lorenzom96@hotmail.it>
Co-authored-by: Laughing <61612323+Laughing-q@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Hardik Dava <39372750+hardikdava@users.noreply.github.com>
Co-authored-by: Ayush Chaurasia <ayush.chaurarsia@gmail.com>
@@ -184,7 +184,7 @@ class AutoBackend(nn.Module):
             LOGGER.info(f'Loading {w} for CoreML inference...')
             import coremltools as ct
             model = ct.models.MLModel(w)
-            metadata = model.user_defined_metadata
+            metadata = dict(model.user_defined_metadata)
         elif saved_model:  # TF SavedModel
             LOGGER.info(f'Loading {w} for TensorFlow SavedModel inference...')
             import tensorflow as tf
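The only functional change in this hunk is the dict() wrapper around the CoreML metadata. A minimal sketch of the idea, assuming coremltools is installed and an Ultralytics-exported .mlmodel exists at the given path (both assumptions, not part of this commit):

    import coremltools as ct

    model = ct.models.MLModel('yolov8n.mlmodel')  # hypothetical exported model path
    # user_defined_metadata is exposed by coremltools as a mapping; copying it
    # into a plain dict decouples it from the MLModel object so later code can
    # parse and mutate it like metadata loaded from any other backend.
    metadata = dict(model.user_defined_metadata)
    print(type(metadata), list(metadata))  # keys written at export time (contents depend on the exporter)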
@@ -256,10 +256,10 @@ class AutoBackend(nn.Module):
             nhwc = model.runtime.startswith("tensorflow")
             '''
         else:
-            from ultralytics.yolo.engine.exporter import EXPORT_FORMATS_TABLE
+            from ultralytics.yolo.engine.exporter import export_formats
             raise TypeError(f"model='{w}' is not a supported model format. "
                             'See https://docs.ultralytics.com/tasks/detection/#export for help.'
-                            f'\n\n{EXPORT_FORMATS_TABLE}')
+                            f'\n\n{export_formats()}')
 
         # Load external metadata YAML
         if isinstance(metadata, (str, Path)) and Path(metadata).exists():
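For context, a rough sketch of what the new error path produces, assuming the ultralytics package at this version is importable; export_formats() comes straight from the hunk above, the weights path is illustrative:

    from ultralytics.yolo.engine.exporter import export_formats

    w = 'model.unknown'  # hypothetical unsupported weights path
    # The format table is rendered at raise time instead of relying on the
    # removed EXPORT_FORMATS_TABLE constant, so the message always reflects
    # the currently supported export formats.
    raise TypeError(f"model='{w}' is not a supported model format. "
                    'See https://docs.ultralytics.com/tasks/detection/#export for help.'
                    f'\n\n{export_formats()}')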
@@ -8,7 +8,6 @@ from pathlib import Path
 
 import cv2
 import numpy as np
-import pandas as pd
 import requests
 import torch
 import torch.nn as nn
@@ -204,12 +203,13 @@ class Detections:
 
     def pandas(self):
         # return detections as pandas DataFrames, i.e. print(results.pandas().xyxy[0])
+        import pandas
         new = copy(self)  # return copy
         ca = 'xmin', 'ymin', 'xmax', 'ymax', 'confidence', 'class', 'name'  # xyxy columns
         cb = 'xcenter', 'ycenter', 'width', 'height', 'confidence', 'class', 'name'  # xywh columns
         for k, c in zip(['xyxy', 'xyxyn', 'xywh', 'xywhn'], [ca, ca, cb, cb]):
             a = [[x[:5] + [int(x[5]), self.names[int(x[5])]] for x in x.tolist()] for x in getattr(self, k)]  # update
-            setattr(new, k, [pd.DataFrame(x, columns=c) for x in a])
+            setattr(new, k, [pandas.DataFrame(x, columns=c) for x in a])
         return new
 
     def tolist(self):
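The two hunks above move the pandas import from module scope into Detections.pandas(), so pandas is only required when DataFrames are actually requested. A standalone sketch of the conversion pattern the method uses (class names and box values below are made up for illustration):

    import pandas

    names = {0: 'person', 5: 'bus'}                  # hypothetical class-name map
    pred = [[50.0, 30.0, 200.0, 400.0, 0.92, 0.0],   # hypothetical [x1, y1, x2, y2, conf, cls] rows
            [10.0, 20.0, 600.0, 480.0, 0.88, 5.0]]
    columns = ('xmin', 'ymin', 'xmax', 'ymax', 'confidence', 'class', 'name')
    # Same transform as the setattr line above: keep the box and confidence,
    # then append the integer class id and its human-readable name.
    rows = [x[:5] + [int(x[5]), names[int(x[5])]] for x in pred]
    print(pandas.DataFrame(rows, columns=columns))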
@@ -122,7 +122,7 @@ class BaseModel(nn.Module):
         bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # normalization layers, i.e. BatchNorm2d()
         return sum(isinstance(v, bn) for v in self.modules()) < thresh  # True if < 'thresh' BatchNorm layers in model
 
-    def info(self, verbose=False, imgsz=640):
+    def info(self, verbose=True, imgsz=640):
         """
         Prints model information
 
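Net effect of the last hunk, as a small usage sketch; the YOLO wrapper call and checkpoint name are assumptions for illustration, only the BaseModel.info() default comes from this diff:

    from ultralytics import YOLO

    yolo = YOLO('yolov8n.pt')        # yolo.model is a BaseModel subclass
    yolo.model.info()                # now prints the layer/parameter summary by default
    yolo.model.info(verbose=False)   # explicit opt-out keeps the old quiet behaviour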