From a9fb8b239b06cd153dee4f7c16d1c47bcc1a874a Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Sat, 5 Aug 2023 15:35:44 +0200
Subject: [PATCH] Add Docs example for plotting tracks over time (#4170)

---
 README.md           |  2 +-
 README.zh-CN.md     |  2 +-
 docs/modes/track.md | 71 ++++++++++++++++++++++++++++++++++++++++++++-
 3 files changed, 72 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index d016bcb..7d0463a 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,7 @@ Pip install the ultralytics package including all [requirements](https://github.
 pip install ultralytics
 ```

-For alternative installation methods including Conda, Docker, and Git, please refer to the [Quickstart Guide](https://docs.ultralytics.com/quickstart).
+For alternative installation methods including [Conda](https://anaconda.org/conda-forge/ultralytics), [Docker](https://hub.docker.com/r/ultralytics/ultralytics), and Git, please refer to the [Quickstart Guide](https://docs.ultralytics.com/quickstart).

diff --git a/README.zh-CN.md b/README.zh-CN.md
index 29fdc0a..ab18209 100644
--- a/README.zh-CN.md
+++ b/README.zh-CN.md
@@ -65,7 +65,7 @@
 pip install ultralytics
 ```

-如需使用包括Conda、Docker和Git在内的其他安装方法，请参考[快速入门指南](https://docs.ultralytics.com/quickstart)。
+如需使用包括[Conda](https://anaconda.org/conda-forge/ultralytics)、[Docker](https://hub.docker.com/r/ultralytics/ultralytics)和Git在内的其他安装方法，请参考[快速入门指南](https://docs.ultralytics.com/quickstart)。

diff --git a/docs/modes/track.md b/docs/modes/track.md
index 208f191..a8fa212 100644
--- a/docs/modes/track.md
+++ b/docs/modes/track.md
@@ -122,7 +122,7 @@ Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking o
     model = YOLO('yolov8n.pt')

     # Open the video file
-    video_path = "path/to/your/video/file.mp4"
+    video_path = "path/to/video.mp4"
     cap = cv2.VideoCapture(video_path)

     # Loop through the video frames
@@ -154,6 +154,75 @@ Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking o
 Please note the change from `model(frame)` to `model.track(frame)`, which enables object tracking instead of simple detection. This modified script will run the tracker on each frame of the video, visualize the results, and display them in a window. The loop can be exited by pressing 'q'.

+### Plotting Tracks Over Time
+
+Visualizing object tracks over consecutive frames can provide valuable insights into the movement patterns and behavior of detected objects within a video. With Ultralytics YOLOv8, plotting these tracks is a seamless and efficient process.
+
+In the following example, we demonstrate how to utilize YOLOv8's tracking capabilities to plot the movement of detected objects across multiple video frames. This script involves opening a video file, reading it frame by frame, and utilizing the YOLO model to identify and track various objects. By retaining the center points of the detected bounding boxes and connecting them, we can draw lines that represent the paths followed by the tracked objects.
+
example "Plotting tracks over multiple video frames" + + ```python + from collections import defaultdict + + import cv2 + import numpy as np + + from ultralytics import YOLO + + # Load the YOLOv8 model + model = YOLO('yolov8n.pt') + + # Open the video file + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # Store the track history + track_history = defaultdict(lambda: []) + + # Loop through the video frames + while cap.isOpened(): + # Read a frame from the video + success, frame = cap.read() + + if success: + # Run YOLOv8 tracking on the frame, persisting tracks between frames + results = model.track(frame, persist=True) + + # Get the boxes and track IDs + boxes = results[0].boxes.xywh.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + + # Visualize the results on the frame + annotated_frame = results[0].plot() + + # Plot the tracks + for box, track_id in zip(boxes, track_ids): + x, y, w, h = box + track = track_history[track_id] + track.append((float(x), float(y))) # x, y center point + if len(track) > 30: # retain 90 tracks for 90 frames + track.pop(0) + + # Draw the tracking lines + points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) + cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10) + + # Display the annotated frame + cv2.imshow("YOLOv8 Tracking", annotated_frame) + + # Break the loop if 'q' is pressed + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # Break the loop if the end of the video is reached + break + + # Release the video capture object and close the display window + cap.release() + cv2.destroyAllWindows() + ``` + ### Multithreaded Tracking Multithreaded tracking provides the capability to run object tracking on multiple video streams simultaneously. This is particularly useful when handling multiple video inputs, such as from multiple surveillance cameras, where concurrent processing can greatly enhance efficiency and performance.