Object tracker on RGB
This example shows how to run MobileNetv2SSD on the RGB input frame, and perform object tracking on persons. Similar samples:
Demo
Setup
Please run the install script to download all required dependencies. Please note that this script must be run from a git context, so you have to download the depthai-python repository first and then run the script.
Command Line
1git clone https://github.com/luxonis/depthai-python.git
2cd depthai-python/examples
3python3 install_requirements.py
Source code
Python
C++
Python
PythonGitHub
#!/usr/bin/env python3
"""Object tracker on RGB.

Runs MobileNet-SSD on the camera's RGB preview stream and feeds the
detections into DepthAI's on-device ObjectTracker, tracking only the
"person" class. Tracked bounding boxes, tracklet IDs and statuses are
drawn on the preview (or full) frame with OpenCV. Press 'q' to quit.
"""

from pathlib import Path
import cv2
import depthai as dai
import numpy as np
import time
import argparse

# PASCAL VOC class labels for mobilenet-ssd; index 15 is "person".
labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
            "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]

nnPathDefault = str((Path(__file__).parent / Path('../models/mobilenet-ssd_openvino_2021.4_6shave.blob')).resolve().absolute())
parser = argparse.ArgumentParser()
parser.add_argument('nnPath', nargs='?', help="Path to mobilenet detection network blob", default=nnPathDefault)
parser.add_argument('-ff', '--full_frame', action="store_true", help="Perform tracking on full RGB frame", default=False)

args = parser.parse_args()

fullFrameTracking = args.full_frame

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
detectionNetwork = pipeline.create(dai.node.MobileNetDetectionNetwork)
objectTracker = pipeline.create(dai.node.ObjectTracker)

xlinkOut = pipeline.create(dai.node.XLinkOut)
trackerOut = pipeline.create(dai.node.XLinkOut)

xlinkOut.setStreamName("preview")
trackerOut.setStreamName("tracklets")

# Properties
# 300x300 preview matches the mobilenet-ssd input size.
camRgb.setPreviewSize(300, 300)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setInterleaved(False)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
camRgb.setFps(40)

# Detection network configuration
detectionNetwork.setBlobPath(args.nnPath)
detectionNetwork.setConfidenceThreshold(0.5)
# Non-blocking input: drop frames rather than stall the camera when the NN lags.
detectionNetwork.input.setBlocking(False)

objectTracker.setDetectionLabelsToTrack([15])  # track only person
# possible tracking types: ZERO_TERM_COLOR_HISTOGRAM, ZERO_TERM_IMAGELESS, SHORT_TERM_IMAGELESS, SHORT_TERM_KCF
objectTracker.setTrackerType(dai.TrackerType.ZERO_TERM_COLOR_HISTOGRAM)
# take the smallest ID when new object is tracked, possible options: SMALLEST_ID, UNIQUE_ID
objectTracker.setTrackerIdAssignmentPolicy(dai.TrackerIdAssignmentPolicy.SMALLEST_ID)

# Linking
camRgb.preview.link(detectionNetwork.input)
objectTracker.passthroughTrackerFrame.link(xlinkOut.input)

if fullFrameTracking:
    # Track on the full-resolution video stream instead of the 300x300 preview.
    camRgb.video.link(objectTracker.inputTrackerFrame)
else:
    detectionNetwork.passthrough.link(objectTracker.inputTrackerFrame)

detectionNetwork.passthrough.link(objectTracker.inputDetectionFrame)
detectionNetwork.out.link(objectTracker.inputDetections)
objectTracker.out.link(trackerOut.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    preview = device.getOutputQueue("preview", 4, False)
    tracklets = device.getOutputQueue("tracklets", 4, False)

    startTime = time.monotonic()
    counter = 0
    fps = 0
    frame = None

    while(True):
        imgFrame = preview.get()
        track = tracklets.get()

        # Recompute the displayed NN FPS roughly once per second.
        counter += 1
        current_time = time.monotonic()
        if (current_time - startTime) > 1:
            fps = counter / (current_time - startTime)
            counter = 0
            startTime = current_time

        color = (255, 0, 0)
        frame = imgFrame.getCvFrame()
        trackletsData = track.tracklets
        for t in trackletsData:
            # Tracklet ROI is normalized [0..1]; scale to pixel coordinates.
            roi = t.roi.denormalize(frame.shape[1], frame.shape[0])
            x1 = int(roi.topLeft().x)
            y1 = int(roi.topLeft().y)
            x2 = int(roi.bottomRight().x)
            y2 = int(roi.bottomRight().y)

            # Fall back to the raw numeric label if it is outside labelMap.
            try:
                label = labelMap[t.label]
            except IndexError:
                label = t.label

            cv2.putText(frame, str(label), (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.putText(frame, f"ID: {t.id}", (x1 + 10, y1 + 35), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            # Original passed cv2.FONT_HERSHEY_SIMPLEX (a font enum) as the
            # thickness argument by mistake; use an explicit line thickness.
            cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)

        cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color)

        cv2.imshow("tracker", frame)

        if cv2.waitKey(1) == ord('q'):
            break
Pipeline
Need assistance?
Head over to the Discussion Forum for technical support or any other questions you might have.