Object tracker on RGB
This example shows how to run MobileNetv2SSD on the RGB input frame and perform object tracking on persons.
Demo
Setup
Please run the install script to download all required dependencies. Please note that this script must be run from a Git context, so you have to clone the depthai-python repository first and then run the script:

Command Line
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
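If you prefer to manage dependencies yourself, installing the example's Python packages directly with pip should also work. This is a minimal sketch assuming only the packages imported by this script (depthai, opencv-python, numpy); note that install_requirements.py may additionally download the model blobs the examples expect, so the script remains the recommended route.

Command Line
python3 -m pip install depthai opencv-python numpy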
Source code
Python
#!/usr/bin/env python3

from pathlib import Path
import cv2
import depthai as dai
import numpy as np
import time
import argparse

labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
            "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]

nnPathDefault = str((Path(__file__).parent / Path('../models/mobilenet-ssd_openvino_2021.4_6shave.blob')).resolve().absolute())
parser = argparse.ArgumentParser()
parser.add_argument('nnPath', nargs='?', help="Path to mobilenet detection network blob", default=nnPathDefault)
parser.add_argument('-ff', '--full_frame', action="store_true", help="Perform tracking on full RGB frame", default=False)

args = parser.parse_args()

fullFrameTracking = args.full_frame

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
detectionNetwork = pipeline.create(dai.node.MobileNetDetectionNetwork)
objectTracker = pipeline.create(dai.node.ObjectTracker)

xlinkOut = pipeline.create(dai.node.XLinkOut)
trackerOut = pipeline.create(dai.node.XLinkOut)
xinTrackerConfig = pipeline.create(dai.node.XLinkIn)

xlinkOut.setStreamName("preview")
trackerOut.setStreamName("tracklets")
xinTrackerConfig.setStreamName("trackerConfig")

# Properties
camRgb.setPreviewSize(300, 300)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setInterleaved(False)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
camRgb.setFps(40)

# testing MobileNet DetectionNetwork
detectionNetwork.setBlobPath(args.nnPath)
detectionNetwork.setConfidenceThreshold(0.5)
detectionNetwork.input.setBlocking(False)

objectTracker.setDetectionLabelsToTrack([15])  # track only person
# possible tracking types: ZERO_TERM_COLOR_HISTOGRAM, ZERO_TERM_IMAGELESS, SHORT_TERM_IMAGELESS, SHORT_TERM_KCF
objectTracker.setTrackerType(dai.TrackerType.ZERO_TERM_COLOR_HISTOGRAM)
# take the smallest ID when new object is tracked, possible options: SMALLEST_ID, UNIQUE_ID
objectTracker.setTrackerIdAssignmentPolicy(dai.TrackerIdAssignmentPolicy.SMALLEST_ID)

# Linking
camRgb.preview.link(detectionNetwork.input)
objectTracker.passthroughTrackerFrame.link(xlinkOut.input)

if fullFrameTracking:
    camRgb.video.link(objectTracker.inputTrackerFrame)
else:
    detectionNetwork.passthrough.link(objectTracker.inputTrackerFrame)

detectionNetwork.passthrough.link(objectTracker.inputDetectionFrame)
detectionNetwork.out.link(objectTracker.inputDetections)
objectTracker.out.link(trackerOut.input)

# set tracking parameters
objectTracker.setOcclusionRatioThreshold(0.4)
objectTracker.setTrackletMaxLifespan(120)
objectTracker.setTrackletBirthThreshold(3)

xinTrackerConfig.out.link(objectTracker.inputConfig)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    preview = device.getOutputQueue("preview", 4, False)
    tracklets = device.getOutputQueue("tracklets", 4, False)
    trackerConfigQueue = device.getInputQueue("trackerConfig")

    startTime = time.monotonic()
    counter = 0
    fps = 0
    frame = None

    while(True):
        latestTrackedIds = []
        imgFrame = preview.get()
        track = tracklets.get()

        counter+=1
        current_time = time.monotonic()
        if (current_time - startTime) > 1 :
            fps = counter / (current_time - startTime)
            counter = 0
            startTime = current_time

        color = (255, 0, 0)
        frame = imgFrame.getCvFrame()
        trackletsData = track.tracklets
        for t in trackletsData:
            roi = t.roi.denormalize(frame.shape[1], frame.shape[0])
            x1 = int(roi.topLeft().x)
            y1 = int(roi.topLeft().y)
            x2 = int(roi.bottomRight().x)
            y2 = int(roi.bottomRight().y)

            try:
                label = labelMap[t.label]
            except:
                label = t.label

            cv2.putText(frame, str(label), (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.putText(frame, f"ID: {[t.id]}", (x1 + 10, y1 + 35), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.rectangle(frame, (x1, y1), (x2, y2), color, 1)

            if t.status == dai.Tracklet.TrackingStatus.TRACKED:
                latestTrackedIds.append(t.id)

        cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color)

        cv2.imshow("tracker", frame)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('g'):
            # send tracker config to device
            config = dai.ObjectTrackerConfig()

            # take a random ID from the latest tracked IDs
            if len(latestTrackedIds) > 0:
                idToRemove = (np.random.choice(latestTrackedIds))
                print(f"Force removing ID: {idToRemove}")
                config.forceRemoveID(idToRemove)
                trackerConfigQueue.send(config)
            else:
                print("No tracked IDs available to force remove")
Pipeline
Need assistance?
Head over to Discussion Forum for technical support or any other questions you might have.