Depth Align
This example showcases synchronized RGB and depth video stream processing with adjustable blending, colorized depth alignment, and real-time FPS display.

Setup
This example requires the DepthAI v3 API; see the installation instructions.

Pipeline
Source code
Python
C++
Python
Python (GitHub)
#!/usr/bin/env python3

import numpy as np
import cv2
import depthai as dai
import time
from datetime import timedelta

# Target frame rate for every camera so the Sync node can pair frames.
FPS = 30.0

# Physical camera sockets on the device.
RGB_SOCKET = dai.CameraBoardSocket.CAM_A
LEFT_SOCKET = dai.CameraBoardSocket.CAM_B
RIGHT_SOCKET = dai.CameraBoardSocket.CAM_C
# Socket the depth output is aligned to before host-side processing.
ALIGN_SOCKET = LEFT_SOCKET

COLOR_RESOLUTION = dai.ColorCameraProperties.SensorResolution.THE_1080_P
LEFT_RIGHT_RESOLUTION = dai.MonoCameraProperties.SensorResolution.THE_400_P

# ISP downscale denominator for the color camera (output = 1 / ISP_SCALE).
ISP_SCALE = 3
class FPSCounter:
    """Tracks recent frame timestamps and reports a moving-average FPS."""

    def __init__(self):
        # Timestamps (seconds since epoch) of up to the last 10 ticks.
        self.frameTimes = []

    def tick(self):
        """Record the current time as a frame arrival, keeping only the last 10."""
        self.frameTimes.append(time.time())
        del self.frameTimes[:-10]

    def getFps(self):
        """Return frames per second over the stored window (0 with < 2 samples)."""
        if len(self.frameTimes) <= 1:
            return 0
        elapsed = self.frameTimes[-1] - self.frameTimes[0]
        return (len(self.frameTimes) - 1) / elapsed
33
# Connect to the device first: calibration (intrinsics + distortion) is needed
# on the host to undistort the RGB frames before blending.
device = dai.Device()

calibrationHandler = device.readCalibration()
rgbDistortion = calibrationHandler.getDistortionCoefficients(RGB_SOCKET)
distortionModel = calibrationHandler.getDistortionModel(RGB_SOCKET)
if distortionModel != dai.CameraModel.Perspective:
    raise RuntimeError("Unsupported distortion model for RGB camera. This example supports only Perspective model.")

pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
left = pipeline.create(dai.node.MonoCamera)
right = pipeline.create(dai.node.MonoCamera)
stereo = pipeline.create(dai.node.StereoDepth)
sync = pipeline.create(dai.node.Sync)
out = pipeline.create(dai.node.XLinkOut)
align = pipeline.create(dai.node.ImageAlign)

left.setResolution(LEFT_RIGHT_RESOLUTION)
left.setBoardSocket(LEFT_SOCKET)
left.setFps(FPS)

right.setResolution(LEFT_RIGHT_RESOLUTION)
right.setBoardSocket(RIGHT_SOCKET)
right.setFps(FPS)

camRgb.setBoardSocket(RGB_SOCKET)
camRgb.setResolution(COLOR_RESOLUTION)
camRgb.setFps(FPS)
camRgb.setIspScale(1, ISP_SCALE)

stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
# Use the ALIGN_SOCKET constant declared at the top of the script rather than
# the deprecated dai.CameraBoardSocket.LEFT alias, so the depth alignment
# socket stays in one place.
stereo.setDepthAlign(ALIGN_SOCKET)

out.setStreamName("out")

# Frames whose timestamps are within half a frame interval are grouped.
sync.setSyncThreshold(timedelta(seconds=0.5 / FPS))

# Linking
camRgb.isp.link(sync.inputs["rgb"])
left.out.link(stereo.left)
right.out.link(stereo.right)
stereo.depth.link(align.input)
align.outputAligned.link(sync.inputs["depth_aligned"])
camRgb.isp.link(align.inputAlignTo)
sync.out.link(out.input)
82
83
def colorizeDepth(frameDepth):
    """Convert a raw depth frame (0 = invalid pixel) to a JET-colorized BGR image.

    Depth is log-scaled between the 3rd and 95th percentiles of the valid
    (non-zero) pixels so both near and far detail stay visible; invalid
    pixels are rendered black.

    @param[in] frameDepth 2D numpy array of depth values (0 marks invalid).
    @return HxWx3 uint8 BGR image.
    """
    invalidMask = frameDepth == 0
    validDepth = frameDepth[frameDepth != 0]
    try:
        # np.percentile raises IndexError on an empty array -- handled below.
        minDepth = np.percentile(validDepth, 3)
        maxDepth = np.percentile(validDepth, 95)
        # where= leaves masked entries unspecified; they are clipped below and
        # finally blacked out via invalidMask, matching the original behavior.
        logDepth = np.log(frameDepth, where=frameDepth != 0)
        logMinDepth = np.log(minDepth)
        logMaxDepth = np.log(maxDepth)
        np.nan_to_num(logDepth, copy=False, nan=logMinDepth)
        # Clip the values to be in the 0-255 range after interpolation
        logDepth = np.clip(logDepth, logMinDepth, logMaxDepth)

        # Interpolate only valid logDepth values, setting the rest based on the mask
        depthFrameColor = np.interp(logDepth, (logMinDepth, logMaxDepth), (0, 255))
        depthFrameColor = np.nan_to_num(depthFrameColor)
        depthFrameColor = depthFrameColor.astype(np.uint8)
        depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_JET)
        # Set invalid depth pixels to black
        depthFrameColor[invalidMask] = 0
    except IndexError:
        # Frame has no valid pixels (likely empty) -- return an all-black image.
        depthFrameColor = np.zeros((frameDepth.shape[0], frameDepth.shape[1], 3), dtype=np.uint8)
    return depthFrameColor
110
111
# Blend weights used when overlaying colorized depth on the RGB frame.
rgbWeight = 0.4
depthWeight = 0.6


def updateBlendWeights(percentRgb):
    """
    Update the rgb and depth weights used to blend depth/rgb image

    @param[in] percentRgb The rgb weight expressed as a percentage (0..100)
    """
    global rgbWeight, depthWeight
    rgbWeight = float(percentRgb) / 100.0
    depthWeight = 1.0 - rgbWeight
126
127
# Connect to device and start pipeline
with device:
    device.startPipeline(pipeline)
    queue = device.getOutputQueue("out", 8, False)

    # Configure window; the trackbar adjusts the rgb/depth blending ratio.
    windowName = "rgb-depth"

    # Set the window to be resizable and the initial size
    cv2.namedWindow(windowName, cv2.WINDOW_NORMAL)
    cv2.resizeWindow(windowName, 1280, 720)
    cv2.createTrackbar(
        "RGB Weight %",
        windowName,
        int(rgbWeight * 100),
        100,
        updateBlendWeights,
    )
    fpsCounter = FPSCounter()
    while True:
        messageGroup = queue.get()
        fpsCounter.tick()
        assert isinstance(messageGroup, dai.MessageGroup)
        frameRgb = messageGroup["rgb"]
        assert isinstance(frameRgb, dai.ImgFrame)
        frameDepth = messageGroup["depth_aligned"]
        assert isinstance(frameDepth, dai.ImgFrame)

        cvFrame = frameRgb.getCvFrame()

        # Intrinsics are resolution-dependent; request them at the actual
        # frame size so undistortion matches the ISP-scaled output.
        rgbIntrinsics = calibrationHandler.getCameraIntrinsics(
            RGB_SOCKET, int(cvFrame.shape[1]), int(cvFrame.shape[0])
        )

        # Undistort the rgb frame so it overlays the aligned depth correctly.
        cvFrameUndistorted = cv2.undistort(
            cvFrame,
            np.array(rgbIntrinsics),
            np.array(rgbDistortion),
        )
        # Colorize the aligned depth (ImageAlign already matched it to the
        # RGB frame size, so no host-side resize is needed).
        alignedDepthColorized = colorizeDepth(frameDepth.getFrame())
        cv2.imshow("Depth aligned", alignedDepthColorized)

        blended = cv2.addWeighted(
            cvFrameUndistorted, rgbWeight, alignedDepthColorized, depthWeight, 0
        )
        cv2.putText(
            blended,
            f"FPS: {fpsCounter.getFps():.2f}",
            (10, 30),
            cv2.FONT_HERSHEY_SIMPLEX,
            1,
            (255, 255, 255),
            2,
        )
        cv2.imshow(windowName, blended)

        if cv2.waitKey(1) == ord("q"):
            break
Need assistance?
Head over to the Discussion Forum for technical support or any other questions you might have.