RGB-Left Align
This example demonstrates how to align depth information from a left camera to an RGB camera. This is particularly useful for applications where you need to overlay or compare depth and color data. An OpenCV window is created to display the blended image of the RGB and aligned depth data. Trackbars are provided to adjust the blending ratio and the static depth plane.Demo
Setup
Please run the install script to download all required dependencies. Please note that this script must be ran from git context, so you have to download the depthai-python repository first and then run the scriptCommand Line
1git clone https://github.com/luxonis/depthai-python.git
2cd depthai-python/examples
3python3 install_requirements.py
Source code
Python
C++
Python
PythonGitHub
1#!/usr/bin/env python3
2
3import cv2
4import depthai as dai
5from datetime import timedelta
6import numpy as np
7
8# This is an interactive example that shows how two frame sources without depth information.
9FPS = 30.0
10
11RGB_SOCKET = dai.CameraBoardSocket.CAM_A
12LEFT_SOCKET = dai.CameraBoardSocket.CAM_B
13ALIGN_SOCKET = LEFT_SOCKET
14
15COLOR_RESOLUTION = dai.ColorCameraProperties.SensorResolution.THE_1080_P
16LEFT_RIGHT_RESOLUTION = dai.MonoCameraProperties.SensorResolution.THE_720_P
17
18ISP_SCALE = 3
19
20device = dai.Device()
21
22calibrationHandler = device.readCalibration()
23rgbDistortion = calibrationHandler.getDistortionCoefficients(RGB_SOCKET)
24distortionModel = calibrationHandler.getDistortionModel(RGB_SOCKET)
25if distortionModel != dai.CameraModel.Perspective:
26 raise RuntimeError("Unsupported distortion model for RGB camera. This example supports only Perspective model.")
27
28pipeline = dai.Pipeline()
29
30# Define sources and outputs
31camRgb = pipeline.create(dai.node.ColorCamera)
32left = pipeline.create(dai.node.MonoCamera)
33sync = pipeline.create(dai.node.Sync)
34out = pipeline.create(dai.node.XLinkOut)
35align = pipeline.create(dai.node.ImageAlign)
36cfgIn = pipeline.create(dai.node.XLinkIn)
37
38left.setResolution(LEFT_RIGHT_RESOLUTION)
39left.setBoardSocket(LEFT_SOCKET)
40left.setFps(FPS)
41
42camRgb.setBoardSocket(RGB_SOCKET)
43camRgb.setResolution(COLOR_RESOLUTION)
44camRgb.setFps(FPS)
45camRgb.setIspScale(1, ISP_SCALE)
46
47out.setStreamName("out")
48
49sync.setSyncThreshold(timedelta(seconds=0.5 / FPS))
50
51cfgIn.setStreamName("config")
52
53cfg = align.initialConfig.get()
54staticDepthPlane = cfg.staticDepthPlane
55
56# Linking
57align.outputAligned.link(sync.inputs["aligned"])
58camRgb.isp.link(sync.inputs["rgb"])
59camRgb.isp.link(align.inputAlignTo)
60left.out.link(align.input)
61sync.out.link(out.input)
62cfgIn.out.link(align.inputConfig)
63
64
65rgbWeight = 0.4
66leftWeight = 0.6
67
68
69def updateBlendWeights(percentRgb):
70 """
71 Update the rgb and left weights used to blend rgb/left image
72
73 @param[in] percent_rgb The rgb weight expressed as a percentage (0..100)
74 """
75 global leftWeight
76 global rgbWeight
77 rgbWeight = float(percentRgb) / 100.0
78 leftWeight = 1.0 - rgbWeight
79
80def updateDepthPlane(depth):
81 global staticDepthPlane
82 staticDepthPlane = depth
83
84# Connect to device and start pipeline
85with device:
86 device.startPipeline(pipeline)
87 queue = device.getOutputQueue("out", 8, False)
88 cfgQ = device.getInputQueue("config")
89
90 # Configure windows; trackbar adjusts blending ratio of rgb/depth
91 windowName = "rgb-left"
92
93 # Set the window to be resizable and the initial size
94 cv2.namedWindow(windowName, cv2.WINDOW_NORMAL)
95 cv2.resizeWindow(windowName, 1280, 720)
96 cv2.createTrackbar(
97 "RGB Weight %",
98 windowName,
99 int(rgbWeight * 100),
100 100,
101 updateBlendWeights,
102 )
103 cv2.createTrackbar(
104 "Static Depth Plane [mm]",
105 windowName,
106 0,
107 2000,
108 updateDepthPlane,
109 )
110 while True:
111 messageGroup = queue.get()
112 assert isinstance(messageGroup, dai.MessageGroup)
113 frameRgb = messageGroup["rgb"]
114 assert isinstance(frameRgb, dai.ImgFrame)
115 leftAligned = messageGroup["aligned"]
116 assert isinstance(leftAligned, dai.ImgFrame)
117
118 frameRgbCv = frameRgb.getCvFrame()
119 # Colorize the aligned depth
120 leftCv = leftAligned.getCvFrame()
121
122 rgbIntrinsics = calibrationHandler.getCameraIntrinsics(RGB_SOCKET, int(frameRgbCv.shape[1]), int(frameRgbCv.shape[0]))
123
124 cvFrameUndistorted = cv2.undistort(
125 frameRgbCv,
126 np.array(rgbIntrinsics),
127 np.array(rgbDistortion),
128 )
129
130 if len(leftCv.shape) == 2:
131 leftCv = cv2.cvtColor(leftCv, cv2.COLOR_GRAY2BGR)
132 if leftCv.shape != cvFrameUndistorted.shape:
133 leftCv = cv2.resize(leftCv, (cvFrameUndistorted.shape[1], cvFrameUndistorted.shape[0]))
134
135 blended = cv2.addWeighted(cvFrameUndistorted, rgbWeight, leftCv, leftWeight, 0)
136 cv2.imshow(windowName, blended)
137
138 key = cv2.waitKey(1)
139 if key == ord("q"):
140 break
141
142 cfg.staticDepthPlane = staticDepthPlane
143 cfgQ.send(cfg)
Pipeline
Need assistance?
Head over to Discussion Forum for technical support or any other questions you might have.