RGB-ToF Align

This example demonstrates how to align depth information from a ToF (Time-of-Flight) sensor to an RGB camera. This setup is useful for applications that require overlaying or comparing depth and color data. An OpenCV window displays the blended image of the RGB and aligned depth data, and a trackbar is provided to adjust the blending ratio.
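At its core, the alignment happens on-device in an ImageAlign node: the ToF depth stream is fed into the node and the RGB ISP stream is used as the alignment target, so the aligned depth comes out in the RGB camera's frame of reference. The condensed sketch below shows just that wiring; the node and stream names are the same ones used in the full example further down, but this is not a complete runnable script on its own.
Python
import depthai as dai

pipeline = dai.Pipeline()
camRgb = pipeline.create(dai.node.ColorCamera)  # RGB source
camTof = pipeline.create(dai.node.Camera)       # ToF sensor
tof = pipeline.create(dai.node.ToF)             # decodes raw ToF frames into depth
align = pipeline.create(dai.node.ImageAlign)    # remaps depth into the RGB view

camTof.raw.link(tof.input)           # raw ToF frames -> depth
tof.depth.link(align.input)          # depth -> aligner
camRgb.isp.link(align.inputAlignTo)  # RGB frames define the target geometry
# align.outputAligned now carries depth aligned to the RGB camera
The blended preview itself is produced on the host with cv2.addWeighted, using the trackbar value as the RGB weight.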

Setup

Please run the install script to download all required dependencies. Please note that this script must be run from within the Git repository, so you have to clone the depthai-python repository first and then run the script:
Command Line
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow the installation guide.
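If you want to verify that the installation succeeded and that your OAK device is visible to the host before running the example, a quick sanity check along these lines can help. This snippet is not part of the example itself and assumes a recent depthai release:
Python
import depthai as dai

print("depthai version:", dai.__version__)
# List devices currently visible to the host; an empty list means none were found
for info in dai.Device.getAllAvailableDevices():
    print("Found device:", info.getMxId(), "state:", info.state)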

Source code

Python
import numpy as np
import cv2
import depthai as dai
import time
from datetime import timedelta

# This example is intended to run unchanged on an OAK-D-SR-PoE camera
FPS = 30.0

RGB_SOCKET = dai.CameraBoardSocket.CAM_C
TOF_SOCKET = dai.CameraBoardSocket.CAM_A
ALIGN_SOCKET = RGB_SOCKET

class FPSCounter:
    def __init__(self):
        self.frameTimes = []

    def tick(self):
        now = time.time()
        self.frameTimes.append(now)
        self.frameTimes = self.frameTimes[-100:]

    def getFps(self):
        if len(self.frameTimes) <= 1:
            return 0
        # Calculate the FPS
        return (len(self.frameTimes) - 1) / (self.frameTimes[-1] - self.frameTimes[0])


pipeline = dai.Pipeline()
# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
tof = pipeline.create(dai.node.ToF)
camTof = pipeline.create(dai.node.Camera)
sync = pipeline.create(dai.node.Sync)
align = pipeline.create(dai.node.ImageAlign)
out = pipeline.create(dai.node.XLinkOut)

# ToF settings
camTof.setFps(FPS)
camTof.setImageOrientation(dai.CameraImageOrientation.ROTATE_180_DEG)
camTof.setBoardSocket(TOF_SOCKET)

# RGB settings
camRgb.setBoardSocket(RGB_SOCKET)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_800_P)
camRgb.setFps(FPS)
camRgb.setIspScale(1, 2)

out.setStreamName("out")

sync.setSyncThreshold(timedelta(seconds=(1 / FPS)))

# Linking
camRgb.isp.link(sync.inputs["rgb"])
camTof.raw.link(tof.input)
tof.depth.link(align.input)
align.outputAligned.link(sync.inputs["depth_aligned"])
sync.inputs["rgb"].setBlocking(False)
camRgb.isp.link(align.inputAlignTo)
sync.out.link(out.input)

def colorizeDepth(frameDepth):
    invalidMask = frameDepth == 0
    # Log-scale the depth between its 3rd and 95th percentiles
    try:
        minDepth = np.percentile(frameDepth[frameDepth != 0], 3)
        maxDepth = np.percentile(frameDepth[frameDepth != 0], 95)
        logDepth = np.log(frameDepth, where=frameDepth != 0)
        logMinDepth = np.log(minDepth)
        logMaxDepth = np.log(maxDepth)
        np.nan_to_num(logDepth, copy=False, nan=logMinDepth)
        # Clip the log values to the [logMinDepth, logMaxDepth] range
        logDepth = np.clip(logDepth, logMinDepth, logMaxDepth)

        # Interpolate only valid logDepth values, setting the rest based on the mask
        depthFrameColor = np.interp(logDepth, (logMinDepth, logMaxDepth), (0, 255))
        depthFrameColor = np.nan_to_num(depthFrameColor)
        depthFrameColor = depthFrameColor.astype(np.uint8)
        depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_JET)
        # Set invalid depth pixels to black
        depthFrameColor[invalidMask] = 0
    except IndexError:
        # Frame is likely empty
        depthFrameColor = np.zeros((frameDepth.shape[0], frameDepth.shape[1], 3), dtype=np.uint8)
    except Exception as e:
        raise e
    return depthFrameColor


rgbWeight = 0.4
depthWeight = 0.6


def updateBlendWeights(percentRgb):
    """
    Update the rgb and depth weights used to blend depth/rgb image

    @param[in] percentRgb The rgb weight expressed as a percentage (0..100)
    """
    global depthWeight
    global rgbWeight
    rgbWeight = float(percentRgb) / 100.0
    depthWeight = 1.0 - rgbWeight


# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    queue = device.getOutputQueue("out", 8, False)

    # Configure windows; trackbar adjusts blending ratio of rgb/depth
    rgbDepthWindowName = "rgb-depth"

    cv2.namedWindow(rgbDepthWindowName)
    cv2.createTrackbar(
        "RGB Weight %",
        rgbDepthWindowName,
        int(rgbWeight * 100),
        100,
        updateBlendWeights,
    )
    fpsCounter = FPSCounter()
    while True:
        messageGroup = queue.get()
        fpsCounter.tick()
        assert isinstance(messageGroup, dai.MessageGroup)
        frameRgb = messageGroup["rgb"]
        assert isinstance(frameRgb, dai.ImgFrame)
        frameDepth = messageGroup["depth_aligned"]
        assert isinstance(frameDepth, dai.ImgFrame)

        sizeRgb = frameRgb.getData().size
        sizeDepth = frameDepth.getData().size
        # Blend when both received
        if frameDepth is not None:
            cvFrame = frameRgb.getCvFrame()
            # Colorize the aligned depth
            alignedDepthColorized = colorizeDepth(frameDepth.getFrame())
            # Overlay the measured FPS on the colorized depth frame
            cv2.putText(
                alignedDepthColorized,
                f"FPS: {fpsCounter.getFps():.2f}",
                (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX,
                1,
                (255, 255, 255),
                2,
            )
            cv2.imshow("depth", alignedDepthColorized)

            blended = cv2.addWeighted(
                cvFrame, rgbWeight, alignedDepthColorized, depthWeight, 0
            )
            cv2.imshow(rgbDepthWindowName, blended)

        key = cv2.waitKey(1)
        if key == ord("q"):
            break
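Because the aligned depth frame shares the RGB camera's geometry, a pixel coordinate in the color image indexes the same point in the depth map. As a small illustrative extension (not part of the example above; the callback and the latestDepth variable are hypothetical additions you would have to wire into the main loop yourself), you could print the metric depth under a clicked pixel:
Python
# Hypothetical extension: print the depth at a clicked pixel.
# Assumes latestDepth is updated once per loop iteration with
# frameDepth.getFrame(), a uint16 array of depth values in millimetres.
latestDepth = None

def onMouse(event, x, y, flags, param):
    if event == cv2.EVENT_LBUTTONDOWN and latestDepth is not None:
        print(f"Depth at ({x}, {y}): {latestDepth[y, x]} mm")

cv2.setMouseCallback(rgbDepthWindowName, onMouse)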
