DepthAI v2 has been superseded by DepthAI v3. You are viewing legacy documentation.
DepthAI Tutorials
DepthAI API References

ON THIS PAGE

  • Demo
  • Setup
  • Source code
  • Pipeline

RGB-ToF Align

This example demonstrates how to align depth information from a ToF (Time-of-Flight) sensor to an RGB camera. This setup is useful for applications requiring the overlay or comparison of depth and color data. An OpenCV window is created to display the blended image of the RGB and aligned depth data. Trackbars are provided to adjust the blending ratio.

Demo

RGB-ToF Alignment Demo

Setup

Please run the install script to download all required dependencies. Please note that this script must be run from inside the cloned repository, so you have to download the depthai-python repository first and then run the script.
Command Line
1git clone https://github.com/luxonis/depthai-python.git
2cd depthai-python/examples
3python3 install_requirements.py
For additional information, please follow the installation guide.

Source code

Python

Python
GitHub
1#!/usr/bin/env python3
2
3import numpy as np
4import cv2
5import depthai as dai
6import time
7from datetime import timedelta
8
# This example is intended to run unchanged on an OAK-D-SR-PoE camera
FPS = 30.0  # target capture rate for both cameras

RGB_SOCKET = dai.CameraBoardSocket.CAM_C  # color camera socket
TOF_SOCKET = dai.CameraBoardSocket.CAM_A  # time-of-flight sensor socket
ALIGN_SOCKET = RGB_SOCKET  # depth is aligned into the RGB camera's view
15
class FPSCounter:
    """Tracks recent frame timestamps and reports a rolling-average FPS."""

    def __init__(self):
        self.frameTimes = []

    def tick(self):
        # Record the current timestamp, then drop everything but the
        # newest 100 samples (in place).
        self.frameTimes.append(time.time())
        del self.frameTimes[:-100]

    def getFps(self):
        # At least two samples are needed to measure an interval.
        if len(self.frameTimes) <= 1:
            return 0
        span = self.frameTimes[-1] - self.frameTimes[0]
        return (len(self.frameTimes) - 1) / span
31
# Denominator of the ISP scale: the rgb output is 1/ISP_SCALE of the
# sensor resolution.
ISP_SCALE = 2

device = dai.Device()

# Read the device calibration so the rgb frame can be undistorted on the
# host before blending with the aligned depth.
calibrationHandler = device.readCalibration()
rgbDistortion = calibrationHandler.getDistortionCoefficients(RGB_SOCKET)
distortionModel = calibrationHandler.getDistortionModel(RGB_SOCKET)
if distortionModel != dai.CameraModel.Perspective:
    raise RuntimeError("Unsupported distortion model for RGB camera. This example supports only Perspective model.")
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
tof = pipeline.create(dai.node.ToF)
camTof = pipeline.create(dai.node.Camera)
sync = pipeline.create(dai.node.Sync)
align = pipeline.create(dai.node.ImageAlign)
out = pipeline.create(dai.node.XLinkOut)

# ToF settings
camTof.setFps(FPS)
camTof.setImageOrientation(dai.CameraImageOrientation.ROTATE_180_DEG)
camTof.setBoardSocket(TOF_SOCKET)

# rgb settings
camRgb.setBoardSocket(RGB_SOCKET)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_800_P)
camRgb.setFps(FPS)
camRgb.setIspScale(1, ISP_SCALE)  # scale ISP output down by ISP_SCALE

out.setStreamName("out")

# Pair frames whose timestamps differ by at most half a frame period.
sync.setSyncThreshold(timedelta(seconds=0.5 / FPS))

# Linking
camRgb.isp.link(sync.inputs["rgb"])
camTof.raw.link(tof.input)  # raw ToF frames are decoded into depth by the ToF node
tof.depth.link(align.input)  # decoded depth is warped into the rgb view
align.outputAligned.link(sync.inputs["depth_aligned"])
sync.inputs["rgb"].setBlocking(False)  # drop rgb frames rather than stall the sync node
camRgb.isp.link(align.inputAlignTo)  # rgb stream defines the alignment target
sync.out.link(out.input)
75
def colorizeDepth(frameDepth):
    """Colorize a depth frame as a BGR (JET) heat map.

    Depth is log-scaled between the 3rd and 95th percentile of the valid
    (non-zero) pixels so nearby distances get more of the color range.
    Invalid (zero-depth) pixels are rendered black.

    @param frameDepth 2-D array of depth values; 0 marks invalid pixels.
    @return HxWx3 uint8 BGR image; all-black if no valid pixels exist.
    """
    invalidMask = frameDepth == 0
    try:
        # np.percentile raises IndexError on an empty selection, which we
        # use to detect a frame with no valid depth at all.
        validDepth = frameDepth[~invalidMask]
        minDepth = np.percentile(validDepth, 3)
        maxDepth = np.percentile(validDepth, 95)
        # Use an explicit zero-initialized `out` buffer: np.log(..., where=...)
        # without `out` leaves the masked positions uninitialized (garbage),
        # which the old nan_to_num call could not reliably repair.
        logDepth = np.zeros(frameDepth.shape, dtype=np.float32)
        np.log(frameDepth, out=logDepth, where=~invalidMask)
        logMinDepth = np.log(minDepth)
        logMaxDepth = np.log(maxDepth)
        # Clip the values into the valid log-depth range (this also lifts the
        # zero-filled invalid entries up to logMinDepth).
        logDepth = np.clip(logDepth, logMinDepth, logMaxDepth)
        # Map the clipped log range onto 0-255 and colorize.
        depthFrameColor = np.interp(logDepth, (logMinDepth, logMaxDepth), (0, 255))
        depthFrameColor = depthFrameColor.astype(np.uint8)
        depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_JET)
        # Set invalid depth pixels to black
        depthFrameColor[invalidMask] = 0
    except IndexError:
        # Frame is likely empty (no valid depth pixels)
        depthFrameColor = np.zeros((frameDepth.shape[0], frameDepth.shape[1], 3), dtype=np.uint8)
    return depthFrameColor
102
103
# Blend weights for the rgb/depth overlay; always kept summing to 1.
rgbWeight = 0.4
depthWeight = 0.6


def updateBlendWeights(percentRgb):
    """
    Update the rgb and depth weights used to blend the depth/rgb image.

    @param[in] percentRgb The rgb weight expressed as a percentage (0..100)
    """
    global rgbWeight, depthWeight
    ratio = float(percentRgb) / 100.0
    rgbWeight = ratio
    depthWeight = 1.0 - ratio
118
119
120
# Connect to device and start pipeline
with device:
    device.startPipeline(pipeline)
    # Non-blocking output queue holding up to 8 synced message groups.
    queue = device.getOutputQueue("out", 8, False)

    # Configure windows; trackbar adjusts blending ratio of rgb/depth
    rgbDepthWindowName = "rgb-depth"

    cv2.namedWindow(rgbDepthWindowName)
    cv2.createTrackbar(
        "RGB Weight %",
        rgbDepthWindowName,
        int(rgbWeight * 100),
        100,
        updateBlendWeights,
    )
    fpsCounter = FPSCounter()
    while True:
        messageGroup = queue.get()  # blocks until a synced rgb+depth pair arrives
        fpsCounter.tick()
        assert isinstance(messageGroup, dai.MessageGroup)
        frameRgb = messageGroup["rgb"]
        assert isinstance(frameRgb, dai.ImgFrame)
        frameDepth = messageGroup["depth_aligned"]
        assert isinstance(frameDepth, dai.ImgFrame)

        # NOTE(review): sizeRgb/sizeDepth are computed but never used below.
        sizeRgb = frameRgb.getData().size
        sizeDepth = frameDepth.getData().size
        # Blend when both received
        if frameDepth is not None:
            cvFrame = frameRgb.getCvFrame()
            # Query intrinsics at the actual (ISP-scaled) frame size.
            rgbIntrinsics = calibrationHandler.getCameraIntrinsics(RGB_SOCKET, int(cvFrame.shape[1]), int(cvFrame.shape[0]))
            # Undistort the rgb frame so it overlays the aligned depth cleanly.
            cvFrameUndistorted = cv2.undistort(
                cvFrame,
                np.array(rgbIntrinsics),
                np.array(rgbDistortion),
            )
            # Colorize the aligned depth
            alignedDepthColorized = colorizeDepth(frameDepth.getFrame())
            # Overlay the measured FPS on the depth visualization.
            cv2.putText(
                alignedDepthColorized,
                f"FPS: {fpsCounter.getFps():.2f}",
                (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX,
                1,
                (255, 255, 255),
                2,
            )
            cv2.imshow("depth", alignedDepthColorized)

            # Weighted blend controlled by the trackbar callback.
            blended = cv2.addWeighted(
                cvFrameUndistorted, rgbWeight, alignedDepthColorized, depthWeight, 0
            )
            cv2.imshow(rgbDepthWindowName, blended)

        key = cv2.waitKey(1)
        if key == ord("q"):
            break

C++

1#include <opencv2/opencv.hpp>
2
3#include "depthai/depthai.hpp"
4
// Target capture rate for both cameras.
constexpr auto FPS = 30.0;

// NOTE(review): the Python version of this example uses CAM_C for the RGB
// socket — confirm which socket matches the intended board.
constexpr auto RGB_SOCKET = dai::CameraBoardSocket::CAM_B;
constexpr auto TOF_SOCKET = dai::CameraBoardSocket::CAM_A;
constexpr auto ALIGN_SOCKET = RGB_SOCKET;
10
// Tracks recent frame timestamps and reports a rolling-average FPS.
class FPSCounter {
   public:
    // Record a frame timestamp; keeps only the most recent 100 samples.
    void tick() {
        auto now = std::chrono::steady_clock::now();
        frameTimes.push(now);
        if(frameTimes.size() > 100) {
            frameTimes.pop();
        }
    }

    // Average FPS over the retained window. Returns 0 with fewer than two
    // samples, or when the window spans zero time. The previous version
    // truncated the span to whole milliseconds, so two ticks within the
    // same millisecond divided by zero and returned inf.
    double getFps() {
        if(frameTimes.size() <= 1) return 0;
        std::chrono::duration<double> span = frameTimes.back() - frameTimes.front();
        if(span.count() <= 0.0) return 0;
        return static_cast<double>(frameTimes.size() - 1) / span.count();
    }

   private:
    std::queue<std::chrono::steady_clock::time_point> frameTimes;
};
30
// Map a depth frame to a BGR (JET) heat map. Invalid (zero-depth) pixels are
// rendered black. Depth is log-scaled before normalization so nearby
// distances get more of the color range.
cv::Mat colorizeDepth(const cv::Mat& frameDepth) {
    cv::Mat invalidMask = (frameDepth == 0);  // 255 where depth is invalid
    cv::Mat depthFrameColor;
    try {
        double minDepth = 0.0;
        double maxDepth = 0.0;
        // Min/max over valid pixels only (~invalidMask).
        cv::minMaxIdx(frameDepth, &minDepth, &maxDepth, nullptr, nullptr, ~invalidMask);
        if(minDepth == maxDepth) {
            // Flat (or fully invalid) frame: nothing meaningful to colorize.
            depthFrameColor = cv::Mat::zeros(frameDepth.size(), CV_8UC3);
            return depthFrameColor;
        }
        cv::Mat logDepth;
        frameDepth.convertTo(logDepth, CV_32F);
        cv::log(logDepth, logDepth);
        // Invalid pixels would be log(0) = -inf; pin them to the valid minimum
        // so the min-max normalization below is not skewed.
        logDepth.setTo(log(minDepth), invalidMask);
        cv::normalize(logDepth, logDepth, 0, 255, cv::NORM_MINMAX, CV_8UC1);
        cv::applyColorMap(logDepth, depthFrameColor, cv::COLORMAP_JET);
        // Black out the invalid pixels after colorizing.
        depthFrameColor.setTo(cv::Scalar(0, 0, 0), invalidMask);
    } catch(const std::exception& e) {
        // Likely an empty frame; return an all-black image of the same size.
        depthFrameColor = cv::Mat::zeros(frameDepth.size(), CV_8UC3);
    }
    return depthFrameColor;
}
54
// Blend weights for the rgb/depth overlay; always kept summing to 1.
double rgbWeight = 0.4;
double depthWeight = 0.6;

// Trackbar callback: converts the 0..100 slider position into the rgb
// weight and derives the complementary depth weight.
void updateBlendWeights(int percentRgb, void*) {
    const double ratio = percentRgb / 100.0;
    rgbWeight = ratio;
    depthWeight = 1.0 - ratio;
}
62
int main() {
    dai::Pipeline pipeline;

    // Define sources and outputs.
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto camTof = pipeline.create<dai::node::Camera>();
    auto tof = pipeline.create<dai::node::ToF>();
    auto sync = pipeline.create<dai::node::Sync>();
    auto align = pipeline.create<dai::node::ImageAlign>();
    auto xout = pipeline.create<dai::node::XLinkOut>();

    // ToF settings.
    camTof->setFps(FPS);
    camTof->setImageOrientation(dai::CameraImageOrientation::ROTATE_180_DEG);
    camTof->setBoardSocket(TOF_SOCKET);

    // RGB settings.
    camRgb->setBoardSocket(RGB_SOCKET);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_800_P);
    camRgb->setFps(FPS);
    camRgb->setIspScale(1, 2);  // halve the ISP output resolution

    xout->setStreamName("out");

    // NOTE(review): this tolerates a full frame period of drift; the Python
    // version uses half a period (0.5 / FPS) — confirm which is intended.
    sync->setSyncThreshold(std::chrono::milliseconds(static_cast<int>(1000 / FPS)));

    // Linking: raw ToF -> decoded depth -> aligned to rgb; rgb and aligned
    // depth are paired by the sync node and sent out over XLink.
    camRgb->isp.link(sync->inputs["rgb"]);
    camTof->raw.link(tof->input);
    tof->depth.link(align->input);
    align->outputAligned.link(sync->inputs["depth_aligned"]);
    camRgb->isp.link(align->inputAlignTo);  // rgb stream defines the alignment target
    sync->out.link(xout->input);

    dai::Device device(pipeline);
    auto queue = device.getOutputQueue("out", 8, false);

    FPSCounter fpsCounter;

    // Window with a trackbar controlling the rgb/depth blend ratio.
    std::string rgbDepthWindowName = "rgb-depth";
    cv::namedWindow(rgbDepthWindowName);
    cv::createTrackbar("RGB Weight %", rgbDepthWindowName, nullptr, 100, updateBlendWeights);

    while(true) {
        auto messageGroup = queue->get<dai::MessageGroup>();  // blocks for a synced pair
        fpsCounter.tick();

        auto frameRgb = messageGroup->get<dai::ImgFrame>("rgb");
        auto frameDepth = messageGroup->get<dai::ImgFrame>("depth_aligned");

        if(frameRgb && frameDepth) {
            auto cvFrame = frameRgb->getCvFrame();
            auto alignedDepthColorized = colorizeDepth(frameDepth->getFrame());

            // Overlay the measured FPS on the depth visualization.
            cv::putText(alignedDepthColorized,
                        "FPS: " + std::to_string(fpsCounter.getFps()),
                        cv::Point(10, 30),
                        cv::FONT_HERSHEY_SIMPLEX,
                        1,
                        cv::Scalar(255, 255, 255),
                        2);

            cv::imshow("depth", alignedDepthColorized);

            // NOTE(review): unlike the Python version, the rgb frame is not
            // undistorted before blending — confirm whether that is intended.
            cv::Mat blended;
            cv::addWeighted(cvFrame, rgbWeight, alignedDepthColorized, depthWeight, 0, blended);
            cv::imshow(rgbDepthWindowName, blended);
        }

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            break;
        }
    }

    return 0;
}

Pipeline

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.