DepthAI
Software Stack

ON THIS PAGE

  • Source code

Neural Assisted Stereo

Supported on: RVC4
This example demonstrates the NeuralAssistedStereo node, which fuses NeuralDepth with classical StereoDepth.
This example requires the DepthAI v3 API, see installation instructions.

Source code

Python

Python
GitHub
1import numpy as np
2import cv2 as cv
3import depthai as dai
4
5FPS = 20
6
def showDepth(depthFrame, windowName="Depth", minDistance=500, maxDistance=5000,
              colormap=cv.COLORMAP_TURBO, useLog=False):
    """
    Nicely visualize a depth map.

    Args:
        depthFrame (np.ndarray): Depth frame (in millimeters).
        windowName (str): OpenCV window name.
        minDistance (int): Minimum depth to display (in mm); mapped to 0 (darkest).
        maxDistance (int): Maximum depth to display (in mm); mapped to 255 (brightest).
        colormap (int): OpenCV colormap (e.g., cv.COLORMAP_JET, COLORMAP_TURBO, etc.).
        useLog (bool): Apply logarithmic scaling for better visual contrast.

    Example:
        frame = depth.getCvFrame()
        showDepth(frame)
    """
    # Convert to float for processing
    depthFrame = depthFrame.astype(np.float32)

    # Clip to the displayable range FIRST, in millimeters. The previous
    # version clipped AFTER log scaling, comparing log values (<= ~8.5)
    # against raw mm thresholds, which collapsed the whole image to a
    # constant whenever useLog=True.
    depthFrame = np.clip(depthFrame, minDistance, maxDistance)

    lo, hi = float(minDistance), float(maxDistance)
    if useLog:
        # Log-compress the data and the normalization bounds together so
        # the full 0-255 range is still used after scaling.
        depthFrame = np.log(depthFrame + 1)
        lo, hi = float(np.log(lo + 1)), float(np.log(hi + 1))

    # Normalize [lo, hi] -> [0, 255] so minDistance maps to 0 and
    # maxDistance to 255. (The previous version divided by maxDistance
    # only, wasting the low end of the display range.)
    span = hi - lo if hi > lo else 1.0
    depthFrame = np.uint8((depthFrame - lo) / span * 255)

    # Apply color map
    depthColor = cv.applyColorMap(depthFrame, colormap)

    # Show in a window
    cv.imshow(windowName, depthColor)
39
if __name__ == "__main__":
    # Connect to a device and attach a pipeline to it.
    device = dai.Device()
    pipeline = dai.Pipeline(device)

    # NeuralDepth runs only on supported devices (e.g. RVC4); bail out early otherwise.
    if not device.isNeuralDepthSupported():
        print("Exiting NeuralAssistedStereo example: device doesn't support NeuralDepth.")
        exit()

    # Stereo pair: left (CAM_B) and right (CAM_C) mono cameras at a fixed FPS.
    camLeft = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B, sensorFps=FPS)
    camRight = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C, sensorFps=FPS)
    leftStream = camLeft.requestFullResolutionOutput()
    rightStream = camRight.requestFullResolutionOutput()

    # Fuse classical stereo matching with the NeuralDepth nano model.
    nas = pipeline.create(dai.node.NeuralAssistedStereo).build(
        leftStream, rightStream, neuralModel=dai.DeviceModelZoo.NEURAL_DEPTH_NANO
    )

    # Host-side queue that delivers disparity frames.
    dispQueue = nas.disparity.createOutputQueue()

    with pipeline:
        pipeline.start()
        while pipeline.isRunning():
            frame = dispQueue.get()
            showDepth(frame.getCvFrame(), minDistance=100, maxDistance=6000, useLog=False)

            # Press 'q' to terminate the example.
            if cv.waitKey(1) == ord('q'):
                quit()

C++

#include <cmath>
#include <iostream>

#include <opencv2/opencv.hpp>

#include <depthai/depthai.hpp>
#include <depthai/device/Device.hpp>
#include <depthai/pipeline/node/Camera.hpp>
#include <depthai/pipeline/node/NeuralAssistedStereo.hpp>
7
8constexpr float FPS = 20.0f;
9// Nicely visualize a depth map.
10// The input depthFrame is assumed to be the raw disparity (CV_16UC1 or similar)
11// received from the DepthAI pipeline.
12void showDepth(const cv::Mat& depthFrameIn,
13               const std::string& windowName = "Depth",
14               int minDistance = 500,
15               int maxDistance = 5000,
16               int colormap = cv::COLORMAP_TURBO,
17               bool useLog = false) {
18    cv::Mat depthFrame = depthFrameIn.clone();
19
20    cv::Mat floatFrame;
21    depthFrame.convertTo(floatFrame, CV_32FC1);
22
23    // # Optionally apply log scaling
24    if(useLog) {
25        // depthFrame = np.log(depthFrame + 1)
26        cv::log(floatFrame + 1, floatFrame);
27    }
28
29    cv::Mat upperClamped;
30    cv::min(floatFrame, maxDistance, upperClamped);
31
32    cv::Mat clippedFrame;
33    cv::max(upperClamped, minDistance, clippedFrame);
34
35    double alpha = 255.0 / maxDistance;
36    clippedFrame.convertTo(clippedFrame, CV_8U, alpha);
37
38    cv::Mat depthColor;
39    cv::applyColorMap(clippedFrame, depthColor, colormap);
40
41    cv::imshow(windowName, depthColor);
42}
43
44int main() {
45    // 1. Create device and pipeline
46    auto device = std::make_shared<dai::Device>();
47    dai::Pipeline pipeline(device);
48    if(!device->isNeuralDepthSupported()) {
49        std::cout << "Exiting NeuralAssistedStereo example: device doesn't support NeuralDepth.\n";
50        return 0;
51    }
52
53    // 2. Define nodes
54    auto monoLeft = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_B, std::nullopt, FPS);
55    auto monoRight = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_C, std::nullopt, FPS);
56    auto monoRightOut = monoRight->requestFullResolutionOutput();
57    auto monoLeftOut = monoLeft->requestFullResolutionOutput();
58
59    auto neuralAssistedStereo = pipeline.create<dai::node::NeuralAssistedStereo>()->build(*monoLeftOut, *monoRightOut, dai::DeviceModelZoo::NEURAL_DEPTH_NANO);
60
61    // 6. Get output queue
62    auto disparityQueue = neuralAssistedStereo->disparity.createOutputQueue();
63
64    pipeline.start();
65    while(pipeline.isRunning()) {
66        auto disparityPacket = disparityQueue->get<dai::ImgFrame>();
67        showDepth(disparityPacket->getCvFrame(), "Depth", 100, 6000, cv::COLORMAP_TURBO, false);
68        int key = cv::waitKey(1);
69        if(key == 'q') {
70            break;
71        }
72    }
73    return 0;
74}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.