DepthAI
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

Neural Depth Minimal

Supported on: RVC4
Minimal example showing basic NeuralDepth usage with disparity output visualization.

Pipeline

Source code

Python

Python
GitHub
#!/usr/bin/env python3
"""Minimal NeuralDepth example: colorize and display the disparity output
of a stereo camera pair. Press 'q' in the preview window to quit."""

import cv2
import depthai as dai
import numpy as np

FPS = 10

# Create pipeline
with dai.Pipeline() as pipeline:
    # Stereo pair: left sensor on CAM_B, right sensor on CAM_C
    cameraLeft = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B, sensorFps=FPS)
    cameraRight = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C, sensorFps=FPS)
    leftOutput = cameraLeft.requestFullResolutionOutput()
    rightOutput = cameraRight.requestFullResolutionOutput()

    # NeuralDepth consumes the stereo pair and emits disparity frames
    neuralDepth = pipeline.create(dai.node.NeuralDepth).build(leftOutput, rightOutput, dai.DeviceModelZoo.NEURAL_DEPTH_LARGE)

    disparityQueue = neuralDepth.disparity.createOutputQueue()

    # Connect to device and start pipeline
    pipeline.start()
    # Running maximum keeps the visualization normalization stable across frames
    maxDisparity = 1
    colorMap = cv2.applyColorMap(np.arange(256, dtype=np.uint8), cv2.COLORMAP_JET)
    colorMap[0] = [0, 0, 0]  # to make zero-disparity pixels black

    while pipeline.isRunning():
        disparityData = disparityQueue.get()
        assert isinstance(disparityData, dai.ImgFrame)
        npDisparity = disparityData.getFrame()
        maxDisparity = max(maxDisparity, np.max(npDisparity))
        colorizedDisparity = cv2.applyColorMap(((npDisparity / maxDisparity) * 255).astype(np.uint8), colorMap)
        cv2.imshow("disparity", colorizedDisparity)

        # Poll the keyboard exactly once per frame: a second waitKey call would
        # double the per-frame delay and could consume the 'q' keypress before
        # the quit branch that stops the pipeline ever saw it.
        if cv2.waitKey(1) == ord('q'):
            pipeline.stop()
            break

C++

1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <opencv2/opencv.hpp>
5#include <vector>
6
7#include "depthai/depthai.hpp"
8
// Set when a termination signal arrives so the main loop can exit cleanly.
std::atomic<bool> quitEvent{false};

// Signal handler: only flips the atomic flag, which is safe to do from a
// signal context; all real shutdown work happens in the main loop.
void signalHandler(int) {
    quitEvent.store(true);
}
15
16int main() {
17    // Set up signal handlers for clean exit on Ctrl+C
18    signal(SIGTERM, signalHandler);
19    signal(SIGINT, signalHandler);
20
21    constexpr float FPS = 10.0f;
22
23    // Create the DepthAI pipeline
24    dai::Pipeline pipeline;
25
26    // Define camera sources for the stereo pair
27    auto cameraLeft = pipeline.create<dai::node::Camera>();
28    cameraLeft->build(dai::CameraBoardSocket::CAM_B, std::nullopt, FPS);
29
30    auto cameraRight = pipeline.create<dai::node::Camera>();
31    cameraRight->build(dai::CameraBoardSocket::CAM_C, std::nullopt, FPS);
32
33    // Request full resolution output from each camera
34    auto* leftOutput = cameraLeft->requestFullResolutionOutput();
35    auto* rightOutput = cameraRight->requestFullResolutionOutput();
36
37    // Create and build the NeuralDepth node, linking the camera outputs to it
38    auto neuralDepth = pipeline.create<dai::node::NeuralDepth>();
39    neuralDepth->build(*leftOutput, *rightOutput, dai::DeviceModelZoo::NEURAL_DEPTH_LARGE);
40
41    // Create an output queue to get the disparity frames from the node
42    auto disparityQueue = neuralDepth->disparity.createOutputQueue();
43
44    // Start the pipeline
45    pipeline.start();
46
47    // Variables for visualization
48    double maxDisparity = 1.0;
49    cv::Mat colorMap;
50
51    // Pre-generate the color map for efficiency
52    cv::Mat gray(256, 1, CV_8UC1);
53    for(int i = 0; i < 256; i++) {
54        gray.at<uchar>(i) = i;
55    }
56    cv::applyColorMap(gray, colorMap, cv::COLORMAP_JET);
57    // Set the color for zero-disparity pixels to black, as in the Python example
58    colorMap.at<cv::Vec3b>(0) = cv::Vec3b(0, 0, 0);
59
60    while(!quitEvent && pipeline.isRunning()) {
61        // Get the disparity data from the queue
62        auto disparityData = disparityQueue->get<dai::ImgFrame>();
63        cv::Mat npDisparity = disparityData->getFrame();
64
65        // Find the current maximum disparity value to keep the visualization normalized
66        double minVal, currentMax;
67        cv::minMaxLoc(npDisparity, &minVal, &currentMax);
68        if(currentMax > 0) {
69            maxDisparity = std::max(maxDisparity, currentMax);
70        }
71
72        // Normalize the disparity image to a 0-255 scale for color mapping
73        cv::Mat normalized;
74        npDisparity.convertTo(normalized, CV_8UC1, 255.0 / maxDisparity);
75
76        // Apply the color map to create a visual representation
77        cv::Mat colorizedDisparity;
78        cv::applyColorMap(normalized, colorizedDisparity, colorMap);
79
80        // Display the colorized disparity map
81        cv::imshow("disparity", colorizedDisparity);
82
83        // Check for keyboard input to quit
84        int key = cv::waitKey(1);
85        if(key == 'q') {
86            break;
87        }
88    }
89
90    // The pipeline is stopped automatically when the 'pipeline' object goes out of scope
91    // at the end of the main function.
92    return 0;
93}

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.