DepthAI
Software Stack

ON THIS PAGE

  • Source code

Neural Depth RGBD

Supported on: RVC4
Combines NeuralDepth with the RGBD node to generate a point cloud, viewable via remote connection.

Source code

Python

Python
GitHub
import depthai as dai

from argparse import ArgumentParser

# Ports for the remote visualizer connection (browser-based viewer).
parser = ArgumentParser()
parser.add_argument("--webSocketPort", type=int, default=8765)
parser.add_argument("--httpPort", type=int, default=8082)
args = parser.parse_args()

FPS = 10
with dai.Pipeline() as p:
    # Remote connection lets a viewer subscribe to named topics (see addTopic below).
    remoteConnector = dai.RemoteConnection(
        webSocketPort=args.webSocketPort, httpPort=args.httpPort
    )
    # Stereo pair (left/right) feeds NeuralDepth; color provides the RGB texture.
    left = p.create(dai.node.Camera)
    right = p.create(dai.node.Camera)
    color = p.create(dai.node.Camera)
    stereo = p.create(dai.node.NeuralDepth)
    rgbd = p.create(dai.node.RGBD).build()

    color.build(sensorFps=FPS)
    left.build(dai.CameraBoardSocket.CAM_B, sensorFps=FPS)
    right.build(dai.CameraBoardSocket.CAM_C, sensorFps=FPS)

    # Linking
    stereo.build(left.requestFullResolutionOutput(), right.requestFullResolutionOutput(), dai.DeviceModelZoo.NEURAL_DEPTH_LARGE)
    # Undistorted color output; depth is aligned to this view so that RGBD can
    # pair each depth pixel with a color pixel.
    out = color.requestOutput((1280, 800), dai.ImgFrame.Type.RGB888i, enableUndistortion=True)
    align = p.create(dai.node.ImageAlign)
    stereo.depth.link(align.input)
    out.link(align.inputAlignTo)
    align.outputAligned.link(rgbd.inDepth)
    out.link(rgbd.inColor)
    # Publish the point cloud for the remote viewer under the "pcl" topic.
    remoteConnector.addTopic("pcl", rgbd.pcl, "common")

    p.start()
    remoteConnector.registerPipeline(p)

    # Poll the remote connection; 'q' from the viewer stops the pipeline.
    while p.isRunning():
        key = remoteConnector.waitKey(1)
        if key == ord("q"):
            print("Got q key from the remote connection!")
            break

C++

1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <memory>
5#include <stdexcept>
6#include <string>
7
8#include "depthai/capabilities/ImgFrameCapability.hpp"
9#include "depthai/depthai.hpp"
10#include "depthai/remote_connection/RemoteConnection.hpp"
11
// Global flag polled by the main loop; flipped by the SIGINT handler to allow
// a graceful shutdown on Ctrl+C. The handler performs only an atomic store,
// which is async-signal-safe provided std::atomic<bool> is lock-free on the
// target platform (it is on all mainstream toolchains).
static std::atomic<bool> isRunning(true);

// SIGINT handler: request shutdown. The signal number is required by the
// std::signal handler signature but is not needed here.
void signalHandler([[maybe_unused]] int signum) {
    isRunning = false;
}
18
19int main(int argc, char** argv) {
20    // Default port values
21    int webSocketPort = 8765;
22    int httpPort = 8082;
23
24    // A simple argument parser
25    for(int i = 1; i < argc; ++i) {
26        std::string arg = argv[i];
27        if(arg == "--webSocketPort" && i + 1 < argc) {
28            webSocketPort = std::stoi(argv[++i]);
29        } else if(arg == "--httpPort" && i + 1 < argc) {
30            httpPort = std::stoi(argv[++i]);
31        }
32    }
33
34    // Register a signal handler for Ctrl+C
35    std::signal(SIGINT, signalHandler);
36
37    constexpr float FPS = 10.0f;
38
39    try {
40        // Create a remote connection for visualization
41        dai::RemoteConnection remoteConnector(dai::RemoteConnection::DEFAULT_ADDRESS, webSocketPort, true, httpPort);
42
43        // Create the DepthAI pipeline
44        dai::Pipeline pipeline;
45
46        // --- Define pipeline nodes ---
47
48        // Color camera
49        auto color = pipeline.create<dai::node::Camera>();
50        color->build(dai::CameraBoardSocket::CAM_A, std::nullopt, FPS);
51
52        // Left and right mono cameras for the stereo pair
53        auto left = pipeline.create<dai::node::Camera>();
54        left->build(dai::CameraBoardSocket::CAM_B, std::nullopt, FPS);
55
56        auto right = pipeline.create<dai::node::Camera>();
57        right->build(dai::CameraBoardSocket::CAM_C, std::nullopt, FPS);
58
59        // NeuralDepth node for stereo processing
60        auto stereo = pipeline.create<dai::node::NeuralDepth>();
61
62        // RGBD node to generate a point cloud
63        auto rgbd = pipeline.create<dai::node::RGBD>();
64        rgbd->build();
65
66        // ImageAlign node to align depth to the color camera's perspective
67        auto align = pipeline.create<dai::node::ImageAlign>();
68
69        // --- Link pipeline nodes ---
70
71        // Build the NeuralDepth node using full-resolution outputs from mono cameras
72        auto* leftOut = left->requestFullResolutionOutput();
73        auto* rightOut = right->requestFullResolutionOutput();
74        stereo->build(*leftOut, *rightOut, dai::DeviceModelZoo::NEURAL_DEPTH_LARGE);
75
76        // Request an output from the color camera
77        auto* colorOut = color->requestOutput(std::make_pair(1280, 800), dai::ImgFrame::Type::RGB888i, dai::ImgResizeMode::CROP, std::nullopt, true);
78        if(colorOut == nullptr) {
79            throw std::runtime_error("Failed to create color camera output.");
80        }
81
82        // Connect the nodes to form the data flow
83        stereo->depth.link(align->input);
84        colorOut->link(align->inputAlignTo);
85        align->outputAligned.link(rgbd->inDepth);
86        colorOut->link(rgbd->inColor);
87
88        // Add the point cloud output as a topic for the remote visualizer
89        remoteConnector.addTopic("pcl", rgbd->pcl, "common");
90
91        // Start the pipeline
92        pipeline.start();
93
94        // Register the running pipeline with the remote connector
95        remoteConnector.registerPipeline(pipeline);
96
97        std::cout << "Pipeline started. Connect to the visualizer to see the point cloud." << std::endl;
98
99        // Main loop to keep the application running
100        while(isRunning && pipeline.isRunning()) {
101            // Check for a 'q' key press from the remote connection to quit
102            int key = remoteConnector.waitKey(1);
103            if(key == 'q') {
104                std::cout << "Received 'q' key from the remote connection. Stopping." << std::endl;
105                break;
106            }
107        }
108
109    } catch(const std::exception& e) {
110        std::cerr << "An error occurred: " << e.what() << std::endl;
111        return 1;
112    }
113
114    std::cout << "Application finished." << std::endl;
115    return 0;
116}

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.