1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <memory>
5#include <stdexcept>
6#include <string>
7
8#include "depthai/capabilities/ImgFrameCapability.hpp"
9#include "depthai/depthai.hpp"
10#include "depthai/remote_connection/RemoteConnection.hpp"
11
// Shutdown flag: flipped by the signal handler so the main loop can exit cleanly.
static std::atomic<bool> isRunning{true};

// SIGINT handler — requests a graceful shutdown of the main loop.
void signalHandler(int signum) {
    (void)signum;  // unused; parameter required by the signal-handler signature
    isRunning.store(false);
}
18
19int main(int argc, char** argv) {
20 // Default port values
21 int webSocketPort = 8765;
22 int httpPort = 8082;
23
24 // A simple argument parser
25 for(int i = 1; i < argc; ++i) {
26 std::string arg = argv[i];
27 if(arg == "--webSocketPort" && i + 1 < argc) {
28 webSocketPort = std::stoi(argv[++i]);
29 } else if(arg == "--httpPort" && i + 1 < argc) {
30 httpPort = std::stoi(argv[++i]);
31 }
32 }
33
34 // Register a signal handler for Ctrl+C
35 std::signal(SIGINT, signalHandler);
36
37 constexpr float FPS = 10.0f;
38
39 try {
40 // Create a remote connection for visualization
41 dai::RemoteConnection remoteConnector(dai::RemoteConnection::DEFAULT_ADDRESS, webSocketPort, true, httpPort);
42
43 // Create the DepthAI pipeline
44 dai::Pipeline pipeline;
45
46 // --- Define pipeline nodes ---
47
48 // Color camera
49 auto color = pipeline.create<dai::node::Camera>();
50 color->build(dai::CameraBoardSocket::CAM_A, std::nullopt, FPS);
51
52 // Left and right mono cameras for the stereo pair
53 auto left = pipeline.create<dai::node::Camera>();
54 left->build(dai::CameraBoardSocket::CAM_B, std::nullopt, FPS);
55
56 auto right = pipeline.create<dai::node::Camera>();
57 right->build(dai::CameraBoardSocket::CAM_C, std::nullopt, FPS);
58
59 // NeuralDepth node for stereo processing
60 auto stereo = pipeline.create<dai::node::NeuralDepth>();
61
62 // RGBD node to generate a point cloud
63 auto rgbd = pipeline.create<dai::node::RGBD>();
64 rgbd->build();
65
66 // ImageAlign node to align depth to the color camera's perspective
67 auto align = pipeline.create<dai::node::ImageAlign>();
68
69 // --- Link pipeline nodes ---
70
71 // Build the NeuralDepth node using full-resolution outputs from mono cameras
72 auto* leftOut = left->requestFullResolutionOutput();
73 auto* rightOut = right->requestFullResolutionOutput();
74 stereo->build(*leftOut, *rightOut, dai::DeviceModelZoo::NEURAL_DEPTH_LARGE);
75
76 // Request an output from the color camera
77 auto* colorOut = color->requestOutput(std::make_pair(1280, 800), dai::ImgFrame::Type::RGB888i, dai::ImgResizeMode::CROP, std::nullopt, true);
78 if(colorOut == nullptr) {
79 throw std::runtime_error("Failed to create color camera output.");
80 }
81
82 // Connect the nodes to form the data flow
83 stereo->depth.link(align->input);
84 colorOut->link(align->inputAlignTo);
85 align->outputAligned.link(rgbd->inDepth);
86 colorOut->link(rgbd->inColor);
87
88 // Add the point cloud output as a topic for the remote visualizer
89 remoteConnector.addTopic("pcl", rgbd->pcl, "common");
90
91 // Start the pipeline
92 pipeline.start();
93
94 // Register the running pipeline with the remote connector
95 remoteConnector.registerPipeline(pipeline);
96
97 std::cout << "Pipeline started. Connect to the visualizer to see the point cloud." << std::endl;
98
99 // Main loop to keep the application running
100 while(isRunning && pipeline.isRunning()) {
101 // Check for a 'q' key press from the remote connection to quit
102 int key = remoteConnector.waitKey(1);
103 if(key == 'q') {
104 std::cout << "Received 'q' key from the remote connection. Stopping." << std::endl;
105 break;
106 }
107 }
108
109 } catch(const std::exception& e) {
110 std::cerr << "An error occurred: " << e.what() << std::endl;
111 return 1;
112 }
113
114 std::cout << "Application finished." << std::endl;
115 return 0;
116}