DepthAI
  • DepthAI Components
    • AprilTags
    • Benchmark
    • Camera
    • Calibration
    • DetectionNetwork
    • Events
    • FeatureTracker
    • Gate
    • HostNodes
    • ImageAlign
    • ImageManip
    • IMU
    • Misc
    • Model Zoo
    • NeuralDepth
    • NeuralNetwork
    • ObjectTracker
    • PointCloud
    • RecordReplay
    • RGBD
    • Script
    • SpatialDetectionNetwork
    • SpatialLocationCalculator
    • StereoDepth
    • Sync
    • VideoEncoder
    • Visualizer
    • Warp
    • RVC2-specific
  • Advanced Tutorials
  • API Reference
  • Tools
Software Stack

ON THIS PAGE

  • Source code

PointCloud Visualizer

Supported on: RVC2, RVC4
Live 3D visualization of a colorized point cloud using Open3D. Links stereo depth and an aligned RGB camera to the PointCloud node and displays the result in an interactive Open3D window alongside a colorized depth view. This example requires the DepthAI v3 API; see the installation instructions.

Source code

Python

Python
GitHub
1#!/usr/bin/env python3
2"""
3PointCloud Visualizer
4
5Live 3D visualization of stereo-depth point clouds using Open3D.
6Press Q in the Open3D window (or Ctrl-C in the terminal) to quit.
7"""
8
9import time
10
11import cv2
12import numpy as np
13
14try:
15    import open3d as o3d
16except ImportError:
17    raise SystemExit("Please install open3d: pip install open3d")
18
19import depthai as dai
20
21
def colorizeDepth(frame: np.ndarray) -> np.ndarray:
    """Normalize a uint16 depth frame and apply a colormap for display.

    The display range is taken from the 1st/99th percentiles of the valid
    (non-zero) depth values, computed on a 4x-downscaled copy for speed and
    robustness to outliers.

    Args:
        frame: Depth image; zero pixels are treated as invalid depth.

    Returns:
        BGR uint8 visualization of the depth frame (COLORMAP_HOT).
    """
    downscaled = frame[::4, ::4]
    nonZero = downscaled[downscaled != 0]
    if nonZero.size == 0:
        # Entirely invalid frame: fall back to a dummy 0..1 range.
        minD, maxD = 0, 1
    else:
        minD = np.percentile(nonZero, 1)
        maxD = np.percentile(nonZero, 99)
        if maxD <= minD:
            # Degenerate range (near-constant depth): np.interp requires a
            # strictly increasing xp, so widen the range by one unit.
            maxD = minD + 1
    colored = np.interp(frame, (minD, maxD), (0, 255)).astype(np.uint8)
    return cv2.applyColorMap(colored, cv2.COLORMAP_HOT)
33
34
def main() -> None:
    """Build the pipeline, stream colorized point clouds, and display them.

    Opens a DepthAI device, links stereo depth (aligned to the color camera)
    into a PointCloud node, then renders the live cloud in an Open3D window
    and the colorized depth map in an OpenCV window until the user quits
    (Q key in a window, or Ctrl-C in the terminal).
    """
    print("PointCloud Visualizer")
    print("=====================")
    print("Connecting to device...")

    device = dai.Device()
    print(f"Device: {device.getDeviceName()}  (ID: {device.getDeviceId()})\n")

    with dai.Pipeline(device) as pipeline:
        # ── Camera + StereoDepth ──────────────────────────────────────
        # CAM_B/CAM_C feed the stereo pair; CAM_A provides color for the cloud.
        left = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B)
        right = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C)
        color = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
        stereo = pipeline.create(dai.node.StereoDepth)
        left.requestOutput((640, 400)).link(stereo.left)
        right.requestOutput((640, 400)).link(stereo.right)

        # Align depth to color camera.
        # On RVC4 a dedicated ImageAlign node does the alignment; on other
        # platforms StereoDepth aligns internally via its inputAlignTo input.
        platform = pipeline.getDefaultDevice().getPlatform()
        colorOut = color.requestOutput(
            (640, 400), type=dai.ImgFrame.Type.RGB888i,
            resizeMode=dai.ImgResizeMode.CROP, enableUndistortion=True,
        )
        if platform == dai.Platform.RVC4:
            align = pipeline.create(dai.node.ImageAlign)
            stereo.depth.link(align.input)
            colorOut.link(align.inputAlignTo)
            alignedDepth = align.outputAligned
        else:
            colorOut.link(stereo.inputAlignTo)
            alignedDepth = stereo.depth

        # ── PointCloud node ───────────────────────────────────────────
        pc = pipeline.create(dai.node.PointCloud)
        pc.setRunOnHost(True)  # compute the cloud on the host side
        pc.initialConfig.setLengthUnit(dai.LengthUnit.METER)
        alignedDepth.link(pc.inputDepth)
        colorOut.link(pc.inputColor)

        # Non-blocking queues: drop stale frames rather than stall the pipeline.
        queue = pc.outputPointCloud.createOutputQueue(maxSize=4, blocking=False)
        qDepth = pc.passthroughDepth.createOutputQueue(maxSize=4, blocking=False)

        # ── Open3D setup ──────────────────────────────────────────────
        vis = o3d.visualization.Visualizer()
        vis.create_window("PointCloud Visualizer")
        pcd = o3d.geometry.PointCloud()
        coord = o3d.geometry.TriangleMesh.create_coordinate_frame(size=0.1)
        vis.add_geometry(coord)
        # The cloud geometry is added lazily on the first non-empty frame.
        first = True

        # ── Start pipeline & wait for auto-exposure ───────────────────
        pipeline.start()
        print("Waiting for auto-exposure to settle...")
        time.sleep(1)
        queue.tryGetAll()  # drain stale frames
        qDepth.tryGetAll()

        print("Streaming... Press Q in the Open3D window to quit.")

        try:
            while True:
                pclData = queue.tryGet()

                if pclData is not None:
                    if pclData.isColor():
                        # Colored cloud: drop the 4th (alpha) channel and scale
                        # 0-255 colors to the 0-1 floats Open3D expects.
                        points, colors = pclData.getPointsRGB()
                        if len(points) > 0:
                            pcd.points = o3d.utility.Vector3dVector(points.astype(np.float64))
                            pcd.colors = o3d.utility.Vector3dVector(
                                np.delete(colors / 255.0, 3, 1).astype(np.float64)
                            )
                    else:
                        # Depth-only cloud: clear any previously assigned colors.
                        points = pclData.getPoints()
                        if len(points) > 0:
                            pcd.points = o3d.utility.Vector3dVector(points)
                            pcd.colors = o3d.utility.Vector3dVector()

                    if len(pcd.points) > 0:
                        if first:
                            # Add the geometry once, then set the initial camera
                            # pose (view control is configured after add_geometry).
                            vis.add_geometry(pcd)
                            ctr = vis.get_view_control()
                            ctr.set_front([0, 0, -1])
                            ctr.set_up([0, -1, 0])
                            ctr.set_lookat([0, 0, 1])
                            ctr.set_zoom(0.3)
                            first = False
                        else:
                            vis.update_geometry(pcd)

                # Show colorized depth in an OpenCV window
                depthMsg = qDepth.tryGet()
                if depthMsg is not None:
                    cv2.imshow("Depth", colorizeDepth(depthMsg.getCvFrame()))

                # Quit on 'q' in the OpenCV window or when the Open3D window
                # stops processing events (e.g. it was closed).
                if cv2.waitKey(1) == ord("q"):
                    break
                if not vis.poll_events():
                    break
                vis.update_renderer()

        except KeyboardInterrupt:
            print("\nStopping...")

        vis.destroy_window()
        cv2.destroyAllWindows()

    print("Done.")


if __name__ == "__main__":
    main()

C++

1/**
2 * PointCloud Visualizer
3 *
4 * Live 3D visualization of colorized stereo-depth point clouds using the
5 * built-in RemoteConnection (foxglove) visualizer.
6 *
7 * Run the example, then open the visualizer in a browser (default
8 * http://localhost:8082) to see the live point cloud.
9 * Press 'q' in the viewer or Ctrl-C in the terminal to quit.
10 */
11
12#include <csignal>
13#include <iostream>
14
15#include "depthai/depthai.hpp"
16#include "depthai/remote_connection/RemoteConnection.hpp"
17
// ---------------------------------------------------------------------------
// Stop flag set by the SIGINT handler and polled by the main loop.
// volatile std::sig_atomic_t is the type guaranteed safe to write from a
// signal handler.
static volatile std::sig_atomic_t isRunning{1};
void signalHandler(int) {
    isRunning = 0;
}
23
// ---------------------------------------------------------------------------
// Builds the pipeline (stereo depth aligned to the color camera -> PointCloud)
// and publishes the cloud over a RemoteConnection for browser visualization.
int main() {
    std::signal(SIGINT, signalHandler);  // let Ctrl-C break the wait loop below

    std::cout << "PointCloud Visualizer\n"
              << "=====================\n"
              << "Connecting to device...\n";

    dai::RemoteConnection remoteConnector;
    dai::Pipeline pipeline;

    auto device = pipeline.getDefaultDevice();
    std::cout << "Device: " << device->getDeviceName() << "  (ID: " << device->getDeviceId() << ")\n\n";

    const auto size = std::make_pair(640, 400);

    // ── Cameras ──────────────────────────────────────────────────────
    // CAM_B/CAM_C form the stereo pair; CAM_A supplies color for the cloud.
    auto left = pipeline.create<dai::node::Camera>();
    auto right = pipeline.create<dai::node::Camera>();
    auto color = pipeline.create<dai::node::Camera>();

    left->build(dai::CameraBoardSocket::CAM_B);
    right->build(dai::CameraBoardSocket::CAM_C);
    color->build(dai::CameraBoardSocket::CAM_A);

    // ── StereoDepth ──────────────────────────────────────────────────
    auto stereo = pipeline.create<dai::node::StereoDepth>();
    left->requestOutput(size)->link(stereo->left);
    right->requestOutput(size)->link(stereo->right);

    // ── Align depth to color camera ──────────────────────────────────
    // RGB888i, cropped to `size`, with undistortion enabled (last argument).
    auto colorOut = color->requestOutput(size, dai::ImgFrame::Type::RGB888i, dai::ImgResizeMode::CROP, std::nullopt, true);

    auto align = pipeline.create<dai::node::ImageAlign>();
    stereo->depth.link(align->input);
    colorOut->link(align->inputAlignTo);

    // ── PointCloud node ──────────────────────────────────────────────
    auto pc = pipeline.create<dai::node::PointCloud>();
    pc->setRunOnHost(true);  // compute the cloud on the host side
    align->outputAligned.link(pc->inputDepth);
    colorOut->link(pc->getColorInput());

    // Publish the point cloud to the remote visualizer
    remoteConnector.addTopic("pcl", pc->outputPointCloud);

    pipeline.start();
    remoteConnector.registerPipeline(pipeline);

    // 70% IR dot-projector intensity; assumes the device has an IR projector
    // (helps depth on low-texture scenes) — no-op semantics not shown here.
    device->setIrLaserDotProjectorIntensity(0.7);

    std::cout << "Pipeline started.\n"
              << "Open the visualizer at http://localhost:8082 to see the point cloud.\n"
              << "Press 'q' in the viewer or Ctrl-C to quit.\n";

    // Spin until SIGINT, the pipeline stops, or 'q' arrives from the viewer.
    while(isRunning != 0 && pipeline.isRunning()) {
        int key = remoteConnector.waitKey(1);
        if(key == 'q') {
            std::cout << "Got 'q' key from the remote connection.\n";
            break;
        }
    }

    std::cout << "Done.\n";
    return 0;
}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.