DepthAI
  • DepthAI Components
    • AprilTags
    • Benchmark
    • Camera
    • Calibration
    • DetectionNetwork
    • Events
    • FeatureTracker
    • Gate
    • HostNodes
    • ImageAlign
    • ImageManip
    • IMU
    • Misc
    • Model Zoo
    • NeuralDepth
    • NeuralNetwork
    • ObjectTracker
    • PointCloud
    • RecordReplay
    • RGBD
    • Script
    • SpatialDetectionNetwork
    • SpatialLocationCalculator
    • StereoDepth
    • Sync
    • VideoEncoder
    • Visualizer
    • Warp
    • RVC2-specific
  • Advanced Tutorials
  • API Reference
  • Tools
Software Stack

ON THIS PAGE

  • Source code

PointCloud

Supported on: RVC2, RVC4
Minimal example that creates a colorized point cloud from stereo depth and an aligned RGB camera using the PointCloud node. Outputs point count, dimensions, and depth range to the console. This example requires the DepthAI v3 API, see installation instructions.

Source code

Python

Python
GitHub
#!/usr/bin/env python3
"""Minimal PointCloud example: colorized point cloud from stereo depth + RGB."""

import depthai as dai

pipeline = dai.Pipeline()

# Camera nodes: CAM_B/CAM_C form the stereo pair, CAM_A is the color sensor.
camLeft = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B)
camRight = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C)
camRgb = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)

# Stereo depth fed by the full-resolution mono streams.
stereo = pipeline.create(dai.node.StereoDepth)
camLeft.requestFullResolutionOutput().link(stereo.left)
camRight.requestFullResolutionOutput().link(stereo.right)

# Undistorted RGB output that the depth map will be aligned with.
rgbOut = camRgb.requestOutput(
    (640, 400),
    type=dai.ImgFrame.Type.RGB888i,
    resizeMode=dai.ImgResizeMode.CROP,
    enableUndistortion=True,
)

# Point cloud node, emitting coordinates in meters.
pointCloud = pipeline.create(dai.node.PointCloud)
pointCloud.initialConfig.setLengthUnit(dai.LengthUnit.METER)

# RVC4 needs a dedicated ImageAlign node; other platforms align inside StereoDepth.
if pipeline.getDefaultDevice().getPlatform() == dai.Platform.RVC4:
    align = pipeline.create(dai.node.ImageAlign)
    stereo.depth.link(align.input)
    rgbOut.link(align.inputAlignTo)
    align.outputAligned.link(pointCloud.inputDepth)
else:
    rgbOut.link(stereo.inputAlignTo)
    stereo.depth.link(pointCloud.inputDepth)

rgbOut.link(pointCloud.inputColor)

queue = pointCloud.outputPointCloud.createOutputQueue(maxSize=4, blocking=False)

with pipeline:
    pipeline.start()
    while pipeline.isRunning():
        msg = queue.get()
        if msg.isColor():
            xyz, rgba = msg.getPointsRGB()
        else:
            xyz = msg.getPoints()
        numPoints = len(xyz)
        print(f"Points: {numPoints}, {msg.getWidth()}x{msg.getHeight()}, "
              f"color={msg.isColor()}, Z=[{msg.getMinZ():.2f}, {msg.getMaxZ():.2f}]")

C++

1// Minimal PointCloud example: colorized point cloud from stereo depth + RGB.
2#include <iostream>
3
4#include "depthai/depthai.hpp"
5
6int main() {
7    dai::Pipeline pipeline;
8
9    // Cameras
10    auto left = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_B);
11    auto right = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_C);
12    auto color = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
13
14    // Stereo depth
15    auto stereo = pipeline.create<dai::node::StereoDepth>();
16    left->requestFullResolutionOutput()->link(stereo->left);
17    right->requestFullResolutionOutput()->link(stereo->right);
18
19    // Color output aligned to depth
20    auto colorOut = color->requestOutput(std::make_pair(640, 400), dai::ImgFrame::Type::RGB888i, dai::ImgResizeMode::CROP, std::nullopt, true);
21
22    // Point cloud
23    auto pc = pipeline.create<dai::node::PointCloud>();
24    pc->initialConfig->setLengthUnit(dai::LengthUnit::METER);
25
26    // Align depth to color on RVC4, or color to depth on RVC2/3
27    auto platform = pipeline.getDefaultDevice()->getPlatform();
28    if(platform == dai::Platform::RVC4) {
29        auto imageAlign = pipeline.create<dai::node::ImageAlign>();
30        stereo->depth.link(imageAlign->input);
31        colorOut->link(imageAlign->inputAlignTo);
32        imageAlign->outputAligned.link(pc->inputDepth);
33    } else {
34        colorOut->link(stereo->inputAlignTo);
35        stereo->depth.link(pc->inputDepth);
36    }
37
38    colorOut->link(pc->getColorInput());
39
40    auto q = pc->outputPointCloud.createOutputQueue(4, false);
41
42    pipeline.start();
43    while(pipeline.isRunning()) {
44        auto pcd = q->get<dai::PointCloudData>();
45        if(pcd->isColor()) {
46            auto points = pcd->getPointsRGB();
47            std::cout << "Points: " << points.size();
48        } else {
49            auto points = pcd->getPoints();
50            std::cout << "Points: " << points.size();
51        }
52        std::cout << ", " << pcd->getWidth() << "x" << pcd->getHeight() << ", color=" << (pcd->isColor() ? "yes" : "no") << ", Z=[" << pcd->getMinZ() << ", "
53                  << pcd->getMaxZ() << "]" << std::endl;
54    }
55    pipeline.stop();
56    return 0;
57}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.