PointCloud Control

This example demonstrates how to use the PointCloudConfig message to dynamically update the transformation matrix of a point cloud and visualize the transformed point cloud using Open3D. The transformation matrix is updated every frame so that the point cloud appears to rotate about the Z axis. This is achieved by first moving the point cloud along the Y axis:
Python
# Move the point cloud along Y axis
translate_y_matrix = np.array([
    [1, 0, 0, 0],
    [0, 1, 0, 500],
    [0, 0, 1, 0],
    [0, 0, 0, 1]
])
Then, the point cloud is rotated about the Z axis:
Python
# Rotate the point cloud about Z axis
rotate_z_matrix = np.array([
    [np.cos(angle), -np.sin(angle), 0, 0],
    [np.sin(angle), np.cos(angle), 0, 0],
    [0, 0, 1, 0],
    [0, 0, 0, 1]
])
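Every frame, the two matrices are multiplied together and the result is sent to the device in a PointCloudConfig message. The snippet below is excerpted from the full source further down, where pclConfIn is the input queue linked to the PointCloud node's inputConfig:
Python
# Rotation is applied after the translation, so the cloud orbits the Z axis
transformation_matrix = rotate_z_matrix @ translate_y_matrix

pclConfig = dai.PointCloudConfig()
pclConfig.setTransformationMatrix(transformation_matrix)
pclConfig.setSparse(False)
pclConfIn.send(pclConfig)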

Demo

Setup

Ensure DepthAI and Open3D are installed in your Python environment:
Command Line
python3 -m pip install depthai open3d
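To verify the installation, you can print the installed package versions (assuming python3 is the interpreter you will run the example with):
Command Line
python3 -c "import depthai, open3d; print(depthai.__version__, open3d.__version__)"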

Source code

The example initializes the DepthAI pipeline with color and mono cameras and a stereo depth node to generate depth information. It then creates a point cloud node, dynamically updates its transformation matrix based on a rotation value, and visualizes this transformed point cloud using Open3D. Each point's color corresponds to the color image captured by the RGB camera.
Python
import depthai as dai
import numpy as np
import cv2
import sys

try:
    import open3d as o3d
except ImportError:
    sys.exit(f"Critical dependency missing: Open3D. Please install it using the command: '{sys.executable} -m pip install open3d' and then rerun the script.")


# Set up the DepthAI pipeline
pipeline = dai.Pipeline()
camRgb = pipeline.create(dai.node.ColorCamera)
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
depth = pipeline.create(dai.node.StereoDepth)
pointcloud: dai.node.PointCloud = pipeline.create(dai.node.PointCloud)
sync = pipeline.create(dai.node.Sync)
xOut = pipeline.create(dai.node.XLinkOut)
xOut.input.setBlocking(False)


camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
camRgb.setIspScale(1,3)

monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoLeft.setCamera("left")

monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoRight.setCamera("right")


depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
depth.initialConfig.setMedianFilter(dai.MedianFilter.KERNEL_7x7)
depth.setLeftRightCheck(True)
depth.setExtendedDisparity(False)
depth.setSubpixel(True)
depth.setDepthAlign(dai.CameraBoardSocket.CAM_A)
config = depth.initialConfig.get()
config.postProcessing.thresholdFilter.minRange = 200
config.postProcessing.thresholdFilter.maxRange = 1000
depth.initialConfig.set(config)

monoLeft.out.link(depth.left)
monoRight.out.link(depth.right)
depth.depth.link(pointcloud.inputDepth)
camRgb.isp.link(sync.inputs["rgb"])
pointcloud.outputPointCloud.link(sync.inputs["pcl"])
pointcloud.initialConfig.setSparse(False)
sync.out.link(xOut.input)
xOut.setStreamName("out")

# Input queue for sending PointCloudConfig messages to the device
inConfig = pipeline.create(dai.node.XLinkIn)
inConfig.setStreamName("config")
inConfig.out.link(pointcloud.inputConfig)

with dai.Device(pipeline) as device:

    isRunning = True
    def key_callback(vis, action, mods):
        global isRunning
        if action == 0:
            isRunning = False

    q = device.getOutputQueue(name="out", maxSize=4, blocking=False)
    pclConfIn = device.getInputQueue(name="config", maxSize=4, blocking=False)

    vis = o3d.visualization.Visualizer()
    vis.create_window()
    pcd = o3d.geometry.PointCloud()
    coordinateFrame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=1000, origin=[0,0,0])
    vis.add_geometry(coordinateFrame)
    vis.add_geometry(pcd)

    first = True
    rot = 0
    while device.isPipelineRunning():

        inMessage = q.get()
        inColor = inMessage["rgb"]
        inPointCloud = inMessage["pcl"]
        cvColorFrame = inColor.getCvFrame()

        cvRGBFrame = cv2.cvtColor(cvColorFrame, cv2.COLOR_BGR2RGB)

        if inPointCloud:
            points = inPointCloud.getPoints().astype(np.float64)
            pcd.points = o3d.utility.Vector3dVector(points)
            colors = (cvRGBFrame.reshape(-1, 3) / 255.0).astype(np.float64)
            pcd.colors = o3d.utility.Vector3dVector(colors)

            vis.update_geometry(pcd)
        vis.poll_events()
        vis.update_renderer()

        # Rotate the point cloud about the Z axis
        rotate_z_matrix = np.array([[np.cos(rot), -np.sin(rot), 0, 0],
                                    [np.sin(rot), np.cos(rot), 0, 0],
                                    [0, 0, 1, 0],
                                    [0, 0, 0, 1]])

        # Move the point cloud along the Y axis
        translate_y_matrix = np.array([[1, 0, 0, 0],
                                       [0, 1, 0, 500],
                                       [0, 0, 1, 0],
                                       [0, 0, 0, 1]])

        # Combine the two transformation matrices into one
        transformation_matrix = rotate_z_matrix @ translate_y_matrix

        rot += 0.05

        # Send the updated transformation matrix to the PointCloud node
        pclConfig = dai.PointCloudConfig()
        pclConfig.setTransformationMatrix(transformation_matrix)
        pclConfig.setSparse(False)
        pclConfIn.send(pclConfig)

    vis.destroy_window()
    cv2.destroyAllWindows()

Pipeline
