Stereo Depth custom Mesh
This example shows how to calculate a custom rectification mesh and load it into the StereoDepth node. By default, the StereoDepth node uses the same logic as def getMesh() below to calculate mesh files whenever the horizontal FOV is larger than 90°. You can also force the mesh calculation (distortion correction) with:

Python
stereo = pipeline.create(dai.node.StereoDepth)
# Enable mesh calculation to correct distortion:
stereo.enableDistortionCorrection(True)
Mesh files can also be loaded directly from a path:

Python
stereo = pipeline.create(dai.node.StereoDepth)
stereo.loadMeshFiles('path/to/left_mesh', 'path/to/right_mesh')
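Instead of loading them from files, the meshes can also be computed on the host and sent to the device with StereoDepth.loadMeshData(). This is exactly what the full example below does; condensed here for reference:

Python
# Condensed from the full source below
calibData = device.readCalibration()
leftMesh, rightMesh = getMesh(calibData)  # numpy arrays of interleaved (y, x) remap values
stereo.loadMeshData(list(leftMesh.tobytes()), list(rightMesh.tobytes()))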
Demo

Setup

Command Line
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
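Once the requirements are installed, the example can be run with the resolution, mesh-directory and mesh-loading options defined by its argument parser (see the source below). The script name and path here are assumptions for illustration:

Command Line
# hypothetical file name - adjust to wherever the source below is saved
python3 stereo_depth_custom_mesh.py -res 720 -md ./meshes -lm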
Source code

Python
#!/usr/bin/env python3

import cv2
import numpy as np
import depthai as dai
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-res", "--resolution", type=str, default="720",
                    help="Sets the resolution on mono cameras. Options: 800 | 720 | 400")
parser.add_argument("-md", "--mesh_dir", type=str, default=None,
                    help="Output directory for mesh files. If not specified mesh files won't be saved")
parser.add_argument("-lm", "--load_mesh", default=False, action="store_true",
                    help="Read camera intrinsics, generate mesh files and load them into the stereo node.")
args = parser.parse_args()

meshDirectory = args.mesh_dir  # Output dir for mesh files
generateMesh = args.load_mesh  # Load mesh files
RES_MAP = {
    '800': {'w': 1280, 'h': 800, 'res': dai.MonoCameraProperties.SensorResolution.THE_800_P },
    '720': {'w': 1280, 'h': 720, 'res': dai.MonoCameraProperties.SensorResolution.THE_720_P },
    '400': {'w': 640, 'h': 400, 'res': dai.MonoCameraProperties.SensorResolution.THE_400_P }
}
if args.resolution not in RES_MAP:
    exit("Unsupported resolution!")

resolution = RES_MAP[args.resolution]

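# Build the left/right warp meshes from the device calibration: the dense
# undistort + rectification maps from OpenCV are subsampled into the mesh
# layout that StereoDepth.loadMeshData() consumes.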
def getMesh(calibData):
    M1 = np.array(calibData.getCameraIntrinsics(dai.CameraBoardSocket.CAM_B, resolution['w'], resolution['h']))
    d1 = np.array(calibData.getDistortionCoefficients(dai.CameraBoardSocket.CAM_B))
    R1 = np.array(calibData.getStereoLeftRectificationRotation())
    M2 = np.array(calibData.getCameraIntrinsics(dai.CameraBoardSocket.CAM_C, resolution['w'], resolution['h']))
    d2 = np.array(calibData.getDistortionCoefficients(dai.CameraBoardSocket.CAM_C))
    R2 = np.array(calibData.getStereoRightRectificationRotation())
    mapXL, mapYL = cv2.initUndistortRectifyMap(M1, d1, R1, M2, (resolution['w'], resolution['h']), cv2.CV_32FC1)
    mapXR, mapYR = cv2.initUndistortRectifyMap(M2, d2, R2, M2, (resolution['w'], resolution['h']), cv2.CV_32FC1)
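
    # Both maps use M2 (the right camera intrinsics) as the new camera matrix,
    # so the rectified pair shares a common projection.
    # Below, the dense remap is subsampled into a mesh: one (y, x) pair every
    # meshCellSize pixels, reusing the last valid row/column at the image borders
    # and zero-padding rows when (width % meshCellSize) is odd.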
    meshCellSize = 16
    meshLeft = []
    meshRight = []

    for y in range(mapXL.shape[0] + 1):
        if y % meshCellSize == 0:
            rowLeft = []
            rowRight = []
            for x in range(mapXL.shape[1] + 1):
                if x % meshCellSize == 0:
                    if y == mapXL.shape[0] and x == mapXL.shape[1]:
                        rowLeft.append(mapYL[y - 1, x - 1])
                        rowLeft.append(mapXL[y - 1, x - 1])
                        rowRight.append(mapYR[y - 1, x - 1])
                        rowRight.append(mapXR[y - 1, x - 1])
                    elif y == mapXL.shape[0]:
                        rowLeft.append(mapYL[y - 1, x])
                        rowLeft.append(mapXL[y - 1, x])
                        rowRight.append(mapYR[y - 1, x])
                        rowRight.append(mapXR[y - 1, x])
                    elif x == mapXL.shape[1]:
                        rowLeft.append(mapYL[y, x - 1])
                        rowLeft.append(mapXL[y, x - 1])
                        rowRight.append(mapYR[y, x - 1])
                        rowRight.append(mapXR[y, x - 1])
                    else:
                        rowLeft.append(mapYL[y, x])
                        rowLeft.append(mapXL[y, x])
                        rowRight.append(mapYR[y, x])
                        rowRight.append(mapXR[y, x])
            if (mapXL.shape[1] % meshCellSize) % 2 != 0:
                rowLeft.append(0)
                rowLeft.append(0)
                rowRight.append(0)
                rowRight.append(0)

            meshLeft.append(rowLeft)
            meshRight.append(rowRight)

    meshLeft = np.array(meshLeft)
    meshRight = np.array(meshRight)

    return meshLeft, meshRight

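# The meshes are written as raw binary arrays with numpy's tofile(); files saved this
# way are intended to be loaded back with stereo.loadMeshFiles() (see the snippet at
# the top of this page) instead of being recomputed on every run.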
def saveMeshFiles(meshLeft, meshRight, outputPath):
    print("Saving mesh to:", outputPath)
    meshLeft.tofile(outputPath + "/left_mesh.calib")
    meshRight.tofile(outputPath + "/right_mesh.calib")


def create_pipeline(device: dai.Device) -> dai.Pipeline:
    calibData = device.readCalibration()
    print("Creating Stereo Depth pipeline")
    pipeline = dai.Pipeline()

    camLeft = pipeline.create(dai.node.MonoCamera)
    camLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)

    camRight = pipeline.create(dai.node.MonoCamera)
    camRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)

    xoutRight = pipeline.create(dai.node.XLinkOut)
    xoutRight.setStreamName("right")
    camRight.out.link(xoutRight.input)

    for monoCam in (camLeft, camRight):  # Common config
        monoCam.setResolution(resolution['res'])
        # monoCam.setFps(20.0)

    stereo = pipeline.create(dai.node.StereoDepth)
    camLeft.out.link(stereo.left)
    camRight.out.link(stereo.right)
    stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
    stereo.setRectifyEdgeFillColor(0)  # Black, to better see the cutout
    stereo.setLeftRightCheck(True)
    stereo.setExtendedDisparity(True)
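    # The two settings above: left-right check filters out occlusion/mismatch artifacts,
    # and extended disparity doubles the search range so closer objects can be measured.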

    xoutDisparity = pipeline.create(dai.node.XLinkOut)
    xoutDisparity.setStreamName("disparity")
    stereo.disparity.link(xoutDisparity.input)

    xoutRectifRight = pipeline.create(dai.node.XLinkOut)
    xoutRectifRight.setStreamName("rectifiedRight")
    stereo.rectifiedRight.link(xoutRectifRight.input)

    # Create custom meshes from calibration data. Here you could also
    # load your own mesh files, or generate them in any other way.
    leftMesh, rightMesh = getMesh(calibData)
    if generateMesh:
        meshLeft = list(leftMesh.tobytes())
        meshRight = list(rightMesh.tobytes())
        # Load mesh data to the StereoDepth node
        stereo.loadMeshData(meshLeft, meshRight)

    if meshDirectory is not None:
        saveMeshFiles(leftMesh, rightMesh, meshDirectory)
    return pipeline


with dai.Device() as device:
    device.startPipeline(create_pipeline(device))

    # Create a receive queue for each stream
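    # Non-blocking queues (maxSize 8): if the host falls behind, the oldest frames
    # are dropped rather than stalling the device pipeline.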
    qList = [device.getOutputQueue(stream, 8, blocking=False) for stream in ['right', 'rectifiedRight', 'disparity']]

    while True:
        for q in qList:
            name = q.getName()
            frame = q.get().getCvFrame()
            cv2.imshow(name, frame)
        if cv2.waitKey(1) == ord("q"):
            break
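The loop above displays the raw disparity frame, which can look dim. A common tweak, not part of the original example, is to normalize and colorize the disparity for display. A minimal sketch, assuming a maximum disparity of 190 (the default 95-pixel search doubled by extended disparity; on the pipeline side it can be queried with stereo.initialConfig.getMaxDisparity()):

Python
# Hypothetical visualization helper - not part of the example above
import cv2
import numpy as np

MAX_DISPARITY = 190  # assumption: 95 disparities * 2 with extended disparity enabled

def colorizeDisparity(frame: np.ndarray) -> np.ndarray:
    # Scale disparity to the 0..255 range and apply a color map for easier viewing
    scaled = (frame * (255.0 / MAX_DISPARITY)).astype(np.uint8)
    return cv2.applyColorMap(scaled, cv2.COLORMAP_JET)

# Usage inside the display loop above:
#   if name == "disparity":
#       frame = colorizeDisparity(frame)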