Spatial Calculator Multi-ROI¶
This example shows how one can use multiple ROIs with a single SpatialLocationCalculator node. A similar logic could be used as a simple depth line scanning camera for mobile robots.
Similar samples:
Demo¶

Setup¶
Please run the install script to download all required dependencies. Please note that this script must be run from within the cloned repository, so you have to download the depthai-python repository first and then run the script
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow the installation guide
Source code¶
Also available on GitHub
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 | #!/usr/bin/env python3 import cv2 import depthai as dai import math import numpy as np # Create pipeline pipeline = dai.Pipeline() # Define sources and outputs monoLeft = pipeline.create(dai.node.MonoCamera) monoRight = pipeline.create(dai.node.MonoCamera) stereo = pipeline.create(dai.node.StereoDepth) spatialLocationCalculator = pipeline.create(dai.node.SpatialLocationCalculator) xoutDepth = pipeline.create(dai.node.XLinkOut) xoutSpatialData = pipeline.create(dai.node.XLinkOut) xinSpatialCalcConfig = pipeline.create(dai.node.XLinkIn) xoutDepth.setStreamName("depth") xoutSpatialData.setStreamName("spatialData") xinSpatialCalcConfig.setStreamName("spatialCalcConfig") # Properties monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P) monoLeft.setCamera("left") monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P) monoRight.setCamera("right") stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY) stereo.setLeftRightCheck(True) stereo.setSubpixel(True) spatialLocationCalculator.inputConfig.setWaitForMessage(False) # Create 10 ROIs for i in range(10): config = dai.SpatialLocationCalculatorConfigData() config.depthThresholds.lowerThreshold = 200 config.depthThresholds.upperThreshold = 10000 config.roi = dai.Rect(dai.Point2f(i*0.1, 0.45), dai.Point2f((i+1)*0.1, 0.55)) spatialLocationCalculator.initialConfig.addROI(config) # Linking monoLeft.out.link(stereo.left) monoRight.out.link(stereo.right) spatialLocationCalculator.passthroughDepth.link(xoutDepth.input) stereo.depth.link(spatialLocationCalculator.inputDepth) spatialLocationCalculator.out.link(xoutSpatialData.input) 
xinSpatialCalcConfig.out.link(spatialLocationCalculator.inputConfig) # Connect to device and start pipeline with dai.Device(pipeline) as device: device.setIrLaserDotProjectorBrightness(1000) # Output queue will be used to get the depth frames from the outputs defined above depthQueue = device.getOutputQueue(name="depth", maxSize=4, blocking=False) spatialCalcQueue = device.getOutputQueue(name="spatialData", maxSize=4, blocking=False) color = (0,200,40) fontType = cv2.FONT_HERSHEY_TRIPLEX while True: inDepth = depthQueue.get() # Blocking call, will wait until a new data has arrived depthFrame = inDepth.getFrame() # depthFrame values are in millimeters depth_downscaled = depthFrame[::4] min_depth = np.percentile(depth_downscaled[depth_downscaled != 0], 1) max_depth = np.percentile(depth_downscaled, 99) depthFrameColor = np.interp(depthFrame, (min_depth, max_depth), (0, 255)).astype(np.uint8) depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_HOT) spatialData = spatialCalcQueue.get().getSpatialLocations() for depthData in spatialData: roi = depthData.config.roi roi = roi.denormalize(width=depthFrameColor.shape[1], height=depthFrameColor.shape[0]) xmin = int(roi.topLeft().x) ymin = int(roi.topLeft().y) xmax = int(roi.bottomRight().x) ymax = int(roi.bottomRight().y) coords = depthData.spatialCoordinates distance = math.sqrt(coords.x ** 2 + coords.y ** 2 + coords.z ** 2) cv2.rectangle(depthFrameColor, (xmin, ymin), (xmax, ymax), color, thickness=2) cv2.putText(depthFrameColor, "{:.1f}m".format(distance/1000), (xmin + 10, ymin + 20), fontType, 0.6, color) # Show the frame cv2.imshow("depth", depthFrameColor) if cv2.waitKey(1) == ord('q'): break |
Also available on GitHub
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 | #include <iostream> #include "utility.hpp" // Includes common necessary includes for development using depthai library #include "depthai/depthai.hpp" static constexpr float stepSize = 0.05f; static std::atomic<bool> newConfig{false}; int main() { using namespace std; // Create pipeline dai::Pipeline pipeline; // Define sources and outputs auto monoLeft = pipeline.create<dai::node::MonoCamera>(); auto monoRight = pipeline.create<dai::node::MonoCamera>(); auto stereo = pipeline.create<dai::node::StereoDepth>(); auto spatialLocationCalculator = pipeline.create<dai::node::SpatialLocationCalculator>(); auto xoutDepth = pipeline.create<dai::node::XLinkOut>(); auto xoutSpatialData = pipeline.create<dai::node::XLinkOut>(); auto xinSpatialCalcConfig = pipeline.create<dai::node::XLinkIn>(); xoutDepth->setStreamName("depth"); xoutSpatialData->setStreamName("spatialData"); xinSpatialCalcConfig->setStreamName("spatialCalcConfig"); // Properties monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P); monoLeft->setCamera("left"); monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P); monoRight->setCamera("right"); stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY); stereo->setLeftRightCheck(true); stereo->setExtendedDisparity(true); spatialLocationCalculator->inputConfig.setWaitForMessage(false); // Create 10 ROIs for(int i = 0; i < 10; i++) { dai::SpatialLocationCalculatorConfigData config; config.depthThresholds.lowerThreshold = 200; config.depthThresholds.upperThreshold = 10000; config.roi = dai::Rect(dai::Point2f(i * 0.1, 0.45), dai::Point2f((i + 1) * 0.1, 0.55)); 
spatialLocationCalculator->initialConfig.addROI(config); } // Linking monoLeft->out.link(stereo->left); monoRight->out.link(stereo->right); spatialLocationCalculator->passthroughDepth.link(xoutDepth->input); stereo->depth.link(spatialLocationCalculator->inputDepth); spatialLocationCalculator->out.link(xoutSpatialData->input); xinSpatialCalcConfig->out.link(spatialLocationCalculator->inputConfig); // Connect to device and start pipeline dai::Device device(pipeline); device.setIrLaserDotProjectorBrightness(1000); // Output queue will be used to get the depth frames from the outputs defined above auto depthQueue = device.getOutputQueue("depth", 4, false); auto spatialCalcQueue = device.getOutputQueue("spatialData", 4, false); auto color = cv::Scalar(0, 200, 40); auto fontType = cv::FONT_HERSHEY_TRIPLEX; while(true) { auto inDepth = depthQueue->get<dai::ImgFrame>(); cv::Mat depthFrame = inDepth->getFrame(); // depthFrame values are in millimeters cv::Mat depthFrameColor; cv::normalize(depthFrame, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1); cv::equalizeHist(depthFrameColor, depthFrameColor); cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_HOT); auto spatialData = spatialCalcQueue->get<dai::SpatialLocationCalculatorData>()->getSpatialLocations(); for(auto depthData : spatialData) { auto roi = depthData.config.roi; roi = roi.denormalize(depthFrameColor.cols, depthFrameColor.rows); auto xmin = static_cast<int>(roi.topLeft().x); auto ymin = static_cast<int>(roi.topLeft().y); auto xmax = static_cast<int>(roi.bottomRight().x); auto ymax = static_cast<int>(roi.bottomRight().y); auto coords = depthData.spatialCoordinates; auto distance = std::sqrt(coords.x * coords.x + coords.y * coords.y + coords.z * coords.z); cv::rectangle(depthFrameColor, cv::Rect(cv::Point(xmin, ymin), cv::Point(xmax, ymax)), color); std::stringstream depthDistance; depthDistance.precision(2); depthDistance << fixed << static_cast<float>(distance / 1000.0f) << "m"; 
cv::putText(depthFrameColor, depthDistance.str(), cv::Point(xmin + 10, ymin + 20), fontType, 0.5, color); } // Show the frame cv::imshow("depth", depthFrameColor); if(cv::waitKey(1) == 'q') { break; } } return 0; } |