DepthAI
  • DepthAI Components
    • AprilTags
    • Benchmark
    • Camera
    • Calibration
    • DetectionNetwork
    • Events
    • FeatureTracker
    • Gate
    • HostNodes
    • ImageAlign
    • ImageManip
    • IMU
    • Misc
    • Model Zoo
    • NeuralDepth
    • NeuralNetwork
    • ObjectTracker
    • RecordReplay
    • RGBD
    • Script
    • SpatialDetectionNetwork
    • SpatialLocationCalculator
    • StereoDepth
    • Sync
    • VideoEncoder
    • Visualizer
    • Warp
    • RVC2-specific
  • Advanced Tutorials
  • API Reference
  • Tools
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

RTAB-Map VIO

Supported on: RVC2
This example sets up a pipeline to process 640x400 stereo camera frames and 100 Hz IMU data, using Shi-Tomasi feature tracking and RTAB-Map VIO for visual odometry, with transformations and rectified images visualized in real-time via a RerunNode. This example requires the DepthAI v3 API, see installation instructions.

Pipeline

Source code

Python

Python
GitHub
1import time
2import depthai as dai
3from rerun_node import RerunNode
4
5# Create pipeline
6
7with dai.Pipeline() as p:
8    fps = 30
9    width = 640
10    height = 400
11    # Define sources and outputs
12    left = p.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B, sensorFps=fps)
13    right = p.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C, sensorFps=fps)
14    imu = p.create(dai.node.IMU)
15    stereo = p.create(dai.node.StereoDepth)
16    featureTracker = p.create(dai.node.FeatureTracker)
17    odom = p.create(dai.node.RTABMapVIO)
18
19    rerunViewer = p.create(RerunNode)
20    imu.enableIMUSensor([dai.IMUSensor.ACCELEROMETER_RAW, dai.IMUSensor.GYROSCOPE_RAW], 200)
21    imu.setBatchReportThreshold(1)
22    imu.setMaxBatchReports(10)
23
24    featureTracker.setHardwareResources(1,2)
25    featureTracker.initialConfig.setCornerDetector(dai.FeatureTrackerConfig.CornerDetector.Type.HARRIS)
26    featureTracker.initialConfig.setNumTargetFeatures(1000)
27    featureTracker.initialConfig.setMotionEstimator(False)
28    featureTracker.initialConfig.FeatureMaintainer.minimumDistanceBetweenFeatures = 49
29
30    stereo.setExtendedDisparity(False)
31    stereo.setLeftRightCheck(True)
32    stereo.setRectifyEdgeFillColor(0)
33    stereo.enableDistortionCorrection(True)
34    stereo.initialConfig.setLeftRightCheckThreshold(10)
35    stereo.setDepthAlign(dai.CameraBoardSocket.CAM_B)
36
37
38    # Linking
39
40    left.requestOutput((width, height)).link(stereo.left)
41    right.requestOutput((width, height)).link(stereo.right)
42    stereo.rectifiedLeft.link(featureTracker.inputImage)
43    featureTracker.passthroughInputImage.link(odom.rect)
44    stereo.depth.link(odom.depth)
45    featureTracker.outputFeatures.link(odom.features)
46    imu.out.link(odom.imu)
47    odom.passthroughRect.link(rerunViewer.inputImg)
48    odom.transform.link(rerunViewer.inputTrans)
49    p.start()
50    while p.isRunning():
51        time.sleep(1)

C++

1#include "depthai/depthai.hpp"
2#include "rerun_node.hpp"
3
4int main() {
5    using namespace std;
6    // ULogger::setType(ULogger::kTypeConsole);
7    // ULogger::setLevel(ULogger::kDebug);
8    // Create pipeline
9    dai::Pipeline pipeline;
10    int width = 640;
11    int height = 400;
12    // Define sources and outputs
13    auto left = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_B);
14    auto right = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_C);
15    auto stereo = pipeline.create<dai::node::StereoDepth>();
16    auto imu = pipeline.create<dai::node::IMU>();
17    auto featureTracker = pipeline.create<dai::node::FeatureTracker>();
18    auto odom = pipeline.create<dai::node::RTABMapVIO>();
19    auto rerun = pipeline.create<RerunNode>();
20    std::map<std::string, std::string> params{};
21    params.insert(std::make_pair<std::string, std::string>("Odom/ResetCountDown", "30"));
22
23    odom->setParams(params);
24    imu->enableIMUSensor({dai::IMUSensor::ACCELEROMETER_RAW, dai::IMUSensor::GYROSCOPE_RAW}, 100);
25    imu->setBatchReportThreshold(1);
26    imu->setMaxBatchReports(10);
27
28    featureTracker->setHardwareResources(1, 2);
29
30    featureTracker->initialConfig->setCornerDetector(dai::FeatureTrackerConfig::CornerDetector::Type::SHI_THOMASI);
31    featureTracker->initialConfig->setNumTargetFeatures(1000);
32    featureTracker->initialConfig->setMotionEstimator(false);
33    featureTracker->initialConfig->featureMaintainer.minimumDistanceBetweenFeatures = 49.0;
34
35    stereo->setExtendedDisparity(false);
36
37    stereo->setLeftRightCheck(true);
38    stereo->setRectifyEdgeFillColor(0);  // black, to better see the cutout
39    stereo->enableDistortionCorrection(true);
40    stereo->initialConfig->setLeftRightCheckThreshold(10);
41    stereo->setDepthAlign(dai::StereoDepthProperties::DepthAlign::RECTIFIED_LEFT);
42
43    // Linking
44    left->requestOutput(std::make_pair(width, height))->link(stereo->left);
45    right->requestOutput(std::make_pair(width, height))->link(stereo->right);
46    stereo->rectifiedLeft.link(featureTracker->inputImage);
47    featureTracker->passthroughInputImage.link(odom->rect);
48    stereo->depth.link(odom->depth);
49    imu->out.link(odom->imu);
50    featureTracker->outputFeatures.link(odom->features);
51    odom->transform.link(rerun->inputTrans);
52    odom->passthroughRect.link(rerun->inputImg);
53    pipeline.start();
54    pipeline.wait();
55}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.