DepthAI
  • DepthAI Components
    • AprilTags
    • Benchmark
    • Camera
    • Calibration
    • DetectionNetwork
    • Events
    • FeatureTracker
    • Gate
    • HostNodes
    • ImageAlign
    • ImageManip
    • IMU
    • Misc
    • Model Zoo
    • NeuralDepth
    • NeuralNetwork
    • ObjectTracker
    • RecordReplay
    • RGBD
    • Script
    • SpatialDetectionNetwork
    • SpatialLocationCalculator
    • StereoDepth
    • Sync
    • VideoEncoder
    • Visualizer
    • Warp
    • RVC2-specific
  • Advanced Tutorials
  • API Reference
  • Tools
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

Basalt VIO RTAB-Map

Supported on: RVC2
The example sets up a pipeline integrating stereo cameras, an IMU, and Basalt visual-inertial odometry with RTAB-Map SLAM to generate and visualize 3D occupancy grids, point clouds, and rectified images in real time using a RerunNode for visualization, with stereo depth and IMU configured for 60 FPS operation. This example requires the DepthAI v3 API; see the installation instructions.

Pipeline

Source code

Python

Python
GitHub
"""Stereo + IMU pipeline: Basalt VIO feeding RTAB-Map SLAM, visualized via Rerun."""
import time

import depthai as dai
from rerun_node import RerunNode

# Stream parameters for the stereo pair.
FPS = 60
WIDTH = 640
HEIGHT = 400

with dai.Pipeline() as pipeline:
    # ---- node creation ----------------------------------------------------
    left = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B, sensorFps=FPS)
    right = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C, sensorFps=FPS)
    imu = pipeline.create(dai.node.IMU)
    odom = pipeline.create(dai.node.BasaltVIO)
    slam = pipeline.create(dai.node.RTABMapSLAM)
    stereo = pipeline.create(dai.node.StereoDepth)

    # RTAB-Map: build a 3D occupancy grid and persist the working-memory state.
    slam.setParams({
        "RGBD/CreateOccupancyGrid": "true",
        "Grid/3D": "true",
        "Rtabmap/SaveWMState": "true",
    })

    viewer = pipeline.create(RerunNode)

    # IMU: raw accelerometer + gyroscope at 200 Hz, batches reported immediately.
    imu.enableIMUSensor([dai.IMUSensor.ACCELEROMETER_RAW, dai.IMUSensor.GYROSCOPE_RAW], 200)
    imu.setBatchReportThreshold(1)
    imu.setMaxBatchReports(10)

    # Stereo depth configuration.
    stereo.setExtendedDisparity(False)
    stereo.setLeftRightCheck(True)
    stereo.setSubpixel(True)
    stereo.setRectifyEdgeFillColor(0)  # black fill at rectification edges
    stereo.enableDistortionCorrection(True)
    stereo.initialConfig.setLeftRightCheckThreshold(10)
    stereo.setDepthAlign(dai.CameraBoardSocket.CAM_B)

    # ---- linking ----------------------------------------------------------
    left.requestOutput((WIDTH, HEIGHT)).link(stereo.left)
    right.requestOutput((WIDTH, HEIGHT)).link(stereo.right)
    stereo.syncedLeft.link(odom.left)
    stereo.syncedRight.link(odom.right)
    stereo.depth.link(slam.depth)
    stereo.rectifiedLeft.link(slam.rect)
    imu.out.link(odom.imu)

    odom.transform.link(slam.odom)
    slam.transform.link(viewer.inputTrans)
    slam.passthroughRect.link(viewer.inputImg)
    slam.occupancyGridMap.link(viewer.inputGrid)
    slam.obstaclePCL.link(viewer.inputObstaclePCL)
    slam.groundPCL.link(viewer.inputGroundPCL)

    pipeline.start()
    while pipeline.isRunning():
        time.sleep(1)

C++

1#include "depthai/depthai.hpp"
2#include "rerun_node.hpp"
3
4int main() {
5    using namespace std;
6    // ULogger::setType(ULogger::kTypeConsole);
7    // ULogger::setLevel(ULogger::kDebug);
8    // Create pipeline
9    dai::Pipeline pipeline;
10    int fps = 60;
11    int width = 640;
12    int height = 400;
13    // Define sources and outputs
14    auto left = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_B, std::nullopt, fps);
15    auto right = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_C, std::nullopt, fps);
16    auto stereo = pipeline.create<dai::node::StereoDepth>();
17    auto imu = pipeline.create<dai::node::IMU>();
18    auto odom = pipeline.create<dai::node::BasaltVIO>();
19    auto slam = pipeline.create<dai::node::RTABMapSLAM>();
20    std::map<std::string, std::string> params{};
21    params.insert(std::make_pair<std::string, std::string>("RGBD/CreateOccupancyGrid", "true"));
22    params.insert(std::make_pair<std::string, std::string>("Grid/3D", "true"));
23    params.insert(std::make_pair<std::string, std::string>("Rtabmap/SaveWMState", "true"));
24    slam->setParams(params);
25    auto rerun = pipeline.create<RerunNode>();
26
27    imu->enableIMUSensor({dai::IMUSensor::ACCELEROMETER_RAW, dai::IMUSensor::GYROSCOPE_RAW}, 200);
28    odom->setImuUpdateRate(200);
29    imu->setBatchReportThreshold(1);
30    imu->setMaxBatchReports(10);
31    stereo->setExtendedDisparity(false);
32    stereo->setSubpixel(true);
33    stereo->setLeftRightCheck(true);
34    stereo->setRectifyEdgeFillColor(0);  // black, to better see the cutout
35    stereo->enableDistortionCorrection(true);
36    stereo->initialConfig->setLeftRightCheckThreshold(10);
37    stereo->setDepthAlign(dai::StereoDepthProperties::DepthAlign::RECTIFIED_LEFT);
38
39    // Linking
40    left->requestOutput(std::make_pair(width, height))->link(stereo->left);
41    right->requestOutput(std::make_pair(width, height))->link(stereo->right);
42    stereo->syncedLeft.link(odom->left);
43    stereo->syncedRight.link(odom->right);
44    stereo->depth.link(slam->depth);
45    stereo->rectifiedLeft.link(slam->rect);
46    imu->out.link(odom->imu);
47
48    odom->transform.link(slam->odom);
49    slam->transform.link(rerun->inputTrans);
50    slam->passthroughRect.link(rerun->inputImg);
51    slam->occupancyGridMap.link(rerun->inputMap);
52    slam->obstaclePCL.link(rerun->inputObstaclePCL);
53    slam->groundPCL.link(rerun->inputGroundPCL);
54    pipeline.start();
55    pipeline.wait();
56}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.