Stereo Depth Video

This example is an upgraded Depth Preview. It runs at a higher resolution (720p) and can show every frame type: mono left/right, rectified left/right, disparity and depth. There are 6 modes which you can toggle inside the code (a minimal sketch of the toggles follows this list):

  1. withDepth: if you turn it off, the example becomes a Mono Preview and shows only the two mono cameras

  2. outputDepth: if you turn it on, the depth frames are shown as well

  3. outputRectified: if you turn it on, the rectified left/right frames are shown as well

  4. lrcheck: left-right check, used for better occlusion handling (see the StereoDepth node documentation for more information)

  5. extended: extended disparity, suitable for short-range objects (see the StereoDepth node documentation for more information)

  6. subpixel: subpixel disparity, suitable for long range (see the StereoDepth node documentation for more information)
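
The following is a minimal sketch, assuming only the flags and StereoDepth calls used in the full listing below, of how these toggles are applied when the pipeline is built:

import depthai as dai

# Toggles used by this example; edit them before the pipeline is built
withDepth = True        # False -> plain mono preview, no StereoDepth node
outputDepth = False     # True  -> also stream depth frames
outputRectified = True  # True  -> also stream rectified left/right frames
lrcheck = True          # left-right check, better occlusion handling
extended = False        # extended disparity, better for short-range objects
subpixel = False        # subpixel disparity, better for long range

pipeline = dai.Pipeline()
if withDepth:
    stereo = pipeline.createStereoDepth()
    stereo.setLeftRightCheck(lrcheck)
    stereo.setExtendedDisparity(extended)
    stereo.setSubpixel(subpixel)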

Similar samples:

Setup

Please run the install script to download all required dependencies. Note that this script must be run from inside the git repository, so you have to clone the depthai-python repository first and then run the script:

git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py

For additional information, please follow the installation guide.

Source code

Also available on GitHub

#!/usr/bin/env python3

import cv2
import depthai as dai
import numpy as np

withDepth = True

outputDepth = False
outputRectified = True
lrcheck = True
extended = False
subpixel = False

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
monoLeft = pipeline.createMonoCamera()
monoRight = pipeline.createMonoCamera()
if withDepth:
    stereo = pipeline.createStereoDepth()

xoutLeft = pipeline.createXLinkOut()
xoutRight = pipeline.createXLinkOut()
xoutDisp = pipeline.createXLinkOut()
xoutDepth = pipeline.createXLinkOut()
xoutRectifL = pipeline.createXLinkOut()
xoutRectifR = pipeline.createXLinkOut()

# XLinkOut
xoutLeft.setStreamName("left")
xoutRight.setStreamName("right")
if withDepth:
    xoutDisp.setStreamName("disparity")
    xoutDepth.setStreamName("depth")
    xoutRectifL.setStreamName("rectified_left")
    xoutRectifR.setStreamName("rectified_right")

# Properties
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)

if withDepth:
    # StereoDepth
    stereo.initialConfig.setConfidenceThreshold(230)
    stereo.setRectifyEdgeFillColor(0)  # black, to better see the cutout
    # stereo.setInputResolution(1280, 720)
    stereo.initialConfig.setMedianFilter(dai.MedianFilter.KERNEL_5x5)
    stereo.setLeftRightCheck(lrcheck)
    stereo.setExtendedDisparity(extended)
    stereo.setSubpixel(subpixel)

    # Linking
    monoLeft.out.link(stereo.left)
    monoRight.out.link(stereo.right)

    stereo.syncedLeft.link(xoutLeft.input)
    stereo.syncedRight.link(xoutRight.input)
    stereo.disparity.link(xoutDisp.input)

    if outputRectified:
        stereo.rectifiedLeft.link(xoutRectifL.input)
        stereo.rectifiedRight.link(xoutRectifR.input)

    if outputDepth:
        stereo.depth.link(xoutDepth.input)

else:
    # Link plugins CAM -> XLINK
    monoLeft.out.link(xoutLeft.input)
    monoRight.out.link(xoutRight.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    leftQueue = device.getOutputQueue(name="left", maxSize=8, blocking=False)
    rightQueue = device.getOutputQueue(name="right", maxSize=8, blocking=False)
    if withDepth:
        dispQueue = device.getOutputQueue(name="disparity", maxSize=8, blocking=False)
        depthQueue = device.getOutputQueue(name="depth", maxSize=8, blocking=False)
        rectifLeftQueue = device.getOutputQueue(name="rectified_left", maxSize=8, blocking=False)
        rectifRightQueue = device.getOutputQueue(name="rectified_right", maxSize=8, blocking=False)

    # Disparity range is used for normalization
    disparityMultiplier = 255 / stereo.getMaxDisparity() if withDepth else 0

    while True:
        left = leftQueue.get()
        cv2.imshow("left", left.getFrame())
        right = rightQueue.get()
        cv2.imshow("right", right.getFrame())

        if withDepth:
            disparity = dispQueue.get()
            disp = disparity.getCvFrame()
            disp = (disp * disparityMultiplier).astype(np.uint8)  # Normalize disparity to 0..255 for display
            cv2.imshow("disparity", disp)
            disp = cv2.applyColorMap(disp, cv2.COLORMAP_JET)
            cv2.imshow("disparity_color", disp)

            if outputDepth:
                depth = depthQueue.get()
                cv2.imshow("depth", depth.getCvFrame().astype(np.uint16))

            if outputRectified:
                rectifL = rectifLeftQueue.get()
                cv2.imshow("rectified_left", rectifL.getFrame())

                rectifR = rectifRightQueue.get()
                cv2.imshow("rectified_right", rectifR.getFrame())

        if cv2.waitKey(1) == ord('q'):
            break
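
The depth frames shown above are 16-bit images with values in millimetres, which cv2.imshow does not display well directly. Below is a minimal sketch, not part of the original example, of one common way to visualize them; the helper name colorize_depth is hypothetical and the per-frame min/max normalization is purely cosmetic:

import cv2
import numpy as np

def colorize_depth(depth_frame_mm):
    # depth_frame_mm: uint16 depth in millimetres, e.g. depth.getCvFrame()
    # Scale to 8 bits for display only; this does not change the measured depth
    depth_8bit = cv2.normalize(depth_frame_mm, None, 0, 255, cv2.NORM_MINMAX).astype(np.uint8)
    return cv2.applyColorMap(depth_8bit, cv2.COLORMAP_JET)

# Hypothetical usage inside the loop above:
# cv2.imshow("depth_color", colorize_depth(depth.getCvFrame()))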

Also available on GitHub

#include <iostream>

// Includes common necessary includes for development using the depthai library
#include "depthai/depthai.hpp"

static std::atomic<bool> withDepth{true};

static std::atomic<bool> outputDepth{false};
static std::atomic<bool> outputRectified{true};
static std::atomic<bool> lrcheck{true};
static std::atomic<bool> extended{false};
static std::atomic<bool> subpixel{false};

int main() {
    using namespace std;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto stereo = withDepth ? pipeline.create<dai::node::StereoDepth>() : nullptr;

    auto xoutLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutRight = pipeline.create<dai::node::XLinkOut>();
    auto xoutDisp = pipeline.create<dai::node::XLinkOut>();
    auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
    auto xoutRectifL = pipeline.create<dai::node::XLinkOut>();
    auto xoutRectifR = pipeline.create<dai::node::XLinkOut>();

    // XLinkOut
    xoutLeft->setStreamName("left");
    xoutRight->setStreamName("right");
    if(withDepth) {
        xoutDisp->setStreamName("disparity");
        xoutDepth->setStreamName("depth");
        xoutRectifL->setStreamName("rectified_left");
        xoutRectifR->setStreamName("rectified_right");
    }

    // Properties
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    if(withDepth) {
        // StereoDepth
        stereo->initialConfig.setConfidenceThreshold(200);
        stereo->setRectifyEdgeFillColor(0);  // black, to better see the cutout
        // stereo->loadCalibrationFile("../../../../depthai/resources/depthai.calib");
        // stereo->setInputResolution(1280, 720);
        // TODO: median filtering is disabled on device with (lrcheck || extended || subpixel)
        stereo->initialConfig.setMedianFilter(dai::MedianFilter::MEDIAN_OFF);
        stereo->setLeftRightCheck(lrcheck);
        stereo->setExtendedDisparity(extended);
        stereo->setSubpixel(subpixel);

        // Linking
        monoLeft->out.link(stereo->left);
        monoRight->out.link(stereo->right);

        stereo->syncedLeft.link(xoutLeft->input);
        stereo->syncedRight.link(xoutRight->input);
        stereo->disparity.link(xoutDisp->input);

        if(outputRectified) {
            stereo->rectifiedLeft.link(xoutRectifL->input);
            stereo->rectifiedRight.link(xoutRectifR->input);
        }

        if(outputDepth) {
            stereo->depth.link(xoutDepth->input);
        }

    } else {
        // Link plugins CAM -> XLINK
        monoLeft->out.link(xoutLeft->input);
        monoRight->out.link(xoutRight->input);
    }

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    auto leftQueue = device.getOutputQueue("left", 8, false);
    auto rightQueue = device.getOutputQueue("right", 8, false);
    auto dispQueue = withDepth ? device.getOutputQueue("disparity", 8, false) : nullptr;
    auto depthQueue = withDepth ? device.getOutputQueue("depth", 8, false) : nullptr;
    auto rectifLeftQueue = withDepth ? device.getOutputQueue("rectified_left", 8, false) : nullptr;
    auto rectifRightQueue = withDepth ? device.getOutputQueue("rectified_right", 8, false) : nullptr;

    // Disparity range is used for normalization
    float disparityMultiplier = withDepth ? 255 / stereo->getMaxDisparity() : 0;

    while(true) {
        auto left = leftQueue->get<dai::ImgFrame>();
        cv::imshow("left", left->getFrame());
        auto right = rightQueue->get<dai::ImgFrame>();
        cv::imshow("right", right->getFrame());

        if(withDepth) {
            // Note: in some configurations (if depth is enabled), disparity may output garbage data
            auto disparity = dispQueue->get<dai::ImgFrame>();
            cv::Mat disp(disparity->getCvFrame());
            disp.convertTo(disp, CV_8UC1, disparityMultiplier);  // Normalize disparity to 0..255 for display
            cv::imshow("disparity", disp);
            cv::Mat disp_color;
            cv::applyColorMap(disp, disp_color, cv::COLORMAP_JET);
            cv::imshow("disparity_color", disp_color);

            if(outputDepth) {
                auto depth = depthQueue->get<dai::ImgFrame>();
                cv::imshow("depth", cv::Mat(depth->getHeight(), depth->getWidth(), CV_16UC1, depth->getData().data()));
            }

            if(outputRectified) {
                auto rectifL = rectifLeftQueue->get<dai::ImgFrame>();
                // cv::flip(rectifiedLeftFrame, rectifiedLeftFrame, 1);
                cv::imshow("rectified_left", rectifL->getFrame());

                auto rectifR = rectifRightQueue->get<dai::ImgFrame>();
                // cv::flip(rectifiedRightFrame, rectifiedRightFrame, 1);
                cv::imshow("rectified_right", rectifR->getFrame());
            }
        }

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}
