ON THIS PAGE

  • Demo
  • Source code
  • Pipeline

Mono Preview

This example shows how to set up a pipeline that outputs the left and right grayscale camera images, connects over XLink to transfer these to the host real-time, and displays both using OpenCV.

Similar samples are linked from the sidebar of this page.

Demo

This example uses the DepthAI v2 API (XLinkOut nodes and host-side output queues); see the installation instructions for setting up the matching DepthAI release.

Source code

Python

Python
GitHub
#!/usr/bin/env python3

"""Mono Preview.

Builds a DepthAI pipeline that streams the left and right grayscale
(mono) camera images to the host over XLink and displays both with
OpenCV. Press 'q' (or 'Q', matching the C++ sample) to quit.
"""

import cv2
import depthai as dai

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
xoutLeft = pipeline.create(dai.node.XLinkOut)
xoutRight = pipeline.create(dai.node.XLinkOut)

# Stream names must match the queue names requested on the host side below.
xoutLeft.setStreamName('left')
xoutRight.setStreamName('right')

# Properties: bind each node to a physical socket and pick 720p output.
monoLeft.setCamera("left")
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setCamera("right")
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)

# Linking: camera output -> XLink out (device -> host transfer).
monoRight.out.link(xoutRight.input)
monoLeft.out.link(xoutLeft.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Output queues will be used to get the grayscale frames from the outputs
    # defined above. Non-blocking with a small buffer so stale frames are dropped.
    qLeft = device.getOutputQueue(name="left", maxSize=4, blocking=False)
    qRight = device.getOutputQueue(name="right", maxSize=4, blocking=False)

    while True:
        # Instead of get (blocking), we use tryGet (non-blocking) which will
        # return the available data or None otherwise.
        inLeft = qLeft.tryGet()
        inRight = qRight.tryGet()

        if inLeft is not None:
            cv2.imshow("left", inLeft.getCvFrame())

        if inRight is not None:
            cv2.imshow("right", inRight.getCvFrame())

        # Accept both cases, consistent with the C++ sample on this page.
        key = cv2.waitKey(1)
        if key in (ord('q'), ord('Q')):
            break

C++

1#include <iostream>
2
3// Includes common necessary includes for development using depthai library
4#include "depthai/depthai.hpp"
5
6int main() {
7    // Create pipeline
8    dai::Pipeline pipeline;
9
10    // Define sources and outputs
11    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
12    auto monoRight = pipeline.create<dai::node::MonoCamera>();
13    auto xoutLeft = pipeline.create<dai::node::XLinkOut>();
14    auto xoutRight = pipeline.create<dai::node::XLinkOut>();
15
16    xoutLeft->setStreamName("left");
17    xoutRight->setStreamName("right");
18
19    // Properties
20    monoLeft->setCamera("left");
21    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
22    monoRight->setCamera("right");
23    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
24
25    // Linking
26    monoRight->out.link(xoutRight->input);
27    monoLeft->out.link(xoutLeft->input);
28
29    // Connect to device and start pipeline
30    dai::Device device(pipeline);
31
32    // Output queues will be used to get the grayscale frames from the outputs defined above
33    auto qLeft = device.getOutputQueue("left", 4, false);
34    auto qRight = device.getOutputQueue("right", 4, false);
35
36    while(true) {
37        // Instead of get (blocking), we use tryGet (non-blocking) which will return the available data or None otherwise
38        auto inLeft = qLeft->tryGet<dai::ImgFrame>();
39        auto inRight = qRight->tryGet<dai::ImgFrame>();
40
41        if(inLeft) {
42            cv::imshow("left", inLeft->getCvFrame());
43        }
44
45        if(inRight) {
46            cv::imshow("right", inRight->getCvFrame());
47        }
48
49        int key = cv::waitKey(1);
50        if(key == 'q' || key == 'Q') {
51            return 0;
52        }
53    }
54    return 0;
55}

Pipeline

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.