DepthAI v2 has been superseded by DepthAI v3. You are viewing legacy documentation.
DepthAI Tutorials
DepthAI API References

ON THIS PAGE

  • Similar samples:
  • Demo
  • Setup
  • Source code
  • Pipeline

Mono Camera Control

This example shows how to control the device-side crop and the camera's exposure settings. The outputs are displayed cropped mono frames that can be manipulated using the following keys:
  • w will move the crop up
  • a will move the crop left
  • s will move the crop down
  • d will move the crop right
  • e will trigger autoexposure
  • i and o will decrease/increase the exposure time
  • k and l will decrease/increase the sensitivity iso

Similar samples:

Demo

Setup

This example requires the DepthAI v2 API, see installation instructions.

Source code

Python

Python
GitHub
#!/usr/bin/env python3

"""
This example shows usage of mono camera in crop mode with the possibility to move the crop.
Uses 'WASD' controls to move the crop window, 'IO'/'KL' for manual exposure control:
  Control:      key[dec/inc]  min..max
  exposure time:     I   O      1..33000 [us]
  sensitivity iso:   K   L    100..1600
To go back to auto controls:
  'E' - autoexposure
"""
12
13import cv2
14import depthai as dai
15
# Crop-window shift applied per 'W'/'A'/'S'/'D' keypress, in normalized units
stepSize = 0.02

# Increments applied by the manual exposure keys
expStep = 500  # exposure time step [us]
isoStep = 50   # ISO sensitivity step
21
def clamp(num, v0, v1):
    """Return num limited to the inclusive range [v0, v1]."""
    capped = min(num, v1)   # enforce the upper bound first
    return max(capped, v0)  # then the lower bound
24
# Set True when the crop rectangle changed and must be re-sent to the device
sendCamConfig = False

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs: each mono camera feeds an ImageManip node
# that performs the device-side crop.
monoRight = pipeline.create(dai.node.MonoCamera)
monoLeft = pipeline.create(dai.node.MonoCamera)
manipRight = pipeline.create(dai.node.ImageManip)
manipLeft = pipeline.create(dai.node.ImageManip)

# XLink bridges: control/config streams in, cropped frames out
controlIn = pipeline.create(dai.node.XLinkIn)
configIn = pipeline.create(dai.node.XLinkIn)
manipOutRight = pipeline.create(dai.node.XLinkOut)
manipOutLeft = pipeline.create(dai.node.XLinkOut)

controlIn.setStreamName('control')
configIn.setStreamName('config')
manipOutRight.setStreamName("right")
manipOutLeft.setStreamName("left")

# Crop range (normalized coordinates, 0..1)
topLeft = dai.Point2f(0.2, 0.2)
bottomRight = dai.Point2f(0.8, 0.8)

# Properties
monoRight.setCamera("right")
monoLeft.setCamera("left")
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
manipRight.initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
manipLeft.initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
manipRight.setMaxOutputFrameSize(monoRight.getResolutionHeight()*monoRight.getResolutionWidth()*3)
# Fix: the original set the max output frame size only on the right manip;
# apply the same bound to the left manip so both sides behave identically.
manipLeft.setMaxOutputFrameSize(monoLeft.getResolutionHeight()*monoLeft.getResolutionWidth()*3)

# Linking
monoRight.out.link(manipRight.inputImage)
monoLeft.out.link(manipLeft.inputImage)
controlIn.out.link(monoRight.inputControl)
controlIn.out.link(monoLeft.inputControl)
configIn.out.link(manipRight.inputConfig)
configIn.out.link(manipLeft.inputConfig)
manipRight.out.link(manipOutRight.input)
manipLeft.out.link(manipOutLeft.input)
68
# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Output queues deliver the cropped grayscale frames; input queues carry
    # camera control and crop-config messages to the device.
    qRight = device.getOutputQueue(manipOutRight.getStreamName(), maxSize=4, blocking=False)
    qLeft = device.getOutputQueue(manipOutLeft.getStreamName(), maxSize=4, blocking=False)
    configQueue = device.getInputQueue(configIn.getStreamName())
    controlQueue = device.getInputQueue(controlIn.getStreamName())

    # Manual exposure state and its valid limits
    expTime = 20000  # [us]
    expMin, expMax = 1, 33000

    sensIso = 800
    sensMin, sensMax = 100, 1600

    while True:
        # Show the latest cropped frame from each camera
        cv2.imshow("right", qRight.get().getCvFrame())
        cv2.imshow("left", qLeft.get().getCvFrame())

        # Update screen (1 ms polling rate)
        key = cv2.waitKey(1)
        if key == ord('q'):
            break

        if key == ord('e'):
            print("Autoexposure enable")
            ctrl = dai.CameraControl()
            ctrl.setAutoExposureEnable()
            controlQueue.send(ctrl)
        elif key in (ord('i'), ord('o'), ord('k'), ord('l')):
            if key == ord('i'):
                expTime -= expStep
            elif key == ord('o'):
                expTime += expStep
            elif key == ord('k'):
                sensIso -= isoStep
            else:  # 'l'
                sensIso += isoStep
            expTime = clamp(expTime, expMin, expMax)
            sensIso = clamp(sensIso, sensMin, sensMax)
            print("Setting manual exposure, time:", expTime, "iso:", sensIso)
            ctrl = dai.CameraControl()
            ctrl.setManualExposure(expTime, sensIso)
            controlQueue.send(ctrl)
        # WASD: shift the crop window, staying inside the normalized 0..1 frame
        elif key == ord('w') and topLeft.y - stepSize >= 0:
            topLeft.y -= stepSize
            bottomRight.y -= stepSize
            sendCamConfig = True
        elif key == ord('a') and topLeft.x - stepSize >= 0:
            topLeft.x -= stepSize
            bottomRight.x -= stepSize
            sendCamConfig = True
        elif key == ord('s') and bottomRight.y + stepSize <= 1:
            topLeft.y += stepSize
            bottomRight.y += stepSize
            sendCamConfig = True
        elif key == ord('d') and bottomRight.x + stepSize <= 1:
            topLeft.x += stepSize
            bottomRight.x += stepSize
            sendCamConfig = True

        # Push the updated crop rectangle to the device, if it changed
        if sendCamConfig:
            cfg = dai.ImageManipConfig()
            cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
            configQueue.send(cfg)
            sendCamConfig = False

C++

1/**
2 * This example shows usage of mono camera in crop mode with the possibility to move the crop.
3 * Uses 'WASD' controls to move the crop window, 'T' to trigger autofocus, 'IOKL,.' for manual exposure/focus:
4 *   Control:      key[dec/inc]  min..max
5 *   exposure time:     I   O      1..33000 [us]
6 *   sensitivity iso:   K   L    100..1600
7 * To go back to auto controls:
8 *   'E' - autoexposure
9 */
10#include <iostream>
11
12// Includes common necessary includes for development using depthai library
13#include "depthai/depthai.hpp"
14
15// Step size ('W','A','S','D' controls)
16static constexpr float stepSize = 0.02f;
17
18// Manual exposure/focus set step
19static constexpr int EXP_STEP = 500;  // us
20static constexpr int ISO_STEP = 50;
21
// Limit num to the inclusive range [v0, v1].
static int clamp(int num, int v0, int v1) {
    const int capped = std::min(num, v1);  // enforce the upper bound first
    return std::max(capped, v0);           // then the lower bound
}
25
26static std::atomic<bool> sendCamConfig{false};
27
28int main() {
29    // Create pipeline
30    dai::Pipeline pipeline;
31
32    // Define sources and outputs
33    auto monoRight = pipeline.create<dai::node::MonoCamera>();
34    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
35    auto manipRight = pipeline.create<dai::node::ImageManip>();
36    auto manipLeft = pipeline.create<dai::node::ImageManip>();
37
38    auto controlIn = pipeline.create<dai::node::XLinkIn>();
39    auto configIn = pipeline.create<dai::node::XLinkIn>();
40    auto manipOutRight = pipeline.create<dai::node::XLinkOut>();
41    auto manipOutLeft = pipeline.create<dai::node::XLinkOut>();
42
43    controlIn->setStreamName("control");
44    configIn->setStreamName("config");
45    manipOutRight->setStreamName("right");
46    manipOutLeft->setStreamName("left");
47
48    // Crop range
49    dai::Point2f topLeft(0.2f, 0.2f);
50    dai::Point2f bottomRight(0.8f, 0.8f);
51
52    // Properties
53    monoRight->setCamera("right");
54    monoLeft->setCamera("left");
55    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
56    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
57    manipRight->initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
58    manipLeft->initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
59
60    // Linking
61    monoRight->out.link(manipRight->inputImage);
62    monoLeft->out.link(manipLeft->inputImage);
63    controlIn->out.link(monoRight->inputControl);
64    controlIn->out.link(monoLeft->inputControl);
65    configIn->out.link(manipRight->inputConfig);
66    configIn->out.link(manipLeft->inputConfig);
67    manipRight->out.link(manipOutRight->input);
68    manipLeft->out.link(manipOutLeft->input);
69
70    // Connect to device and start pipeline
71    dai::Device device(pipeline);
72
73    // Output queues will be used to get the grayscale frames
74    auto qRight = device.getOutputQueue(manipOutRight->getStreamName(), 4, false);
75    auto qLeft = device.getOutputQueue(manipOutLeft->getStreamName(), 4, false);
76    auto controlQueue = device.getInputQueue(controlIn->getStreamName());
77    auto configQueue = device.getInputQueue(configIn->getStreamName());
78
79    // Defaults and limits for manual focus/exposure controls
80    int exp_time = 20000;
81    int exp_min = 1;
82    int exp_max = 33000;
83
84    int sens_iso = 800;
85    int sens_min = 100;
86    int sens_max = 1600;
87
88    while(true) {
89        auto inRight = qRight->get<dai::ImgFrame>();
90        auto inLeft = qLeft->get<dai::ImgFrame>();
91        cv::imshow("right", inRight->getCvFrame());
92        cv::imshow("left", inLeft->getCvFrame());
93
94        // Update screen (1ms pooling rate)
95        int key = cv::waitKey(1);
96        if(key == 'q') {
97            break;
98        } else if(key == 'e') {
99            printf("Autoexposure enable\n");
100            dai::CameraControl ctrl;
101            ctrl.setAutoExposureEnable();
102            controlQueue->send(ctrl);
103        } else if(key == 'i' || key == 'o' || key == 'k' || key == 'l') {
104            if(key == 'i') exp_time -= EXP_STEP;
105            if(key == 'o') exp_time += EXP_STEP;
106            if(key == 'k') sens_iso -= ISO_STEP;
107            if(key == 'l') sens_iso += ISO_STEP;
108            exp_time = clamp(exp_time, exp_min, exp_max);
109            sens_iso = clamp(sens_iso, sens_min, sens_max);
110            printf("Setting manual exposure, time: %d, iso: %d\n", exp_time, sens_iso);
111            dai::CameraControl ctrl;
112            ctrl.setManualExposure(exp_time, sens_iso);
113            controlQueue->send(ctrl);
114        } else if(key == 'w') {
115            if(topLeft.y - stepSize >= 0) {
116                topLeft.y -= stepSize;
117                bottomRight.y -= stepSize;
118                sendCamConfig = true;
119            }
120        } else if(key == 'a') {
121            if(topLeft.x - stepSize >= 0) {
122                topLeft.x -= stepSize;
123                bottomRight.x -= stepSize;
124                sendCamConfig = true;
125            }
126        } else if(key == 's') {
127            if(bottomRight.y + stepSize <= 1) {
128                topLeft.y += stepSize;
129                bottomRight.y += stepSize;
130                sendCamConfig = true;
131            }
132        } else if(key == 'd') {
133            if(bottomRight.x + stepSize <= 1) {
134                topLeft.x += stepSize;
135                bottomRight.x += stepSize;
136                sendCamConfig = true;
137            }
138        }
139
140        // Send new config to camera
141        if(sendCamConfig) {
142            dai::ImageManipConfig cfg;
143            cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
144            configQueue->send(cfg);
145            sendCamConfig = false;
146        }
147    }
148    return 0;
149}

Pipeline

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.