DepthAI
Software Stack

ON THIS PAGE

  • Demo
  • Pipeline
  • Source code

Camera ROI-based exposure and focus

Supported on: RVC2, RVC4
This example lets the user select a region of interest (ROI) on the frame (on the host computer); after the selection is complete, it sets the camera's auto-exposure (AE) and auto-focus (AF) to the selected ROI. A similar application could set AF/AE based on object detection results, so the camera would automatically refocus and adjust exposure when a specific object is detected.

Demo

This example requires the DepthAI v3 API, see installation instructions.

Pipeline

Source code

Python

Python
GitHub
#!/usr/bin/env python3
"""Select a rectangular ROI on the video window with the mouse; on release,
the camera's auto-exposure (AE) and auto-focus (AF) are restricted to that
region. Press 'q' to quit."""

import cv2
import depthai as dai

# Create pipeline
with dai.Pipeline() as pipeline:
    # Define source and output
    cam = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    stream_q = cam.requestOutput((1920, 1080)).createOutputQueue()

    # Single input queue for sending CameraControl messages to the camera.
    # (The original example created a second, unused queue on the same input.)
    cam_q_in = cam.inputControl.createInputQueue()

    # Connect to device and start pipeline
    pipeline.start()

    # ROI selection state, in display (1080p) coordinates
    start_points = None   # (x, y) where the drag started, or None when idle
    roi_rect = None       # (x, y, w, h) rectangle currently drawn, or None
    scale_factors = None  # (sx, sy) display -> full-sensor coordinate scaling

    def select_roi(event, x, y, flags, param):
        """Mouse callback: drag with the left button to select the ROI.

        On button release the rectangle is scaled to sensor coordinates and
        sent to the camera as a CameraControl message.
        """
        global start_points, roi_rect

        def set_roi_rect():
            # Normalize so the stored rect has a top-left origin and positive
            # size regardless of the drag direction.
            global roi_rect
            x1, y1 = start_points
            x2, y2 = (x, y)
            roi_rect = (min(x1, x2), min(y1, y2), abs(x2 - x1), abs(y2 - y1))

        if event == cv2.EVENT_LBUTTONDOWN:
            roi_rect = None
            start_points = (x, y)
        elif event == cv2.EVENT_MOUSEMOVE and start_points:
            set_roi_rect()
        elif event == cv2.EVENT_LBUTTONUP and start_points:
            set_roi_rect()
            # AE/AF regions are specified in full-sensor coordinates, so the
            # display-space rectangle must be scaled up first.
            roi_rect_scaled = (
                int(roi_rect[0] * scale_factors[0]),
                int(roi_rect[1] * scale_factors[1]),
                int(roi_rect[2] * scale_factors[0]),
                int(roi_rect[3] * scale_factors[1])
            )
            print(f"ROI selected: {roi_rect}")
            ctrl = dai.CameraControl()
            print(f"Scaled ROI selected: {roi_rect_scaled}. Setting exposure and focus to this region.")
            ctrl.setAutoExposureRegion(*roi_rect_scaled)
            ctrl.setAutoFocusRegion(*roi_rect_scaled)
            cam_q_in.send(ctrl)
            start_points = None

    # Create a window and set the mouse callback
    cv2.namedWindow("video")
    cv2.setMouseCallback("video", select_roi)

    while pipeline.isRunning():
        img_hd: dai.ImgFrame = stream_q.get()
        if scale_factors is None:
            # Derive the display -> sensor scaling once, from the first
            # frame's transformation metadata.
            print(img_hd.getTransformation().getSourceSize(), img_hd.getTransformation().getSize())
            scale_factors = (img_hd.getTransformation().getSourceSize()[0] / img_hd.getTransformation().getSize()[0],
                            img_hd.getTransformation().getSourceSize()[1] / img_hd.getTransformation().getSize()[1])
        frame = img_hd.getCvFrame()

        # Draw the ROI rectangle if it exists
        if roi_rect is not None:
            x, y, w, h = roi_rect
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)

        cv2.imshow("video", frame)

        key = cv2.waitKey(1)
        if key == ord("q"):
            break

C++

1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <memory>
5#include <opencv2/opencv.hpp>
6#include <tuple>
7
8#include "depthai/depthai.hpp"
9
10std::atomic<bool> quitEvent(false);
11
12void signalHandler(int) {
13    quitEvent = true;
14}
15
16// Global variables for ROI selection
17std::vector<cv::Point> startPoints;
18cv::Rect roiRect;
19std::pair<float, float> scaleFactors;
20
21// Mouse callback function for ROI selection
22void selectRoi(int event, int x, int y, int flags, void* userdata) {
23    auto* camQIn = static_cast<std::shared_ptr<dai::InputQueue>*>(userdata);
24
25    if(event == cv::EVENT_LBUTTONDOWN) {
26        roiRect = cv::Rect();
27        startPoints.clear();
28        startPoints.push_back(cv::Point(x, y));
29    } else if(event == cv::EVENT_MOUSEMOVE && !startPoints.empty()) {
30        cv::Point start = startPoints[0];
31        roiRect = cv::Rect(std::min(start.x, x), std::min(start.y, y), std::abs(x - start.x), std::abs(y - start.y));
32    } else if(event == cv::EVENT_LBUTTONUP && !startPoints.empty()) {
33        cv::Point start = startPoints[0];
34        roiRect = cv::Rect(std::min(start.x, x), std::min(start.y, y), std::abs(x - start.x), std::abs(y - start.y));
35
36        // Scale ROI to original resolution
37        cv::Rect roiRectScaled(static_cast<int>(roiRect.x * scaleFactors.first),
38                               static_cast<int>(roiRect.y * scaleFactors.second),
39                               static_cast<int>(roiRect.width * scaleFactors.first),
40                               static_cast<int>(roiRect.height * scaleFactors.second));
41
42        std::cout << "ROI selected: " << roiRect << std::endl;
43        std::cout << "Scaled ROI selected: " << roiRectScaled << ". Setting exposure and focus to this region." << std::endl;
44
45        // Create and send control message
46        auto ctrl = std::make_shared<dai::CameraControl>();
47        ctrl->setAutoExposureRegion(roiRectScaled.x, roiRectScaled.y, roiRectScaled.width, roiRectScaled.height);
48        ctrl->setAutoFocusRegion(roiRectScaled.x, roiRectScaled.y, roiRectScaled.width, roiRectScaled.height);
49        (*camQIn)->send(ctrl);
50
51        startPoints.clear();
52    }
53}
54
55int main() {
56    signal(SIGTERM, signalHandler);
57    signal(SIGINT, signalHandler);
58
59    // Create device
60    std::shared_ptr<dai::Device> device = std::make_shared<dai::Device>();
61
62    // Create pipeline
63    dai::Pipeline pipeline(device);
64
65    // Create nodes
66    auto cam = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
67    auto camQIn = cam->inputControl.createInputQueue();
68    auto streamQ = cam->requestOutput(std::make_pair(1920, 1080))->createOutputQueue();
69
70    // Start pipeline
71    pipeline.start();
72
73    // Create window and set mouse callback
74    cv::namedWindow("video");
75    cv::setMouseCallback("video", selectRoi, &camQIn);
76
77    while(pipeline.isRunning() && !quitEvent) {
78        auto imgHd = streamQ->get<dai::ImgFrame>();
79        if(imgHd == nullptr) continue;
80
81        // Calculate scale factors if not set
82        if(scaleFactors.first == 0.0f) {
83            auto sourceSize = imgHd->transformation.getSourceSize();
84            auto targetSize = imgHd->transformation.getSize();
85            scaleFactors = std::make_pair(static_cast<float>(sourceSize.first) / targetSize.first, static_cast<float>(sourceSize.second) / targetSize.second);
86            std::cout << "Source size: " << sourceSize.first << "x" << sourceSize.second << std::endl;
87            std::cout << "Target size: " << targetSize.first << "x" << targetSize.second << std::endl;
88        }
89
90        cv::Mat frame = imgHd->getCvFrame();
91
92        // Draw ROI rectangle if it exists
93        if(roiRect.width > 0 && roiRect.height > 0) {
94            cv::rectangle(frame, roiRect, cv::Scalar(0, 255, 0), 2);
95        }
96
97        cv::imshow("video", frame);
98
99        if(cv::waitKey(1) == 'q') {
100            break;
101        }
102    }
103
104    pipeline.stop();
105    pipeline.wait();
106
107    return 0;
108}

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.