DepthAI v2 has been superseded by DepthAI v3. You are viewing legacy documentation.
DepthAI Tutorials
DepthAI API References

ON THIS PAGE

  • Demo
  • Setup
  • Source code
  • Pipeline

Feature Detector

This example shows the capabilities of the FeatureTracker node, but only detects features — it does not track them. The Feature Tracker example additionally tracks these features.

Demo

software/depthai/examples/feature_detector.webp

Setup

Please run the install script to download all required dependencies. Please note that this script must be run from a git context, so you have to clone the depthai-python repository first and then run the script.
Command Line
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow the installation guide.

Source code

Python

Python
GitHub
#!/usr/bin/env python3

"""Feature detector demo.

Runs a FeatureTracker node (detection only, motion estimation disabled) on
both mono cameras and draws the detected features over each passthrough
frame. Pressing 's' toggles between the Harris and Shi-Tomasi corner
detectors by sending an updated FeatureTrackerConfig to the device.
"""

import cv2
import depthai as dai

# Create pipeline
pipeline = dai.Pipeline()

# Camera sources and their feature trackers
camLeft = pipeline.create(dai.node.MonoCamera)
camRight = pipeline.create(dai.node.MonoCamera)
trackerLeft = pipeline.create(dai.node.FeatureTracker)
trackerRight = pipeline.create(dai.node.FeatureTracker)

# Host-facing links
passthroughOutLeft = pipeline.create(dai.node.XLinkOut)
featuresOutLeft = pipeline.create(dai.node.XLinkOut)
passthroughOutRight = pipeline.create(dai.node.XLinkOut)
featuresOutRight = pipeline.create(dai.node.XLinkOut)
configIn = pipeline.create(dai.node.XLinkIn)

passthroughOutLeft.setStreamName("passthroughFrameLeft")
featuresOutLeft.setStreamName("trackedFeaturesLeft")
passthroughOutRight.setStreamName("passthroughFrameRight")
featuresOutRight.setStreamName("trackedFeaturesRight")
configIn.setStreamName("trackedFeaturesConfig")

# Properties
camLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
camLeft.setCamera("left")
camRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
camRight.setCamera("right")

# Detection only: disable the optical-flow motion estimator
trackerLeft.initialConfig.setMotionEstimator(False)
trackerRight.initialConfig.setMotionEstimator(False)

# Linking
camLeft.out.link(trackerLeft.inputImage)
trackerLeft.passthroughInputImage.link(passthroughOutLeft.input)
trackerLeft.outputFeatures.link(featuresOutLeft.input)
configIn.out.link(trackerLeft.inputConfig)

camRight.out.link(trackerRight.inputImage)
trackerRight.passthroughInputImage.link(passthroughOutRight.input)
trackerRight.outputFeatures.link(featuresOutRight.input)
configIn.out.link(trackerRight.inputConfig)

# Host-side copy of the config, used to toggle the corner detector at runtime
featureTrackerConfig = trackerRight.initialConfig.get()

print("Press 's' to switch between Harris and Shi-Thomasi corner detector!")

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Per-window (frame queue, features queue) pairs; window names double
    # as identifiers for cv2.imshow
    queues = {
        "left": (
            device.getOutputQueue("passthroughFrameLeft", 8, False),
            device.getOutputQueue("trackedFeaturesLeft", 8, False),
        ),
        "right": (
            device.getOutputQueue("passthroughFrameRight", 8, False),
            device.getOutputQueue("trackedFeaturesRight", 8, False),
        ),
    }
    configQueue = device.getInputQueue("trackedFeaturesConfig")

    def drawFeatures(frame, features):
        # One small filled red dot (BGR) per detected feature, in place
        for feature in features:
            center = (int(feature.position.x), int(feature.position.y))
            cv2.circle(frame, center, 2, (0, 0, 255), -1, cv2.LINE_AA, 0)

    while True:
        for windowName, (frameQueue, featuresQueue) in queues.items():
            # Mono frames arrive grayscale; convert so features can be drawn in color
            gray = frameQueue.get().getFrame()
            frame = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
            drawFeatures(frame, featuresQueue.get().trackedFeatures)
            cv2.imshow(windowName, frame)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        if key == ord('s'):
            # Toggle detector type, then push the updated config to both trackers
            Type = dai.FeatureTrackerConfig.CornerDetector.Type
            if featureTrackerConfig.cornerDetector.type == Type.HARRIS:
                featureTrackerConfig.cornerDetector.type = Type.SHI_THOMASI
                print("Switching to Shi-Thomasi")
            else:
                featureTrackerConfig.cornerDetector.type = Type.HARRIS
                print("Switching to Harris")

            cfg = dai.FeatureTrackerConfig()
            cfg.set(featureTrackerConfig)
            configQueue.send(cfg)

C++

1#include <iostream>
2
3// Includes common necessary includes for development using depthai library
4#include "depthai/depthai.hpp"
5#include "deque"
6#include "unordered_map"
7#include "unordered_set"
8
9static void drawFeatures(cv::Mat& frame, std::vector<dai::TrackedFeature>& features) {
10    static const auto pointColor = cv::Scalar(0, 0, 255);
11    static const int circleRadius = 2;
12    for(auto& feature : features) {
13        cv::circle(frame, cv::Point(feature.position.x, feature.position.y), circleRadius, pointColor, -1, cv::LINE_AA, 0);
14    }
15}
16
17int main() {
18    using namespace std;
19
20    // Create pipeline
21    dai::Pipeline pipeline;
22
23    // Define sources and outputs
24    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
25    auto monoRight = pipeline.create<dai::node::MonoCamera>();
26    auto featureTrackerLeft = pipeline.create<dai::node::FeatureTracker>();
27    auto featureTrackerRight = pipeline.create<dai::node::FeatureTracker>();
28
29    auto xoutPassthroughFrameLeft = pipeline.create<dai::node::XLinkOut>();
30    auto xoutTrackedFeaturesLeft = pipeline.create<dai::node::XLinkOut>();
31    auto xoutPassthroughFrameRight = pipeline.create<dai::node::XLinkOut>();
32    auto xoutTrackedFeaturesRight = pipeline.create<dai::node::XLinkOut>();
33    auto xinTrackedFeaturesConfig = pipeline.create<dai::node::XLinkIn>();
34
35    xoutPassthroughFrameLeft->setStreamName("passthroughFrameLeft");
36    xoutTrackedFeaturesLeft->setStreamName("trackedFeaturesLeft");
37    xoutPassthroughFrameRight->setStreamName("passthroughFrameRight");
38    xoutTrackedFeaturesRight->setStreamName("trackedFeaturesRight");
39    xinTrackedFeaturesConfig->setStreamName("trackedFeaturesConfig");
40
41    // Properties
42    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
43    monoLeft->setCamera("left");
44    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
45    monoRight->setCamera("right");
46
47    // Disable optical flow
48    featureTrackerLeft->initialConfig.setMotionEstimator(false);
49    featureTrackerRight->initialConfig.setMotionEstimator(false);
50
51    // Linking
52    monoLeft->out.link(featureTrackerLeft->inputImage);
53    featureTrackerLeft->passthroughInputImage.link(xoutPassthroughFrameLeft->input);
54    featureTrackerLeft->outputFeatures.link(xoutTrackedFeaturesLeft->input);
55    xinTrackedFeaturesConfig->out.link(featureTrackerLeft->inputConfig);
56
57    monoRight->out.link(featureTrackerRight->inputImage);
58    featureTrackerRight->passthroughInputImage.link(xoutPassthroughFrameRight->input);
59    featureTrackerRight->outputFeatures.link(xoutTrackedFeaturesRight->input);
60    xinTrackedFeaturesConfig->out.link(featureTrackerRight->inputConfig);
61
62    auto featureTrackerConfig = featureTrackerRight->initialConfig.get();
63
64    printf("Press 's' to switch between Harris and Shi-Thomasi corner detector! \n");
65
66    // Connect to device and start pipeline
67    dai::Device device(pipeline);
68
69    // Output queues used to receive the results
70    auto passthroughImageLeftQueue = device.getOutputQueue("passthroughFrameLeft", 8, false);
71    auto outputFeaturesLeftQueue = device.getOutputQueue("trackedFeaturesLeft", 8, false);
72    auto passthroughImageRightQueue = device.getOutputQueue("passthroughFrameRight", 8, false);
73    auto outputFeaturesRightQueue = device.getOutputQueue("trackedFeaturesRight", 8, false);
74
75    auto inputFeatureTrackerConfigQueue = device.getInputQueue("trackedFeaturesConfig");
76
77    const auto leftWindowName = "left";
78    const auto rightWindowName = "right";
79
80    while(true) {
81        auto inPassthroughFrameLeft = passthroughImageLeftQueue->get<dai::ImgFrame>();
82        cv::Mat passthroughFrameLeft = inPassthroughFrameLeft->getFrame();
83        cv::Mat leftFrame;
84        cv::cvtColor(passthroughFrameLeft, leftFrame, cv::COLOR_GRAY2BGR);
85
86        auto inPassthroughFrameRight = passthroughImageRightQueue->get<dai::ImgFrame>();
87        cv::Mat passthroughFrameRight = inPassthroughFrameRight->getFrame();
88        cv::Mat rightFrame;
89        cv::cvtColor(passthroughFrameRight, rightFrame, cv::COLOR_GRAY2BGR);
90
91        auto trackedFeaturesLeft = outputFeaturesLeftQueue->get<dai::TrackedFeatures>()->trackedFeatures;
92        drawFeatures(leftFrame, trackedFeaturesLeft);
93
94        auto trackedFeaturesRight = outputFeaturesRightQueue->get<dai::TrackedFeatures>()->trackedFeatures;
95        drawFeatures(rightFrame, trackedFeaturesRight);
96
97        // Show the frame
98        cv::imshow(leftWindowName, leftFrame);
99        cv::imshow(rightWindowName, rightFrame);
100
101        int key = cv::waitKey(1);
102        if(key == 'q') {
103            break;
104        } else if(key == 's') {
105            if(featureTrackerConfig.cornerDetector.type == dai::FeatureTrackerConfig::CornerDetector::Type::HARRIS) {
106                featureTrackerConfig.cornerDetector.type = dai::FeatureTrackerConfig::CornerDetector::Type::SHI_THOMASI;
107                printf("Switching to Shi-Thomasi \n");
108            } else {
109                featureTrackerConfig.cornerDetector.type = dai::FeatureTrackerConfig::CornerDetector::Type::HARRIS;
110                printf("Switching to Harris \n");
111            }
112            auto cfg = dai::FeatureTrackerConfig();
113            cfg.set(featureTrackerConfig);
114            inputFeatureTrackerConfigQueue->send(cfg);
115        }
116    }
117    return 0;
118}

Pipeline

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.