Edge detector

This example performs edge detection on three different inputs: the left, right, and RGB cameras. A hardware-accelerated 3x3 Sobel filter is used, and the Sobel kernels can be switched at runtime with the 1 and 2 keys.
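
The runtime switching works by sending an EdgeDetectorConfig message to the device. Below is a minimal sketch of that mechanism; it assumes a device that is already running the pipeline defined in the full example further down (the stream name "edge cfg" matches the XLinkIn node there):

import depthai as dai

# Build a config message carrying custom 3x3 Sobel kernels
cfg = dai.EdgeDetectorConfig()
sobelHorizontalKernel = [[1, 0, -1], [2, 0, -2], [1, 0, -1]]  # standard Sobel, horizontal gradient
sobelVerticalKernel = [[1, 2, 1], [0, 0, 0], [-1, -2, -1]]    # standard Sobel, vertical gradient
cfg.setSobelFilterKernels(sobelHorizontalKernel, sobelVerticalKernel)

# 'device' is assumed to be a dai.Device already running the pipeline from the example below
edgeCfgQueue = device.getInputQueue("edge cfg")
edgeCfgQueue.send(cfg)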

Demo

Setup

Please run the install script to download all required dependencies. Note that this script must be run from within the git repository, so clone the depthai-python repository first and then run the script:

git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
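
Alternatively, if you only need the depthai library itself (without the example dependencies), it can also be installed directly from PyPI; this is a general alternative, not a replacement for the script above:

python3 -m pip install depthai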

For additional information, please follow the installation guide.

Source code

Also available on GitHub

#!/usr/bin/env python3

import cv2
import depthai as dai
import numpy as np

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.createColorCamera()
monoLeft = pipeline.createMonoCamera()
monoRight = pipeline.createMonoCamera()

edgeDetectorLeft = pipeline.createEdgeDetector()
edgeDetectorRight = pipeline.createEdgeDetector()
edgeDetectorRgb = pipeline.createEdgeDetector()

xoutEdgeLeft = pipeline.createXLinkOut()
xoutEdgeRight = pipeline.createXLinkOut()
xoutEdgeRgb = pipeline.createXLinkOut()
xinEdgeCfg = pipeline.createXLinkIn()

edgeLeftStr = "edge left"
edgeRightStr = "edge right"
edgeRgbStr = "edge rgb"
edgeCfgStr = "edge cfg"

xoutEdgeLeft.setStreamName(edgeLeftStr)
xoutEdgeRight.setStreamName(edgeRightStr)
xoutEdgeRgb.setStreamName(edgeRgbStr)
xinEdgeCfg.setStreamName(edgeCfgStr)

# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)

monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)

edgeDetectorRgb.setMaxOutputFrameSize(camRgb.getVideoWidth() * camRgb.getVideoHeight())

# Linking
monoLeft.out.link(edgeDetectorLeft.inputImage)
monoRight.out.link(edgeDetectorRight.inputImage)
camRgb.video.link(edgeDetectorRgb.inputImage)

edgeDetectorLeft.outputImage.link(xoutEdgeLeft.input)
edgeDetectorRight.outputImage.link(xoutEdgeRight.input)
edgeDetectorRgb.outputImage.link(xoutEdgeRgb.input)

xinEdgeCfg.out.link(edgeDetectorLeft.inputConfig)
xinEdgeCfg.out.link(edgeDetectorRight.inputConfig)
xinEdgeCfg.out.link(edgeDetectorRgb.inputConfig)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Output/input queues
    edgeLeftQueue = device.getOutputQueue(edgeLeftStr, 8, False)
    edgeRightQueue = device.getOutputQueue(edgeRightStr, 8, False)
    edgeRgbQueue = device.getOutputQueue(edgeRgbStr, 8, False)
    edgeCfgQueue = device.getInputQueue(edgeCfgStr)

    print("Switch between sobel filter kernels using keys '1' and '2'")

    while(True):
        edgeLeft = edgeLeftQueue.get()
        edgeRight = edgeRightQueue.get()
        edgeRgb = edgeRgbQueue.get()

        edgeLeftFrame = edgeLeft.getFrame()
        edgeRightFrame = edgeRight.getFrame()
        edgeRgbFrame = edgeRgb.getFrame()

        # Show the frame
        cv2.imshow(edgeLeftStr, edgeLeftFrame)
        cv2.imshow(edgeRightStr, edgeRightFrame)
        cv2.imshow(edgeRgbStr, edgeRgbFrame)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break

        if key == ord('1'):
            print("Switching sobel filter kernel.")
            cfg = dai.EdgeDetectorConfig()
            sobelHorizontalKernel = [[1, 0, -1], [2, 0, -2], [1, 0, -1]]
            sobelVerticalKernel = [[1, 2, 1], [0, 0, 0], [-1, -2, -1]]
            cfg.setSobelFilterKernels(sobelHorizontalKernel, sobelVerticalKernel)
            edgeCfgQueue.send(cfg)

        if key == ord('2'):
            print("Switching sobel filter kernel.")
            cfg = dai.EdgeDetectorConfig()
            sobelHorizontalKernel = [[3, 0, -3], [10, 0, -10], [3, 0, -3]]
            sobelVerticalKernel = [[3, 10, 3], [0, 0, 0], [-3, -10, -3]]
            cfg.setSobelFilterKernels(sobelHorizontalKernel, sobelVerticalKernel)
            edgeCfgQueue.send(cfg)

Also available on GitHub

#include <iostream>

// Includes common necessary includes for development using the depthai library
#include "depthai/depthai.hpp"

int main() {
    using namespace std;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();

    auto edgeDetectorLeft = pipeline.create<dai::node::EdgeDetector>();
    auto edgeDetectorRight = pipeline.create<dai::node::EdgeDetector>();
    auto edgeDetectorRgb = pipeline.create<dai::node::EdgeDetector>();

    auto xoutEdgeLeft = pipeline.create<dai::node::XLinkOut>();
    auto xoutEdgeRight = pipeline.create<dai::node::XLinkOut>();
    auto xoutEdgeRgb = pipeline.create<dai::node::XLinkOut>();
    auto xinEdgeCfg = pipeline.create<dai::node::XLinkIn>();

    const auto edgeLeftStr = "edge left";
    const auto edgeRightStr = "edge right";
    const auto edgeRgbStr = "edge rgb";
    const auto edgeCfgStr = "edge cfg";

    xoutEdgeLeft->setStreamName(edgeLeftStr);
    xoutEdgeRight->setStreamName(edgeRightStr);
    xoutEdgeRgb->setStreamName(edgeRgbStr);
    xinEdgeCfg->setStreamName(edgeCfgStr);

    // Properties
    camRgb->setBoardSocket(dai::CameraBoardSocket::RGB);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);

    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    edgeDetectorRgb->setMaxOutputFrameSize(camRgb->getVideoWidth() * camRgb->getVideoHeight());

    // Linking
    monoLeft->out.link(edgeDetectorLeft->inputImage);
    monoRight->out.link(edgeDetectorRight->inputImage);
    camRgb->video.link(edgeDetectorRgb->inputImage);

    edgeDetectorLeft->outputImage.link(xoutEdgeLeft->input);
    edgeDetectorRight->outputImage.link(xoutEdgeRight->input);
    edgeDetectorRgb->outputImage.link(xoutEdgeRgb->input);

    xinEdgeCfg->out.link(edgeDetectorLeft->inputConfig);
    xinEdgeCfg->out.link(edgeDetectorRight->inputConfig);
    xinEdgeCfg->out.link(edgeDetectorRgb->inputConfig);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Output/input queues
    auto edgeLeftQueue = device.getOutputQueue(edgeLeftStr, 8, false);
    auto edgeRightQueue = device.getOutputQueue(edgeRightStr, 8, false);
    auto edgeRgbQueue = device.getOutputQueue(edgeRgbStr, 8, false);
    auto edgeCfgQueue = device.getInputQueue(edgeCfgStr);

    std::cout << "Switch between sobel filter kernels using keys '1' and '2'" << std::endl;

    while(true) {
        auto edgeLeft = edgeLeftQueue->get<dai::ImgFrame>();
        auto edgeRight = edgeRightQueue->get<dai::ImgFrame>();
        auto edgeRgb = edgeRgbQueue->get<dai::ImgFrame>();

        cv::Mat edgeLeftFrame = edgeLeft->getFrame();
        cv::Mat edgeRightFrame = edgeRight->getFrame();
        cv::Mat edgeRgbFrame = edgeRgb->getFrame();

        // Show the frame
        cv::imshow(edgeLeftStr, edgeLeftFrame);
        cv::imshow(edgeRightStr, edgeRightFrame);
        cv::imshow(edgeRgbStr, edgeRgbFrame);

        int key = cv::waitKey(1);
        switch(key) {
            case 'q':
                return 0;
                break;

            case '1': {
                std::cout << "Switching sobel filter kernel." << std::endl;
                dai::EdgeDetectorConfig cfg;
                std::vector<std::vector<int>> sobelHorizontalKernel = {{1, 0, -1}, {2, 0, -2}, {1, 0, -1}};
                std::vector<std::vector<int>> sobelVerticalKernel = {{1, 2, 1}, {0, 0, 0}, {-1, -2, -1}};
                cfg.setSobelFilterKernels(sobelHorizontalKernel, sobelVerticalKernel);
                edgeCfgQueue->send(cfg);
            } break;

            case '2': {
                std::cout << "Switching sobel filter kernel." << std::endl;
                dai::EdgeDetectorConfig cfg;
                std::vector<std::vector<int>> sobelHorizontalKernel = {{3, 0, -3}, {10, 0, -10}, {3, 0, -3}};
                std::vector<std::vector<int>> sobelVerticalKernel = {{3, 10, 3}, {0, 0, 0}, {-3, -10, -3}};
                cfg.setSobelFilterKernels(sobelHorizontalKernel, sobelVerticalKernel);
                edgeCfgQueue->send(cfg);
            } break;

            default:
                break;
        }
    }
    return 0;
}
