DepthAI
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

Custom Visualizations

Supported on: RVC2, RVC4
The example creates a DepthAI pipeline in Python to perform YOLOv6-Nano object detection, generates custom image annotations with bounding boxes and text, and streams the results along with 640x480 NV12 camera frames over to a remote connection for visualization. This example requires the DepthAI v3 API; see the installation instructions.

Pipeline

Source code

Python

Python
GitHub
1#!/usr/bin/env python3
2
3import depthai as dai
4
class ImgDetectionsExtended(dai.ImgDetections):
    """ImgDetections subclass that knows how to render itself.

    Copies the detections (and timestamp) from an existing
    dai.ImgDetections message; the visualizer calls
    getVisualizationMessage() to obtain drawable annotations.
    """

    def __init__(self, detections: dai.ImgDetections):
        super().__init__()
        self.detections = detections.detections
        # Copy the timestamp right away so the message is usable even if the
        # caller forgets to call setTimestamp() - the visualizer relies on it
        # to synchronize the annotations with the camera frames.
        self.setTimestamp(detections.getTimestamp())

    # The function can return dai.ImgAnnotations or dai.ImgFrame
    def getVisualizationMessage(self) -> dai.ImgAnnotations:
        """Build one outlined/filled bounding box plus a text label per detection."""
        imgAnnt = dai.ImgAnnotations()
        # Setting the timestamp is important, as the visualizer uses it to synchronize the data
        imgAnnt.setTimestamp(self.getTimestamp())
        annotation = dai.ImgAnnotation()
        for detection in self.detections:
            # Bounding-box corners drawn as a line strip.
            pointsAnnotation = dai.PointsAnnotation()
            pointsAnnotation.type = dai.PointsAnnotationType.LINE_STRIP
            pointsAnnotation.points = dai.VectorPoint2f([
                dai.Point2f(detection.xmin, detection.ymin),
                dai.Point2f(detection.xmax, detection.ymin),
                dai.Point2f(detection.xmax, detection.ymax),
                dai.Point2f(detection.xmin, detection.ymax),
            ])
            pointsAnnotation.outlineColor = dai.Color(1.0, 0.5, 0.5, 1.0)
            pointsAnnotation.fillColor = dai.Color(0.5, 1.0, 0.5, 0.5)
            pointsAnnotation.thickness = 2.0
            # Text label anchored at the top-left corner of the box.
            text = dai.TextAnnotation()
            text.position = dai.Point2f(detection.xmin, detection.ymin)
            text.text = "Test annotation"
            text.fontSize = 50.5
            text.textColor = dai.Color(0.5, 0.5, 1.0, 1.0)
            text.backgroundColor = dai.Color(1.0, 1.0, 0.5, 1.0)
            annotation.points.append(pointsAnnotation)
            annotation.texts.append(text)

        imgAnnt.annotations.append(annotation)
        return imgAnnt
44
class ImgAnnotationsGenerator(dai.node.ThreadedHostNode):
    """Host-side node that wraps incoming detections into
    ImgDetectionsExtended messages and forwards them on its output."""

    def __init__(self):
        super().__init__()
        # One input for detections, one output for the wrapped messages.
        self.inputDet = self.createInput()
        self.output = self.createOutput()

    def run(self):
        # Consume detection messages until the pipeline is torn down.
        while self.isRunning():
            detectionsMsg = self.inputDet.get()
            wrapped = ImgDetectionsExtended(detectionsMsg)
            # Propagate the original timestamp - the visualizer uses it to
            # synchronize the annotations with the camera frames.
            wrapped.setTimestamp(detectionsMsg.getTimestamp())
            self.output.send(wrapped)
58
remoteConnector = dai.RemoteConnection()

# Build and run the pipeline
with dai.Pipeline() as pipeline:
    # Camera feeding a YOLOv6-Nano detection network.
    cameraNode = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    detectionNetwork = pipeline.create(dai.node.DetectionNetwork).build(
        cameraNode, dai.NNModelDescription("yolov6-nano")
    )
    imageAnnotationsGenerator = pipeline.create(ImgAnnotationsGenerator)

    # 640x480 NV12 stream for the visualizer.
    outputToVisualize = cameraNode.requestOutput((640, 480), type=dai.ImgFrame.Type.NV12)
    detectionNetwork.out.link(imageAnnotationsGenerator.inputDet)

    # Expose the streams to the remote visualizer.
    for topicName, topicOutput in (
        ("encoded", outputToVisualize),
        ("detections", detectionNetwork.out),
        ("annotations", imageAnnotationsGenerator.output),
    ):
        remoteConnector.addTopic(topicName, topicOutput, "images")

    pipeline.start()

    # Register the pipeline with the remote connector.
    remoteConnector.registerPipeline(pipeline)

    # Run until the pipeline stops or the user presses 'q' in the visualizer.
    while pipeline.isRunning():
        key = remoteConnector.waitKey(1)
        if key == ord("q"):
            pipeline.stop()
            break

C++

1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <memory>
5#include <vector>
6
7#include "depthai/depthai.hpp"
8#include "depthai/pipeline/datatype/ImgAnnotations.hpp"
9#include "depthai/remote_connection/RemoteConnection.hpp"
// Global flag for graceful shutdown, polled by the main loop.
std::atomic<bool> quitEvent(false);

// Signal handler for SIGINT/SIGTERM.
// Only sets an atomic flag - one of the few operations that are safe
// inside a signal handler; the main loop observes the flag and exits.
void signalHandler(int signum) {
    (void)signum;  // unused - any handled signal requests shutdown
    quitEvent = true;
}
17
18// Extended detections class
19class ImgDetectionsExtended : public dai::ImgDetections {
20   public:
21    ImgDetectionsExtended(const std::shared_ptr<dai::ImgDetections>& detections) {
22        this->detections = detections->detections;
23        this->setTimestamp(detections->getTimestamp());
24    }
25
26    std::shared_ptr<dai::Buffer> getVisualizationMessage() {
27        auto imgAnnt = std::make_shared<dai::ImgAnnotations>();
28        imgAnnt->setTimestamp(this->getTimestamp());
29
30        auto annotation = std::make_shared<dai::ImgAnnotation>();
31
32        for(const auto& detection : this->detections) {
33            // Create points annotation for bounding box
34            auto pointsAnnotation = std::make_shared<dai::PointsAnnotation>();
35            pointsAnnotation->type = dai::PointsAnnotationType::LINE_STRIP;
36            pointsAnnotation->points = {dai::Point2f(detection.xmin, detection.ymin),
37                                        dai::Point2f(detection.xmax, detection.ymin),
38                                        dai::Point2f(detection.xmax, detection.ymax),
39                                        dai::Point2f(detection.xmin, detection.ymax)};
40
41            // Set colors and thickness
42            pointsAnnotation->outlineColor = dai::Color(1.0f, 0.5f, 0.5f, 1.0f);
43            pointsAnnotation->fillColor = dai::Color(0.5f, 1.0f, 0.5f, 0.5f);
44            pointsAnnotation->thickness = 2.0f;
45
46            // Create text annotation
47            auto text = std::make_shared<dai::TextAnnotation>();
48            text->position = dai::Point2f(detection.xmin, detection.ymin);
49            text->text = "Test annotation";
50            text->fontSize = 50.5f;
51            text->textColor = dai::Color(0.5f, 0.5f, 1.0f, 1.0f);
52            text->backgroundColor = dai::Color(1.0f, 1.0f, 0.5f, 1.0f);
53
54            annotation->points.push_back(*pointsAnnotation);
55            annotation->texts.push_back(*text);
56        }
57
58        imgAnnt->annotations.push_back(*annotation);
59        return imgAnnt;
60    }
61};
62
63// Custom host node for image annotations
64class ImgAnnotationsGenerator : public dai::NodeCRTP<dai::node::HostNode, ImgAnnotationsGenerator> {
65   public:
66    Input& inputDet = inputs["detections"];
67    Output& output = out;
68
69    std::shared_ptr<ImgAnnotationsGenerator> build(Output& detections) {
70        detections.link(inputDet);
71        return std::static_pointer_cast<ImgAnnotationsGenerator>(this->shared_from_this());
72    }
73
74    std::shared_ptr<dai::Buffer> processGroup(std::shared_ptr<dai::MessageGroup> in) override {
75        auto nnData = in->get<dai::ImgDetections>("detections");
76        auto extended = std::make_shared<ImgDetectionsExtended>(nnData);
77        return extended->getVisualizationMessage();
78    }
79};
80
81int main() {
82    // Set up signal handlers
83    signal(SIGTERM, signalHandler);
84    signal(SIGINT, signalHandler);
85
86    try {
87        // Create remote connection
88        dai::RemoteConnection remoteConnector;
89
90        // Create pipeline
91        dai::Pipeline pipeline;
92
93        // Create nodes
94        auto cameraNode = pipeline.create<dai::node::Camera>();
95        cameraNode->build(dai::CameraBoardSocket::CAM_A);
96
97        auto detectionNetwork = pipeline.create<dai::node::DetectionNetwork>();
98        dai::NNModelDescription modelDesc;
99        modelDesc.model = "yolov6-nano";
100        detectionNetwork->build(cameraNode, modelDesc);
101
102        auto imageAnnotationsGenerator = pipeline.create<ImgAnnotationsGenerator>();
103        auto outputToVisualize = cameraNode->requestOutput(std::make_pair(640, 480), dai::ImgFrame::Type::NV12);
104
105        // Linking
106        detectionNetwork->out.link(imageAnnotationsGenerator->inputDet);
107
108        // Add remote connector topics
109        remoteConnector.addTopic("encoded", *outputToVisualize, "images");
110        remoteConnector.addTopic("detections", detectionNetwork->out, "images");
111        remoteConnector.addTopic("annotations", imageAnnotationsGenerator->output, "images");
112
113        // Start pipeline
114        pipeline.start();
115
116        // Register pipeline with remote connector
117        remoteConnector.registerPipeline(pipeline);
118
119        // Main loop
120        while(pipeline.isRunning() && !quitEvent) {
121            if(remoteConnector.waitKey(1) == 'q') {
122                pipeline.stop();
123                break;
124            }
125        }
126
127        // Cleanup
128        pipeline.stop();
129        pipeline.wait();
130
131    } catch(const std::exception& e) {
132        std::cerr << "Error: " << e.what() << std::endl;
133        return 1;
134    }
135
136    return 0;
137}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.