DepthAI
  • DepthAI Components
    • AprilTags
    • Benchmark
    • Camera
    • Calibration
    • DetectionNetwork
    • Events
    • FeatureTracker
    • Gate
    • HostNodes
    • ImageAlign
    • ImageManip
    • IMU
    • Misc
    • Model Zoo
    • NeuralDepth
    • NeuralNetwork
    • ObjectTracker
    • RecordReplay
    • RGBD
    • Script
    • SpatialDetectionNetwork
    • SpatialLocationCalculator
    • StereoDepth
    • Sync
    • VideoEncoder
    • Visualizer
    • Warp
    • RVC2-specific
  • Advanced Tutorials
  • API Reference
  • Tools
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

NN Archive Superblob

Supported on: RVC2
The example showcases loading a YOLOv6-Nano superblob model from the model zoo, configuring a neural network node with a 416x416 RGB camera input, and processing detection and passthrough outputs. This example requires the DepthAI v3 API, see installation instructions.

Pipeline

Source code

Python

Python
GitHub
#!/usr/bin/env python3
"""Load a YOLOv6-Nano SUPERBLOB NNArchive from the model zoo, inspect its
config, and run it in a pipeline fed by a 416x416 BGR camera stream."""

import time
import depthai as dai

# We will download a superblob NNArchive from the model zoo.
# Pick your own model from the Luxonis model zoo: https://models.luxonis.com
modelDescription = dai.NNModelDescription()
modelDescription.model = "yolov6-nano"
modelDescription.platform = "RVC2"

# Download model from zoo (reusing the local cache when available) and load it
archivePath = dai.getModelFromZoo(modelDescription, useCached=True)
archive = dai.NNArchive(archivePath)

# Archive knows it is a superblob archive
assert archive.getModelType() == dai.ModelType.SUPERBLOB

# Therefore, getSuperBlob() is available
assert archive.getSuperBlob() is not None

# There is no blob or other model format available
assert archive.getBlob() is None
assert archive.getOtherModelFormat() is None

# You can access any config version
v1config: dai.nn_archive.v1.Config = archive.getConfig()

# Print some config fields
print("-" * 10)
print("Config fields:")
print(f"\tConfig version: {v1config.configVersion}")
print(f"\tModel heads: {v1config.model.heads}")
print(f"\tModel inputs: {v1config.model.inputs}")
print(f"\tModel metadata: {v1config.model.metadata}")
print(f"\tModel outputs: {v1config.model.outputs}")
print("-" * 10)

with dai.Pipeline() as pipeline:
    # Color camera node producing 416x416 BGR planar frames for the network
    camRgb = pipeline.create(dai.node.Camera).build()
    outCam = camRgb.requestOutput((416, 416), dai.ImgFrame.Type.BGR888p)

    # Neural network node: compile the superblob into a blob for 6 SHAVE cores
    blob = archive.getSuperBlob().getBlobWithNumShaves(6)
    neuralNetwork = pipeline.create(dai.node.NeuralNetwork)
    neuralNetwork.setBlob(blob)
    neuralNetwork.setNumInferenceThreads(2)

    # Linking: camera frames feed the network input
    outCam.link(neuralNetwork.input)

    # Queues for inference results and the passthrough of the input frame
    nnDetectionQueue = neuralNetwork.out.createOutputQueue()
    nnPassthroughQueue = neuralNetwork.passthrough.createOutputQueue()

    pipeline.start()

    while pipeline.isRunning():
        # Blocking gets; both messages arrive per processed frame
        in_nn = nnDetectionQueue.get()
        in_nnPassthrough = nnPassthroughQueue.get()
        print("Data received")
        time.sleep(0.1)

C++

1#include <atomic>
2#include <chrono>
3#include <csignal>
4#include <iostream>
5#include <memory>
6#include <thread>
7
8#include "depthai/depthai.hpp"
9#include "depthai/openvino/OpenVINO.hpp"
10
// Global flag for graceful shutdown: set by the signal handler, polled by
// the main loop. A lock-free std::atomic store is safe inside a signal handler.
std::atomic<bool> quitEvent(false);

// Signal handler for SIGINT/SIGTERM: only requests shutdown; all cleanup
// happens on the main thread after the loop observes the flag.
void signalHandler(int signum) {
    quitEvent = true;
}
18
19int main() {
20    // Set up signal handlers
21    signal(SIGTERM, signalHandler);
22    signal(SIGINT, signalHandler);
23
24    try {
25        // Get model from zoo
26        dai::NNModelDescription modelDesc;
27        modelDesc.model = "yolov6-nano";
28        modelDesc.platform = "RVC2";
29        auto archivePath = dai::getModelFromZoo(modelDesc, true);  // true to use cached if available, otherwise re-download
30
31        // Load NN archive
32        dai::NNArchive archive(archivePath);
33
34        // Verify archive type and properties
35        if(archive.getModelType() != dai::model::ModelType::SUPERBLOB) {
36            throw std::runtime_error("Archive is not a superblob type");
37        }
38
39        if(!archive.getSuperBlob()) {
40            throw std::runtime_error("SuperBlob should not be null for superblob type");
41        }
42
43        if(archive.getBlob()) {
44            throw std::runtime_error("Blob should be null for superblob type");
45        }
46
47        // Get config and print some fields
48        auto config = archive.getConfig<dai::nn_archive::v1::Config>();
49        std::cout << "----------" << std::endl;
50        std::cout << "Config fields:" << std::endl;
51        std::cout << "\tConfig version: " << config.configVersion.value() << std::endl;
52        std::cout << "\tModel heads: " << config.model.heads.value().size() << std::endl;
53        std::cout << "\tModel inputs: " << config.model.inputs.size() << std::endl;
54        std::cout << "\tModel outputs: " << config.model.outputs.size() << std::endl;
55        std::cout << "----------" << std::endl;
56
57        // Create pipeline
58        dai::Pipeline pipeline;
59
60        // Color camera node
61        auto camRgb = pipeline.create<dai::node::Camera>()->build();
62        auto camOut = camRgb->requestOutput(std::make_pair(416, 416), dai::ImgFrame::Type::BGR888p);
63
64        // Neural network node
65        auto neuralNetwork = pipeline.create<dai::node::NeuralNetwork>();
66        neuralNetwork->setBlob(archive.getSuperBlob()->getBlobWithNumShaves(6));
67        neuralNetwork->setNumInferenceThreads(2);
68
69        // Linking
70        camOut->link(neuralNetwork->input);
71
72        // Create output queues
73        auto qDet = neuralNetwork->out.createOutputQueue();
74        auto qPassthrough = neuralNetwork->passthrough.createOutputQueue();
75
76        // Start pipeline
77        pipeline.start();
78
79        while(pipeline.isRunning() && !quitEvent) {
80            auto inDet = qDet->get<dai::NNData>();
81            auto inPassthrough = qPassthrough->get<dai::ImgFrame>();
82
83            if(inDet != nullptr) {
84                std::cout << "Detection data received" << std::endl;
85            }
86
87            if(inPassthrough != nullptr) {
88                std::cout << "Passthrough frame received" << std::endl;
89            }
90
91            std::this_thread::sleep_for(std::chrono::milliseconds(100));
92        }
93
94        // Cleanup
95        pipeline.stop();
96        pipeline.wait();
97
98    } catch(const std::exception& e) {
99        std::cerr << "Error: " << e.what() << std::endl;
100        return 1;
101    }
102
103    return 0;
104}

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.