DepthAI
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

Holistic Record

Supported on: RVC2, RVC4
This example demonstrates how to record all selected input streams (such as video and IMU data) holistically during a DepthAI pipeline run, enabling full replay for development and testing. This example requires the DepthAI v3 API; see the installation instructions.

Pipeline

Source code

Python

Python
GitHub
#!/usr/bin/env python3
"""Holistic record example: record three camera streams and IMU data
during a live DepthAI pipeline run so the session can be replayed later."""

import cv2
import depthai as dai
import argparse
from pathlib import Path

parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", default="recordings", help="Output path")
args = parser.parse_args()

# Create output directory if it doesn't exist
Path(args.output).mkdir(parents=True, exist_ok=True)

# Create pipeline
with dai.Pipeline(True) as pipeline:
    # Define sources: three camera sockets, each producing a 600x400 output
    camA = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    camAOut = camA.requestOutput((600, 400))
    camB = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B)
    camBOut = camB.requestOutput((600, 400))
    camC = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C)
    camCOut = camC.requestOutput((600, 400))

    # On-device IMU: accelerometer at 500 Hz, gyroscope at 400 Hz,
    # delivered in batches of up to 100 reports
    imu = pipeline.create(dai.node.IMU)
    imu.enableIMUSensor(dai.IMUSensor.ACCELEROMETER_RAW, 500)
    imu.enableIMUSensor(dai.IMUSensor.GYROSCOPE_RAW, 400)
    imu.setBatchReportThreshold(100)

    # Recording configuration: encode video (bitrate 0 = automatic) as H.264 main profile
    config = dai.RecordConfig()
    config.outputDir = args.output
    config.videoEncoding.enabled = True
    config.videoEncoding.bitrate = 0  # Automatic
    config.videoEncoding.profile = dai.VideoEncoderProperties.Profile.H264_MAIN

    # Record every stream in the pipeline holistically for later replay
    pipeline.enableHolisticRecord(config)

    videoQueueA = camAOut.createOutputQueue()
    videoQueueB = camBOut.createOutputQueue()
    videoQueueC = camCOut.createOutputQueue()
    imuQueue = imu.out.createOutputQueue()

    # Connect to device and start pipeline
    pipeline.start()
    while pipeline.isRunning():
        videoInA: dai.ImgFrame = videoQueueA.get()
        videoInB: dai.ImgFrame = videoQueueB.get()
        videoInC: dai.ImgFrame = videoQueueC.get()
        # tryGetAll() returns a (possibly empty) list of dai.IMUData messages,
        # not a single message — the previous annotation was misleading.
        imuData = imuQueue.tryGetAll()

        # Get BGR frame from NV12 encoded video frame to show with opencv
        # Visualizing the frame on slower hosts might have overhead
        cv2.imshow("video", videoInA.getCvFrame())
        if imuData:
            for packet in imuData[0].packets:
                print(f"IMU Accelerometer: {packet.acceleroMeter}")
                print(f"IMU Gyroscope: {packet.gyroscope}")

        # Quit check moved out of the `if imuData:` block — previously 'q' was
        # only honored on iterations where IMU data happened to arrive.
        if cv2.waitKey(1) == ord('q'):
            break

C++

1#include <filesystem>
2
3#include "depthai/common/CameraBoardSocket.hpp"
4#include "depthai/depthai.hpp"
5#include "depthai/pipeline/node/host/Display.hpp"
6#include "depthai/utility/RecordReplay.hpp"
7#include "utility.hpp"
8#ifndef DEPTHAI_MERGED_TARGET
9    #error This example needs OpenCV support, which is not available on your system
10#endif
11
12int main(int argc, char** argv) {
13    dai::Pipeline pipeline;
14    auto camA = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
15    auto* camAOut = camA->requestOutput({600, 400});
16    auto camB = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_B);
17    auto* camBOut = camB->requestOutput({600, 400});
18    auto camC = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_C);
19    auto* camCOut = camC->requestOutput({600, 400});
20
21    auto imu = pipeline.create<dai::node::IMU>();
22
23    auto display = pipeline.create<dai::node::Display>();
24
25    // enable ACCELEROMETER_RAW at 500 hz rate
26    imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, 500);
27    // enable GYROSCOPE_RAW at 400 hz rate
28    imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 400);
29    imu->setBatchReportThreshold(100);
30
31    camAOut->link(display->input);
32    auto q = imu->out.createOutputQueue();
33
34    auto camAqueue = camAOut->createOutputQueue();
35    auto camBqueue = camBOut->createOutputQueue();
36    auto camCqueue = camCOut->createOutputQueue();
37
38    dai::RecordConfig config;
39    config.outputDir = argc > 1 ? std::string(argv[1]) : getDefaultRecordingPath();
40    config.videoEncoding.enabled = true;  // Use video encoding
41    config.videoEncoding.bitrate = 0;     // Automatic
42    config.videoEncoding.profile = dai::VideoEncoderProperties::Profile::H264_MAIN;
43
44    pipeline.enableHolisticRecord(config);
45
46    pipeline.start();
47
48    auto start = std::chrono::steady_clock::now();
49
50    try {
51        while(std::chrono::steady_clock::now() - start < std::chrono::seconds(15)) {
52            auto imuData = q->get<dai::IMUData>();
53            auto imuPackets = imuData->packets;
54            for(auto& imuPacket : imuPackets) {
55                auto& acceleroValues = imuPacket.acceleroMeter;
56                auto& gyroValues = imuPacket.gyroscope;
57
58                // printf("Accelerometer [m/s^2]: x: %.3f y: %.3f z: %.3f \n", acceleroValues.x, acceleroValues.y, acceleroValues.z);
59                // printf("Gyroscope [rad/s]: x: %.3f y: %.3f z: %.3f \n", gyroValues.x, gyroValues.y, gyroValues.z);
60            }
61            std::this_thread::sleep_for(std::chrono::milliseconds(1));
62        }
63    } catch(...) {
64    }
65
66    pipeline.stop();
67}

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.