ON THIS PAGE

  • Demo
  • Source code
  • Pipeline

RGB & Mono Encoding

This example shows how to set up the encoder node to encode the RGB camera and both grayscale cameras (of DepthAI/OAK-D) at the same time. The RGB is set to 1920x1080 and the grayscale cameras are set to 1280x720 each, all at 30 FPS. Each encoded video stream is transferred over XLINK and saved to a respective file. Pressing Ctrl+C will stop the recording and then convert it using ffmpeg into an mp4 to make it playable. Note that ffmpeg will need to be installed and runnable for the conversion to mp4 to succeed. Be careful: this example saves encoded video to your host storage, so if you leave it running, you could fill up the storage on your host.

Similar samples:

Demo

This example requires the DepthAI v3 API, see installation instructions.

Source code

Python

Python
GitHub
#!/usr/bin/env python3

"""Encode the RGB camera (H.265) and both mono cameras (H.264) of an OAK
device simultaneously, stream the bitstreams over XLink, and append them to
raw stream files on the host. Press Ctrl+C to stop recording."""

import depthai as dai

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
ve1 = pipeline.create(dai.node.VideoEncoder)
ve2 = pipeline.create(dai.node.VideoEncoder)
ve3 = pipeline.create(dai.node.VideoEncoder)

ve1Out = pipeline.create(dai.node.XLinkOut)
ve2Out = pipeline.create(dai.node.XLinkOut)
ve3Out = pipeline.create(dai.node.XLinkOut)

ve1Out.setStreamName('ve1Out')
ve2Out.setStreamName('ve2Out')
ve3Out.setStreamName('ve3Out')

# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
monoLeft.setCamera("left")
monoRight.setCamera("right")
# Create encoders, one for each camera, consuming the frames and encoding them
# using H.264 (mono) / H.265 (color) at 30 FPS
ve1.setDefaultProfilePreset(30, dai.VideoEncoderProperties.Profile.H264_MAIN)
ve2.setDefaultProfilePreset(30, dai.VideoEncoderProperties.Profile.H265_MAIN)
ve3.setDefaultProfilePreset(30, dai.VideoEncoderProperties.Profile.H264_MAIN)

# Linking: mono cameras -> H.264 encoders, color camera -> H.265 encoder
monoLeft.out.link(ve1.input)
camRgb.video.link(ve2.input)
monoRight.out.link(ve3.input)
ve1.bitstream.link(ve1Out.input)
ve2.bitstream.link(ve2Out.input)
ve3.bitstream.link(ve3Out.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as dev:

    # Output queues will be used to get the encoded data from the outputs defined above
    outQ1 = dev.getOutputQueue(name='ve1Out', maxSize=30, blocking=True)
    outQ2 = dev.getOutputQueue(name='ve2Out', maxSize=30, blocking=True)
    outQ3 = dev.getOutputQueue(name='ve3Out', maxSize=30, blocking=True)

    # The .h264 / .h265 files are raw stream files (not playable yet)
    with open('mono1.h264', 'wb') as fileMono1H264, open('color.h265', 'wb') as fileColorH265, open('mono2.h264', 'wb') as fileMono2H264:
        print("Press Ctrl+C to stop encoding...")
        # The try block wraps the WHOLE loop, not a single iteration: in the
        # previous version the try lived inside `while True`, so a Ctrl+C
        # arriving between iterations (outside the try) escaped uncaught and
        # killed the script with a traceback instead of stopping cleanly.
        try:
            while True:
                # Empty each queue, appending the raw bitstream to its file
                while outQ1.has():
                    outQ1.get().getData().tofile(fileMono1H264)

                while outQ2.has():
                    outQ2.get().getData().tofile(fileColorH265)

                while outQ3.has():
                    outQ3.get().getData().tofile(fileMono2H264)
        except KeyboardInterrupt:
            # Keyboard interrupt (Ctrl + C) detected - stop recording
            pass

    print("To view the encoded data, convert the stream file (.h264/.h265) into a video file (.mp4), using commands below:")
    cmd = "ffmpeg -framerate 30 -i {} -c copy {}"
    print(cmd.format("mono1.h264", "mono1.mp4"))
    print(cmd.format("mono2.h264", "mono2.mp4"))
    print(cmd.format("color.h265", "color.mp4"))

C++

#include <atomic>
#include <csignal>
#include <fstream>
#include <iostream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
6
// Cleared by the SIGINT (Ctrl+C) handler; the main capture loop polls it.
static std::atomic<bool> alive{true};

// Signal handler: request a graceful shutdown instead of terminating abruptly.
static void sigintHandler(int sig) {
    alive.store(false);
}
12
13int main() {
14    using namespace std;
15    std::signal(SIGINT, &sigintHandler);
16
17    // Create pipeline
18    dai::Pipeline pipeline;
19
20    // Define sources and outputs
21    auto camRgb = pipeline.create<dai::node::ColorCamera>();
22    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
23    auto monoRight = pipeline.create<dai::node::MonoCamera>();
24    auto ve1 = pipeline.create<dai::node::VideoEncoder>();
25    auto ve2 = pipeline.create<dai::node::VideoEncoder>();
26    auto ve3 = pipeline.create<dai::node::VideoEncoder>();
27
28    auto ve1Out = pipeline.create<dai::node::XLinkOut>();
29    auto ve2Out = pipeline.create<dai::node::XLinkOut>();
30    auto ve3Out = pipeline.create<dai::node::XLinkOut>();
31
32    ve1Out->setStreamName("ve1Out");
33    ve2Out->setStreamName("ve2Out");
34    ve3Out->setStreamName("ve3Out");
35
36    // Properties
37    camRgb->setBoardSocket(dai::CameraBoardSocket::CAM_A);
38    monoLeft->setCamera("left");
39    monoRight->setCamera("right");
40    // Create encoders, one for each camera, consuming the frames and encoding them using H.264 / H.265 encoding
41    ve1->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::H264_MAIN);
42    ve2->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::H265_MAIN);
43    ve3->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::H264_MAIN);
44
45    // Linking
46    monoLeft->out.link(ve1->input);
47    camRgb->video.link(ve2->input);
48    monoRight->out.link(ve3->input);
49    ve1->bitstream.link(ve1Out->input);
50    ve2->bitstream.link(ve2Out->input);
51    ve3->bitstream.link(ve3Out->input);
52
53    // Connect to device and start pipeline
54    dai::Device device(pipeline);
55
56    // Output queues will be used to get the encoded data from the output defined above
57    auto outQ1 = device.getOutputQueue("ve1Out", 30, true);
58    auto outQ2 = device.getOutputQueue("ve2Out", 30, true);
59    auto outQ3 = device.getOutputQueue("ve3Out", 30, true);
60
61    // The .h264 / .h265 files are raw stream files (not playable yet)
62    auto videoFile1 = std::ofstream("mono1.h264", std::ios::binary);
63    auto videoFile2 = std::ofstream("color.h265", std::ios::binary);
64    auto videoFile3 = std::ofstream("mono2.h264", std::ios::binary);
65    cout << "Press Ctrl+C to stop encoding..." << endl;
66
67    while(alive) {
68        auto out1 = outQ1->get<dai::ImgFrame>();
69        videoFile1.write((char*)out1->getData().data(), out1->getData().size());
70        auto out2 = outQ2->get<dai::ImgFrame>();
71        videoFile2.write((char*)out2->getData().data(), out2->getData().size());
72        auto out3 = outQ3->get<dai::ImgFrame>();
73        videoFile3.write((char*)out3->getData().data(), out3->getData().size());
74    }
75
76    cout << "To view the encoded data, convert the stream file (.h264/.h265) into a video file (.mp4), using a command below:" << endl;
77    cout << "ffmpeg -framerate 30 -i mono1.h264 -c copy mono1.mp4" << endl;
78    cout << "ffmpeg -framerate 30 -i mono2.h264 -c copy mono2.mp4" << endl;
79    cout << "ffmpeg -framerate 30 -i color.h265 -c copy color.mp4" << endl;
80
81    return 0;
82}

Pipeline

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.