# Video Encode

This example showcases how you can use the [Video
Encoder](https://docs.luxonis.com/software-v3/depthai/depthai-components/nodes/video_encoder.md) node, which encodes video frames
on-device into the MJPEG, H264, or H265 video codec. It creates a custom [Host
Node](https://docs.luxonis.com/software-v3/depthai/depthai-components/host_nodes.md) called VideoSaver, which receives encoded
video frames from the Video Encoder node and saves them to a file on the host computer.

After the recording ends, the user has to use `ffmpeg` to convert the raw encoded stream into a playable video file. One could
extend the VideoSaver node to save directly into a container, as shown in the [Save encoded video stream into mp4
container](https://github.com/luxonis/oak-examples/tree/master/gen2-container-encoding#save-encoded-video-stream-into-mp4-container)
experiment.

## Demo output

```bash
python3 video_encode.py
Started to save video to video.encoded
Press Ctrl+C to stop
To view the encoded data, convert the stream file (.encoded) into a video file (.mp4) using a command below:
ffmpeg -framerate 30 -i video.encoded -c copy video.mp4
```

After running the `ffmpeg` command, we recommend using the VLC media player to view the resulting video file, especially for the
H265 format, as it is not supported by all video players (e.g. QuickTime on macOS).

This example requires the DepthAI v3 API, see [installation instructions](https://docs.luxonis.com/software-v3/depthai.md).

## Pipeline

### examples/video_encode.pipeline.json

```json
{"pipeline": {"connections": [{"node1Id": 1, "node1Output": "out", "node1OutputGroup": "", "node2Id": 4, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 0, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 6, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 0, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 1, "node2Input": "in", "node2InputGroup": ""}], "globalProperties": {"calibData": null, "cameraTuningBlobSize": null, "cameraTuningBlobUri": "", "leonCssFrequencyHz": 700000000.0, "leonMssFrequencyHz": 700000000.0, "pipelineName": null, "pipelineVersion": null, "sippBufferSize": 18432, "sippDmaBufferSize": 16384, "xlinkChunkSize": -1}, "nodes": [[6, {"alias": "", "id": 6, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 8, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_0_0"}}], [4, {"alias": "", "id": 4, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 7, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_1__out"}}], [1, {"alias": "", "id": 1, "ioInfo": [[["", "out"], {"blocking": false, "group": "", "id": 6, "name": "out", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "bitstream"], {"blocking": false, "group": "", "id": 5, "name": "bitstream", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "in"], {"blocking": true, "group": "", "id": 4, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "VideoEncoder", "parentId": -1, "properties": {"bitrate": 0, "frameRate": 30.0, "keyframeFrequency": 30, "lossless": false, "maxBitrate": 0, "numBFrames": 0, "numFramesPool": 0, "outputFrameSize": 0, "profile": 4, "quality": 80, 
"rateCtrlMode": 0}}], [0, {"alias": "", "id": 0, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 3, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 2, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 1, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 0, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 0, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 0, "aeRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "afRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "antiBandingMode": 0, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 0, "brightness": 0, "captureIntent": 0, "chromaDenoise": 0, "cmdMask": 0, "contrast": 0, "controlMode": 0, "effectMode": 0, "enableHdr": false, "expCompensation": 0, "expManual": {"exposureTimeUs": 0, "frameDurationUs": 0, "sensitivityIso": 0}, "frameSyncMode": 0, "lensPosAutoInfinity": 0, "lensPosAutoMacro": 0, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 0, "lowPowerNumFramesDiscard": 0, "lumaDenoise": 0, "miscControls": [], "saturation": 0, "sceneMode": 0, "sharpness": 0, "strobeConfig": {"activeLevel": 0, "enable": 0, "gpioNumber": 0}, "strobeTimings": {"durationUs": 0, "exposureBeginOffsetUs": 0, "exposureEndOffsetUs": 0}, "wbColorTemp": 0}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": 
{"index": 0, "value": [1920, 1440]}}, "type": 22}], "resolutionHeight": -1, "resolutionWidth": -1}}]]}}
```

## Source Code

#### Python

```python
import depthai as dai

# Capture Ctrl+C and set a flag to stop the loop
import time
import cv2
import threading
import signal

# Encoding profile for the VideoEncoder node.
PROFILE = dai.VideoEncoderProperties.Profile.MJPEG # or H265_MAIN, H264_MAIN

# Set by the signal handler so the main loop can exit gracefully.
quitEvent = threading.Event()

def _requestQuit(*_args):
    """Signal handler: ask the main loop to stop on SIGINT/SIGTERM."""
    quitEvent.set()

for _sig in (signal.SIGTERM, signal.SIGINT):
    signal.signal(_sig, _requestQuit)

class VideoSaver(dai.node.HostNode):
    """Host node that appends encoded bitstream packets to 'video.encoded'.

    The stream is written raw; use ffmpeg afterwards to mux it into a
    playable container (see the instructions printed by the script).
    """

    def __init__(self, *args, **kwargs):
        # Use super() instead of an explicit base-class call (idiomatic,
        # cooperative with multiple inheritance).
        super().__init__(*args, **kwargs)
        # Kept open for the node's lifetime; the script closes it after the
        # pipeline has fully stopped.
        self.file_handle = open('video.encoded', 'wb')

    def build(self, *args):
        """Link this node's inputs to the given outputs and return self."""
        self.link_args(*args)
        return self

    def process(self, frame):
        """Append one encoded frame's raw bytes to the output file."""
        frame.getData().tofile(self.file_handle)

with dai.Pipeline() as pipeline:
    # Camera -> NV12 frames -> VideoEncoder -> VideoSaver (host file sink).
    camRgb = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    output = camRgb.requestOutput((1920, 1440), type=dai.ImgFrame.Type.NV12)
    # Host-side queue used to preview the unencoded frames.
    outputQueue = output.createOutputQueue()
    encoded = pipeline.create(dai.node.VideoEncoder).build(output,
            frameRate = 30,
            profile = PROFILE)
    saver = pipeline.create(VideoSaver).build(encoded.out)

    pipeline.start()
    print("Started to save video to video.encoded")
    print("Press Ctrl+C to stop")
    # Show the live preview until the user quits ('q' key or Ctrl+C);
    # encoding/saving runs concurrently inside the pipeline.
    while pipeline.isRunning() and not quitEvent.is_set():
        frame = outputQueue.get()
        assert isinstance(frame, dai.ImgFrame)
        cv2.imshow("video", frame.getCvFrame())
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
    pipeline.stop()
    pipeline.wait()
    # Close the file only after the pipeline has fully stopped, so no further
    # process() calls can write to a closed handle.
    saver.file_handle.close()

print("To view the encoded data, convert the stream file (.encoded) into a video file (.mp4) using a command below:")
print("ffmpeg -framerate 30 -i video.encoded -c copy video.mp4")
```

#### C++

```cpp
#include <atomic>
#include <csignal>
#include <fstream>
#include <iostream>
#include <memory>
#include <opencv2/opencv.hpp>
#include <thread>

#include "depthai/depthai.hpp"
#include "depthai/pipeline/datatype/MessageGroup.hpp"

// Global flag for graceful shutdown (set from the signal handler, polled by main)
std::atomic<bool> quitEvent(false);

// Signal handler: only sets the atomic flag, which is one of the few
// operations that is safe to perform inside a signal handler.
void signalHandler(int signum) {
    (void)signum;  // unused; parameter required by the signal() handler signature
    quitEvent = true;
}

// Custom host node that appends encoded bitstream packets to video.encoded
class VideoSaver : public dai::node::CustomNode<VideoSaver> {
   public:
    VideoSaver() : fileHandle("video.encoded", std::ios::binary) {
        if(!fileHandle.is_open()) {
            throw std::runtime_error("Could not open video.encoded for writing");
        }
    }

    // No user-declared destructor (Rule of Zero): std::ofstream flushes and
    // closes the file in its own destructor.

    /// Writes the raw bytes of each received EncodedFrame to the file.
    /// Returns nullptr because this node produces no output messages.
    std::shared_ptr<dai::Buffer> processGroup(std::shared_ptr<dai::MessageGroup> message) override {
        if(!fileHandle.is_open()) return nullptr;

        // Fetch the encoded frame arriving on the "data" input and append its
        // bitstream to the file (avoid calling getData() multiple times).
        auto frame = message->get<dai::EncodedFrame>("data");
        const auto& data = frame->getData();
        std::cout << "Storing frame of size: " << data.size() << std::endl;
        fileHandle.write(reinterpret_cast<const char*>(data.data()), data.size());

        // Don't send anything back
        return nullptr;
    }

   private:
    std::ofstream fileHandle;
};

int main() {
    // Install handlers so Ctrl+C / SIGTERM stop the loop gracefully.
    // <csignal> only guarantees std::signal, so use the qualified name.
    std::signal(SIGTERM, signalHandler);
    std::signal(SIGINT, signalHandler);

    // Create device
    std::shared_ptr<dai::Device> device = std::make_shared<dai::Device>();

    // Create pipeline
    dai::Pipeline pipeline(device);

    // Camera producing NV12 frames for both preview and encoding
    auto camRgb = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
    auto output = camRgb->requestOutput(std::make_pair(1920, 1440), dai::ImgFrame::Type::NV12);
    // Host-side queue used to preview the unencoded frames
    auto outputQueue = output->createOutputQueue();

    // Create video encoder node
    auto encoded = pipeline.create<dai::node::VideoEncoder>();
    encoded->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::MJPEG);
    output->link(encoded->input);

    // Create video saver node and feed it the encoded bitstream
    auto saver = pipeline.create<VideoSaver>();
    encoded->out.link(saver->inputs["data"]);

    // Start pipeline
    pipeline.start();
    std::cout << "Started to save video to video.encoded" << std::endl;
    std::cout << "Press Ctrl+C to stop" << std::endl;

    // Show the live preview until the user quits ('q' key or Ctrl+C);
    // encoding/saving runs concurrently inside the pipeline.
    while(pipeline.isRunning() && !quitEvent) {
        auto frame = outputQueue->get<dai::ImgFrame>();
        if(frame == nullptr) continue;

        cv::imshow("video", frame->getCvFrame());

        int key = cv::waitKey(1);
        if(key == 'q') {
            break;
        }
    }

    // Cleanup: stopping and waiting ensures the saver wrote all frames
    pipeline.stop();
    pipeline.wait();

    std::cout << "To view the encoded data, convert the stream file (.encoded) into a video file (.mp4) using a command below:" << std::endl;
    std::cout << "ffmpeg -framerate 30 -i video.encoded -c copy video.mp4" << std::endl;

    return 0;
}
```

### Need assistance?

Head over to [Discussion Forum](https://discuss.luxonis.com/) for technical support or any other questions you might have.
