DepthAI
Software Stack

ON THIS PAGE

  • Pipeline
  • Source code

Camera raw output

Supported on: RVC2, RVC4
This example shows how to use the raw output from the Camera node. It also unpacks RAW10 (sensor) data into a viewable OpenCV frame and shows it in a window. This example requires the DepthAI v3 API; see the installation instructions.

Pipeline

Source code

Python

Python
GitHub
1#!/usr/bin/env python3
2
3import cv2
4import depthai as dai
5import numpy as np
6
def unpackRaw10(rawData, width, height, stride=None):
    """
    Unpacks RAW10 data from the DepthAI pipeline into a 16-bit grayscale array.

    RAW10 packs four pixels into 5 bytes: the first 4 bytes hold the 8 most
    significant bits of each pixel, and the 5th byte holds the 2 least
    significant bits of all four pixels.

    :param rawData: Raw bytes from DepthAI (bytes-like or 1D numpy array)
    :param width: Image width in pixels
    :param height: Image height in pixels
    :param stride: Row stride in bytes (if None, calculated as width*10/8)
    :return: Unpacked 16-bit grayscale image of shape (height, width)
    :raises ValueError: If rawData holds fewer than stride * height bytes
    """
    if stride is None:
        stride = width * 10 // 8
    expectedSize = stride * height

    if len(rawData) < expectedSize:
        raise ValueError(f"Data too small: {len(rawData)} bytes, expected {expectedSize}")

    # Convert raw data to a flat numpy byte array
    packedData = np.frombuffer(rawData, dtype=np.uint8)

    # How many complete 5-byte groups are needed to cover `width` pixels per row
    numGroups = (width + 3) // 4  # Ceiling division
    rowBytes = numGroups * 5

    result = np.zeros((height, width), dtype=np.uint16)

    # Vectorized unpack over all rows at once (no per-row Python loop).
    # If a row does not contain enough payload bytes, leave the result zeroed
    # (mirrors the original row-loop's early exit).
    if rowBytes <= stride:
        # Apply the stride per row, then keep only the packed payload bytes
        rows = packedData[:expectedSize].reshape(height, stride)[:, :rowBytes]
        groups = rows.reshape(height, numGroups, 5)

        # Extract the 8 most significant bits of each of the 4 pixels per group
        unpacked = groups[:, :, :4].astype(np.uint16) << 2

        # Extract the least significant 2 bits of each pixel from the 5th byte
        lowBits = groups[:, :, 4]
        for pixel in range(4):
            unpacked[:, :, pixel] |= (lowBits >> (2 * pixel)) & 0b11

        # Flatten groups back into rows and keep only the required width
        result[:, :] = unpacked.reshape(height, numGroups * 4)[:, :width]

    # Scale from 10-bit (0-1023) to 16-bit (0-65535) for proper display
    return (result * 64).astype(np.uint16)
63
# Create pipeline
with dai.Pipeline() as pipeline:
    # Define source and output
    cam = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    rawQueue = cam.raw.createOutputQueue()
    videoQueue = cam.requestFullResolutionOutput().createOutputQueue()

    # Connect to device and start pipeline
    pipeline.start()
    while pipeline.isRunning():
        rawMsg = rawQueue.tryGet()
        videoMsg = videoQueue.tryGet()

        if videoMsg is not None:
            assert isinstance(videoMsg, dai.ImgFrame)
            cv2.imshow("video", videoMsg.getCvFrame())

        if rawMsg is not None:
            assert isinstance(rawMsg, dai.ImgFrame)
            # Unpack the RAW10 payload into a displayable 16-bit frame
            unpacked = unpackRaw10(rawMsg.getData(), rawMsg.getWidth(), rawMsg.getHeight(), rawMsg.getStride())
            cv2.imshow("raw", unpacked)

        if cv2.waitKey(1) == ord("q"):
            break

C++

1#include <atomic>
2#include <csignal>
3#include <iostream>
4#include <memory>
5#include <opencv2/opencv.hpp>
6#include <vector>
7
8#include "depthai/depthai.hpp"
9
// Set when SIGINT/SIGTERM arrives; polled by the main loop for a clean exit.
std::atomic<bool> quitEvent{false};

void signalHandler(int) {
    quitEvent = true;
}
15
16cv::Mat unpackRaw10(const std::vector<uint8_t>& rawData, int width, int height, int stride = -1) {
17    if(stride == -1) {
18        stride = width * 10 / 8;
19    }
20    int expectedSize = stride * height;
21
22    if(rawData.size() < expectedSize) {
23        throw std::runtime_error("Data too small: " + std::to_string(rawData.size()) + " bytes, expected " + std::to_string(expectedSize));
24    }
25
26    // Create output matrix
27    cv::Mat result(height, width, CV_16UC1);
28
29    // Process image row by row to handle stride correctly
30    for(int row = 0; row < height; row++) {
31        // Get row data using stride
32        const uint8_t* rowStart = rawData.data() + row * stride;
33
34        // Calculate how many complete 5-byte groups we need for width pixels
35        int numGroups = (width + 3) / 4;  // Ceiling division
36        int rowBytes = numGroups * 5;
37
38        // Ensure we don't go beyond available data
39        if(rowBytes > stride) break;
40
41        // Process each 5-byte group
42        for(int i = 0; i < numGroups; i++) {
43            const uint8_t* group = rowStart + i * 5;
44            uint16_t pixels[4];
45
46            // Extract 8 most significant bits
47            pixels[0] = (group[0] << 2);
48            pixels[1] = (group[1] << 2);
49            pixels[2] = (group[2] << 2);
50            pixels[3] = (group[3] << 2);
51
52            // Extract least significant 2 bits from 5th byte
53            pixels[0] |= (group[4] & 0b00000011);
54            pixels[1] |= ((group[4] & 0b00001100) >> 2);
55            pixels[2] |= ((group[4] & 0b00110000) >> 4);
56            pixels[3] |= ((group[4] & 0b11000000) >> 6);
57
58            // Copy pixels to result
59            for(int j = 0; j < 4 && (i * 4 + j) < width; j++) {
60                result.at<uint16_t>(row, i * 4 + j) = pixels[j] * 64;  // Scale from 10-bit to 16-bit
61            }
62        }
63    }
64
65    return result;
66}
67
68int main() {
69    signal(SIGTERM, signalHandler);
70    signal(SIGINT, signalHandler);
71
72    // Create device
73    std::shared_ptr<dai::Device> device = std::make_shared<dai::Device>();
74
75    // Create pipeline
76    dai::Pipeline pipeline(device);
77
78    // Create nodes
79    auto cam = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
80    auto rawQueue = cam->raw.createOutputQueue();
81    auto videoQueue = cam->requestFullResolutionOutput()->createOutputQueue();
82
83    // Start pipeline
84    pipeline.start();
85
86    while(pipeline.isRunning() && !quitEvent) {
87        auto videoIn = videoQueue->tryGet<dai::ImgFrame>();
88        auto rawFrame = rawQueue->tryGet<dai::ImgFrame>();
89
90        if(rawFrame != nullptr) {
91            auto dataRaw = rawFrame->getData();
92            std::vector<uint8_t> dataRawVec(dataRaw.begin(), dataRaw.end());
93            try {
94                cv::Mat parsedImage = unpackRaw10(dataRawVec, rawFrame->getWidth(), rawFrame->getHeight(), rawFrame->getStride());
95                cv::imshow("raw", parsedImage);
96            } catch(const std::exception& e) {
97                std::cerr << "Error processing raw frame: " << e.what() << std::endl;
98            }
99        }
100
101        if(videoIn != nullptr) {
102            cv::imshow("video", videoIn->getCvFrame());
103        }
104
105        if(cv::waitKey(1) == 'q') {
106            break;
107        }
108    }
109
110    pipeline.stop();
111    pipeline.wait();
112
113    return 0;
114}

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.