# Stereo Depth Remap

The example configures a pipeline that captures RGB and stereo depth streams, processes the depth data with percentile-based
normalization and HOT-colormap visualization, and displays both streams annotated with the same rotated rectangle, demonstrating
coordinate remapping between the two camera frames.

This example requires the DepthAI v3 API, see [installation instructions](https://docs.luxonis.com/software-v3/depthai.md).

## Pipeline

### examples/stereo_depth_remap.pipeline.json

```json
{"pipeline": {"connections": [{"node1Id": 3, "node1Output": "depth", "node1OutputGroup": "", "node2Id": 8, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 2, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 6, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 2, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 3, "node2Input": "right", "node2InputGroup": ""}, {"node1Id": 1, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 3, "node2Input": "left", "node2InputGroup": ""}, {"node1Id": 0, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 4, "node2Input": "in", "node2InputGroup": ""}], "globalProperties": {"calibData": null, "cameraTuningBlobSize": null, "cameraTuningBlobUri": "", "leonCssFrequencyHz": 700000000.0, "leonMssFrequencyHz": 700000000.0, "pipelineName": null, "pipelineVersion": null, "sippBufferSize": 18432, "sippDmaBufferSize": 16384, "xlinkChunkSize": -1}, "nodes": [[8, {"alias": "", "id": 8, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 31, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_3_depth"}}], [6, {"alias": "", "id": 6, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 30, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_2_0"}}], [4, {"alias": "", "id": 4, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 29, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_0_0"}}], [3, {"alias": "", "id": 3, "ioInfo": [[["", "confidenceMap"], {"blocking": false, "group": "", "id": 28, "name": 
"confidenceMap", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispCostDump"], {"blocking": false, "group": "", "id": 27, "name": "debugDispCostDump", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugExtDispLrCheckIt2"], {"blocking": false, "group": "", "id": 26, "name": "debugExtDispLrCheckIt2", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispLrCheckIt2"], {"blocking": false, "group": "", "id": 24, "name": "debugDispLrCheckIt2", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugExtDispLrCheckIt1"], {"blocking": false, "group": "", "id": 25, "name": "debugExtDispLrCheckIt1", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispLrCheckIt1"], {"blocking": false, "group": "", "id": 23, "name": "debugDispLrCheckIt1", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "outConfig"], {"blocking": false, "group": "", "id": 22, "name": "outConfig", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "rectifiedRight"], {"blocking": false, "group": "", "id": 21, "name": "rectifiedRight", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "rectifiedLeft"], {"blocking": false, "group": "", "id": 20, "name": "rectifiedLeft", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "depth"], {"blocking": false, "group": "", "id": 16, "name": "depth", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "right"], {"blocking": true, "group": "", "id": 15, "name": "right", "queueSize": 3, "type": 3, "waitForMessage": false}], [["", "left"], {"blocking": true, "group": "", "id": 14, "name": "left", "queueSize": 3, "type": 3, "waitForMessage": false}], [["", "syncedRight"], {"blocking": false, "group": "", "id": 19, "name": "syncedRight", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "syncedLeft"], {"blocking": false, "group": "", "id": 18, "name": "syncedLeft", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputAlignTo"], 
{"blocking": false, "group": "", "id": 13, "name": "inputAlignTo", "queueSize": 1, "type": 3, "waitForMessage": true}], [["", "disparity"], {"blocking": false, "group": "", "id": 17, "name": "disparity", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputConfig"], {"blocking": true, "group": "", "id": 12, "name": "inputConfig", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "StereoDepth", "parentId": -1, "properties": {"alphaScaling": null, "baseline": null, "depthAlignCamera": -1, "depthAlignmentUseSpecTranslation": null, "disparityToDepthUseSpecTranslation": null, "enableFrameSync": true, "enableRectification": true, "enableRuntimeStereoModeSwitch": false, "focalLength": null, "focalLengthFromCalibration": true, "height": null, "initialConfig": {"algorithmControl": {"centerAlignmentShiftFactor": null, "customDepthUnitMultiplier": 1000.0, "depthAlign": 1, "depthUnit": 2, "disparityShift": 0, "enableExtended": false, "enableLeftRightCheck": true, "enableSubpixel": true, "enableSwLeftRightCheck": false, "leftRightCheckThreshold": 10, "numInvalidateEdgePixels": 0, "subpixelFractionalBits": 3}, "censusTransform": {"enableMeanMode": true, "kernelMask": 0, "kernelSize": -1, "noiseThresholdOffset": 1, "noiseThresholdScale": 1, "threshold": 0}, "confidenceMetrics": {"flatnessConfidenceThreshold": 5, "flatnessConfidenceWeight": 2, "flatnessOverride": false, "motionVectorConfidenceThreshold": 1, "motionVectorConfidenceWeight": 10, "occlusionConfidenceWeight": 20}, "costAggregation": {"divisionFactor": 1, "horizontalPenaltyCostP1": 250, "horizontalPenaltyCostP2": 500, "p1Config": {"defaultValue": 45, "edgeThreshold": 15, "edgeValue": 40, "enableAdaptive": true, "smoothThreshold": 5, "smoothValue": 49}, "p2Config": {"defaultValue": 95, "edgeValue": 90, "enableAdaptive": true, "smoothValue": 99}, "verticalPenaltyCostP1": 250, "verticalPenaltyCostP2": 500}, "costMatching": {"confidenceThreshold": 15, "disparityWidth": 1, 
"enableCompanding": false, "enableSwConfidenceThresholding": false, "invalidDisparityValue": 0, "linearEquationParameters": {"alpha": 0, "beta": 2, "threshold": 127}}, "filtersBackend": 2, "postProcessing": {"adaptiveMedianFilter": {"confidenceThreshold": 200, "enable": true}, "bilateralSigmaValue": 0, "brightnessFilter": {"maxBrightness": 256, "minBrightness": 0}, "decimationFilter": {"decimationFactor": 2, "decimationMode": 0}, "filteringOrder": [1, 3, 2, 4, 5], "holeFilling": {"enable": true, "fillConfidenceThreshold": 210, "highConfidenceThreshold": 100, "invalidateDisparities": true, "minValidDisparity": 3}, "median": 7, "spatialFilter": {"alpha": 0.5, "delta": 3, "enable": true, "holeFillingRadius": 1, "numIterations": 1}, "speckleFilter": {"differenceThreshold": 2, "enable": true, "speckleRange": 200}, "temporalFilter": {"alpha": 0.5, "delta": 3, "enable": true, "persistencyMode": 3}, "thresholdFilter": {"maxRange": 15000, "minRange": 0}}}, "mesh": {"meshLeftUri": "", "meshRightUri": "", "meshSize": null, "stepHeight": 16, "stepWidth": 16}, "numFramesPool": 3, "numPostProcessingMemorySlices": 3, "numPostProcessingShaves": 3, "outHeight": null, "outKeepAspectRatio": true, "outWidth": null, "rectificationUseSpecTranslation": null, "rectifyEdgeFillColor": 0, "useHomographyRectification": null, "width": null}}], [2, {"alias": "", "id": 2, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 11, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 10, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 9, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 8, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": 
{"boardSocket": 2, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 4, "aeRegion": {"height": 0, "priority": 299135334, "width": 0, "x": 96, "y": 0}, "afRegion": {"height": 44799, "priority": 23538, "width": 31808, "x": 23543, "y": 0}, "antiBandingMode": 0, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 0, "brightness": 0, "captureIntent": 0, "chromaDenoise": 0, "cmdMask": 0, "contrast": 110, "controlMode": 0, "effectMode": 0, "enableHdr": false, "expCompensation": 0, "expManual": {"exposureTimeUs": 0, "frameDurationUs": 0, "sensitivityIso": 576}, "frameSyncMode": 0, "lensPosAutoInfinity": 0, "lensPosAutoMacro": 0, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 108, "lowPowerNumFramesDiscard": 105, "lumaDenoise": 101, "miscControls": [], "saturation": 97, "sceneMode": 0, "sharpness": 109, "strobeConfig": {"activeLevel": 0, "enable": 0, "gpioNumber": 0}, "strobeTimings": {"durationUs": 23538, "exposureBeginOffsetUs": 0, "exposureEndOffsetUs": -1330420944}, "wbColorTemp": 24941}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [640, 480]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}], [1, {"alias": "", "id": 1, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 7, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 6, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 5, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 4, "name": 
"inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 1, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 1852403110, "aeRegion": {"height": 51816, "priority": 14721348, "width": 29796, "x": 42434, "y": 26999}, "afRegion": {"height": 41216, "priority": 51833, "width": 0, "x": 30881, "y": 202}, "antiBandingMode": 166, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 0, "brightness": -54, "captureIntent": 104, "chromaDenoise": 164, "cmdMask": 0, "contrast": 63, "controlMode": 101, "effectMode": 105, "enableHdr": false, "expCompensation": 115, "expManual": {"exposureTimeUs": 1869523456, "frameDurationUs": 1684372073, "sensitivityIso": 1818324338}, "frameSyncMode": 103, "lensPosAutoInfinity": 202, "lensPosAutoMacro": 68, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 101, "lowPowerNumFramesDiscard": 1, "lumaDenoise": 158, "miscControls": [], "saturation": -41, "sceneMode": 0, "sharpness": 239, "strobeConfig": {"activeLevel": 116, "enable": 104, "gpioNumber": -51}, "strobeTimings": {"durationUs": 2593718595, "exposureBeginOffsetUs": 2019650982, "exposureEndOffsetUs": -898404282}, "wbColorTemp": 28793}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [640, 480]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}], [0, {"alias": "", "id": 0, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 3, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 2, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": 
false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 1, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 0, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 0, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 0, "aeRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "afRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "antiBandingMode": 0, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 0, "brightness": 0, "captureIntent": 0, "chromaDenoise": 0, "cmdMask": 0, "contrast": 0, "controlMode": 0, "effectMode": 0, "enableHdr": false, "expCompensation": 0, "expManual": {"exposureTimeUs": 0, "frameDurationUs": 0, "sensitivityIso": 0}, "frameSyncMode": 0, "lensPosAutoInfinity": 0, "lensPosAutoMacro": 0, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 0, "lowPowerNumFramesDiscard": 0, "lumaDenoise": 0, "miscControls": [], "saturation": 0, "sceneMode": 0, "sharpness": 0, "strobeConfig": {"activeLevel": 0, "enable": 0, "gpioNumber": 0}, "strobeTimings": {"durationUs": 0, "exposureBeginOffsetUs": 0, "exposureEndOffsetUs": 0}, "wbColorTemp": 0}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [640, 480]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}]]}}
```

## Source code

#### Python

```python
import depthai as dai
import cv2
import numpy as np

def draw_rotated_rectangle(frame, center, size, angle, color, thickness=2):
    """
    Draw the outline of a rotated rectangle onto a frame (in place).

    Args:
        frame (numpy.ndarray): Image to draw on; modified in place.
        center (tuple): (x, y) coordinates of the rectangle's center.
        size (tuple): (width, height) of the rectangle.
        angle (float): Rotation angle in degrees (counter-clockwise).
        color (tuple): BGR color, e.g. (0, 255, 0) for green.
        thickness (int): Edge thickness in pixels. Default is 2.
    """
    # Build the OpenCV rotated-rect triple (center, size, angle)
    rotated = ((center[0], center[1]), (size[0], size[1]), angle)

    # Compute the four corners and round them to integer pixel coordinates
    corners = np.intp(cv2.boxPoints(rotated))

    # Connect the corners as a closed polyline
    cv2.polylines(frame, [corners], isClosed=True, color=color, thickness=thickness)

def processDepthFrame(depthFrame):
    """Colorize a 16-bit depth frame for display.

    The display range is the 1st percentile of the valid (non-zero) depths
    up to the 99th percentile of all sampled depths, mapped linearly to
    [0, 255] and rendered with the HOT colormap.
    """
    # Subsample rows only — cheap estimate for the percentiles
    sampled = depthFrame[::4]
    # 1st percentile of non-zero depths; 0 when no valid measurement exists
    min_depth = 0 if np.all(sampled == 0) else np.percentile(sampled[sampled != 0], 1)
    max_depth = np.percentile(sampled, 99)
    # np.interp clamps values outside (min_depth, max_depth)
    scaled = np.interp(depthFrame, (min_depth, max_depth), (0, 255)).astype(np.uint8)
    return cv2.applyColorMap(scaled, cv2.COLORMAP_HOT)

with dai.Pipeline() as pipeline:
    # RGB camera plus the two mono cameras that feed the stereo node.
    color = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)
    monoLeft = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B)
    monoRight = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C)
    stereo = pipeline.create(dai.node.StereoDepth)

    stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT)
    # Optional: align depth to the RGB camera and/or fix the depth output size.
    # stereo.setDepthAlign(dai.CameraBoardSocket.CAM_A)
    # stereo.setOutputSize(640, 400)

    colorCamOut = color.requestOutput((640, 480))

    monoLeftOut = monoLeft.requestOutput((640, 480))
    monoRightOut = monoRight.requestOutput((640, 480))

    monoLeftOut.link(stereo.left)
    monoRightOut.link(stereo.right)

    colorOut = colorCamOut.createOutputQueue()
    rightOut = monoRightOut.createOutputQueue()  # NOTE(review): never read below — confirm it is needed
    stereoOut = stereo.depth.createOutputQueue()

    pipeline.start()
    while pipeline.isRunning():
        # Blocking gets; the two frames are fetched independently here.
        colorFrame = colorOut.get()
        stereoFrame = stereoOut.get()

        # Both frames must carry valid transformation metadata for remapping.
        assert colorFrame.validateTransformations()
        assert stereoFrame.validateTransformations()

        clr = colorFrame.getCvFrame()
        depth = processDepthFrame(stereoFrame.getCvFrame())

        # Rectangle defined in the color frame's coordinate system ...
        rect = dai.RotatedRect(dai.Point2f(300, 200), dai.Size2f(200, 100), 10)
        # ... remapped into the depth frame's coordinate system.
        remappedRect = colorFrame.getTransformation().remapRectTo(stereoFrame.getTransformation(), rect)

        print(f"Original rect x: {rect.center.x} y: {rect.center.y} width: {rect.size.width} height: {rect.size.height} angle: {rect.angle}")
        print(f"Remapped rect x: {remappedRect.center.x} y: {remappedRect.center.y} width: {remappedRect.size.width} height: {remappedRect.size.height} angle: {remappedRect.angle}")

        # Draw the original rectangle on color and its remapped twin on depth.
        draw_rotated_rectangle(clr, (rect.center.x, rect.center.y), (rect.size.width, rect.size.height), rect.angle, (255, 0, 0))
        draw_rotated_rectangle(depth, (remappedRect.center.x, remappedRect.center.y), (remappedRect.size.width, remappedRect.size.height), remappedRect.angle, (255, 0, 0))

        cv2.imshow("color", clr)
        cv2.imshow("depth", depth)

        if cv2.waitKey(1) == ord('q'):
            break
    pipeline.stop()
```

#### C++

```cpp
#include <atomic>
#include <csignal>
#include <iostream>
#include <opencv2/opencv.hpp>

#include "depthai/depthai.hpp"

// Set when SIGINT/SIGTERM arrives; polled by the main capture loop.
std::atomic<bool> quitEvent{false};

// Signal handler: request a graceful shutdown.
void signalHandler(int) {
    quitEvent.store(true);
}

// Helper function to draw rotated rectangle
// Draw the outline of a rotated rectangle onto an image (in place).
void drawRotatedRectangle(cv::Mat& frame, const cv::Point2f& center, const cv::Size2f& size, float angle, const cv::Scalar& color, int thickness = 2) {
    // Build the rotated rectangle and fetch its four corners
    const cv::RotatedRect rotated(center, size, angle);
    cv::Point2f corners[4];
    rotated.points(corners);

    // Round the corners to integer pixel coordinates
    std::vector<cv::Point> outline;
    outline.reserve(4);
    for(const auto& corner : corners) {
        outline.emplace_back(static_cast<int>(corner.x), static_cast<int>(corner.y));
    }

    // Connect the corners as a closed polyline
    cv::polylines(frame, outline, true, color, thickness);
}

// Helper function to process depth frame
// Colorize a 16-bit depth frame for display.
//
// Mirrors the Python example: the display range runs from the 1st percentile
// of the valid (non-zero) depths to the 99th percentile of all sampled
// depths, mapped linearly to [0, 255] (out-of-range values saturate, like
// np.interp) and rendered with the HOT colormap.
//
// Fixes vs. the previous version: max_depth was computed but never used
// (the final normalize() was a plain min/max stretch), and the zero-pixel
// check was inverted — min_depth was only estimated when the frame had NO
// zero pixels, the opposite of the Python reference.
cv::Mat processDepthFrame(const cv::Mat& depthFrame) {
    // Downscale with nearest-neighbour so invalid (zero) pixels are not
    // blended into valid depths; this only cheapens the percentile
    // estimate — the full-resolution frame is what gets colorized.
    cv::Mat downscaled;
    cv::resize(depthFrame, downscaled, cv::Size(), 0.25, 0.25, cv::INTER_NEAREST);

    // Gather all depths and the non-zero (valid) subset in one pass.
    std::vector<uint16_t> allDepth;
    std::vector<uint16_t> nonZeroDepth;
    allDepth.reserve(static_cast<size_t>(downscaled.rows) * downscaled.cols);
    for(int i = 0; i < downscaled.rows; i++) {
        for(int j = 0; j < downscaled.cols; j++) {
            const uint16_t depth = downscaled.at<uint16_t>(i, j);
            allDepth.push_back(depth);
            if(depth > 0) nonZeroDepth.push_back(depth);
        }
    }

    // 1st percentile of the valid depths; 0 when nothing is valid.
    double min_depth = 0.0;
    if(!nonZeroDepth.empty()) {
        const auto k1 = static_cast<std::ptrdiff_t>(nonZeroDepth.size() * 0.01);
        std::nth_element(nonZeroDepth.begin(), nonZeroDepth.begin() + k1, nonZeroDepth.end());
        min_depth = nonZeroDepth[k1];
    }

    // 99th percentile of all sampled depths.
    double max_depth = min_depth + 1.0;  // safe fallback for empty frames
    if(!allDepth.empty()) {
        const auto k99 = static_cast<std::ptrdiff_t>(allDepth.size() * 0.99);
        std::nth_element(allDepth.begin(), allDepth.begin() + k99, allDepth.end());
        max_depth = allDepth[k99];
    }
    if(max_depth <= min_depth) max_depth = min_depth + 1.0;  // avoid division by zero

    // Linear map [min_depth, max_depth] -> [0, 255]; convertTo saturates,
    // which matches np.interp's clamping at the range ends.
    const double scale = 255.0 / (max_depth - min_depth);
    cv::Mat normalized;
    depthFrame.convertTo(normalized, CV_8UC1, scale, -min_depth * scale);

    cv::Mat colorized;
    cv::applyColorMap(normalized, colorized, cv::COLORMAP_HOT);
    return colorized;
}

int main() {
    // Allow Ctrl-C / SIGTERM to break the capture loop cleanly.
    signal(SIGTERM, signalHandler);
    signal(SIGINT, signalHandler);

    // Create pipeline
    dai::Pipeline pipeline;

    // RGB camera plus the two mono cameras that feed the stereo node.
    auto color = pipeline.create<dai::node::Camera>();
    color->build(dai::CameraBoardSocket::CAM_A);

    auto monoLeft = pipeline.create<dai::node::Camera>();
    monoLeft->build(dai::CameraBoardSocket::CAM_B);

    auto monoRight = pipeline.create<dai::node::Camera>();
    monoRight->build(dai::CameraBoardSocket::CAM_C);

    auto stereo = pipeline.create<dai::node::StereoDepth>();

    // Configure stereo node
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::DEFAULT);
    // Uncomment to align depth to RGB and/or fix the depth output size
    // stereo->setDepthAlign(dai::CameraBoardSocket::CAM_A);
    // stereo->setOutputSize(640, 400);

    // Request 640x480 outputs from all three cameras
    auto colorCamOut = color->requestOutput(std::make_pair(640, 480));
    auto monoLeftOut = monoLeft->requestOutput(std::make_pair(640, 480));
    auto monoRightOut = monoRight->requestOutput(std::make_pair(640, 480));

    // Link mono cameras to stereo
    monoLeftOut->link(stereo->left);
    monoRightOut->link(stereo->right);

    // Create output queues
    auto colorOut = colorCamOut->createOutputQueue();
    auto rightOut = monoRightOut->createOutputQueue();  // NOTE(review): never read below — confirm it is needed
    auto stereoOut = stereo->depth.createOutputQueue();

    pipeline.start();

    while(pipeline.isRunning() && !quitEvent) {
        // Blocking gets; the two frames are fetched independently here.
        auto colorFrame = colorOut->get<dai::ImgFrame>();
        auto stereoFrame = stereoOut->get<dai::ImgFrame>();

        if(colorFrame == nullptr || stereoFrame == nullptr) continue;

        // Both frames must carry valid transformation metadata for remapping.
        if(!colorFrame->validateTransformations() || !stereoFrame->validateTransformations()) {
            std::cerr << "Invalid transformations!" << std::endl;
            continue;
        }

        // Get frames
        cv::Mat clr = colorFrame->getCvFrame();
        cv::Mat depth = processDepthFrame(stereoFrame->getCvFrame());

        // Rectangle defined in the color frame's coordinate system,
        // remapped into the depth frame's coordinate system.
        dai::RotatedRect rect(dai::Point2f(300, 200), dai::Size2f(200, 100), 10);
        auto remappedRect = colorFrame->transformation.remapRectTo(stereoFrame->transformation, rect);

        // Print rectangle information
        std::cout << "Original rect x: " << rect.center.x << " y: " << rect.center.y << " width: " << rect.size.width << " height: " << rect.size.height
                  << " angle: " << rect.angle << std::endl;
        std::cout << "Remapped rect x: " << remappedRect.center.x << " y: " << remappedRect.center.y << " width: " << remappedRect.size.width
                  << " height: " << remappedRect.size.height << " angle: " << remappedRect.angle << std::endl;

        // Draw the original rectangle on color and its remapped twin on depth
        drawRotatedRectangle(clr, cv::Point2f(rect.center.x, rect.center.y), cv::Size2f(rect.size.width, rect.size.height), rect.angle, cv::Scalar(255, 0, 0));

        drawRotatedRectangle(depth,
                             cv::Point2f(remappedRect.center.x, remappedRect.center.y),
                             cv::Size2f(remappedRect.size.width, remappedRect.size.height),
                             remappedRect.angle,
                             cv::Scalar(255, 0, 0));

        // Show frames
        cv::imshow("color", clr);
        cv::imshow("depth", depth);

        if(cv::waitKey(1) == 'q') {
            break;
        }
    }

    pipeline.stop();
    pipeline.wait();

    return 0;
}
```

### Need assistance?

Head over to [Discussion Forum](https://discuss.luxonis.com/) for technical support or any other questions you might have.
