# Object tracker on RGB

This example shows how to run MobileNetv2SSD on the RGB input frame, and perform object tracking on persons.

### Similar samples:

 * [Object tracker on video](https://docs.luxonis.com/software/depthai/examples/object_tracker_video.md)
 * [Spatial object tracker on RGB](https://docs.luxonis.com/software/depthai/examples/spatial_object_tracker.md)

## Demo

## Setup

Please run the [install script](https://github.com/luxonis/depthai-python/blob/main/examples/install_requirements.py) to download
all required dependencies. Please note that this script must be run from a git context, so you have to download the
[depthai-python](https://github.com/luxonis/depthai-python) repository first and then run the script.

```bash
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
```

For additional information, please follow the [installation guide](https://docs.luxonis.com/software/depthai/manual-install.md).

## Source code

#### Python

```python
#!/usr/bin/env python3

"""Run MobileNet-SSD person detection on the RGB camera and track the
detections with the on-device ObjectTracker node.

Keys:
    q -- quit
    g -- force-remove one randomly chosen currently-tracked ID by sending an
         ObjectTrackerConfig message to the device
"""

from pathlib import Path
import cv2
import depthai as dai
import numpy as np
import time
import argparse

# MobileNet-SSD class labels; list index == label id reported by the network.
labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
            "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]

nnPathDefault = str((Path(__file__).parent / Path('../models/mobilenet-ssd_openvino_2021.4_6shave.blob')).resolve().absolute())
parser = argparse.ArgumentParser()
parser.add_argument('nnPath', nargs='?', help="Path to mobilenet detection network blob", default=nnPathDefault)
parser.add_argument('-ff', '--full_frame', action="store_true", help="Perform tracking on full RGB frame", default=False)

args = parser.parse_args()

fullFrameTracking = args.full_frame

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
detectionNetwork = pipeline.create(dai.node.MobileNetDetectionNetwork)
objectTracker = pipeline.create(dai.node.ObjectTracker)

xlinkOut = pipeline.create(dai.node.XLinkOut)
trackerOut = pipeline.create(dai.node.XLinkOut)
xinTrackerConfig = pipeline.create(dai.node.XLinkIn)

xlinkOut.setStreamName("preview")
trackerOut.setStreamName("tracklets")
xinTrackerConfig.setStreamName("trackerConfig")

# Properties: 300x300 BGR planar preview matches the MobileNet-SSD input.
camRgb.setPreviewSize(300, 300)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setInterleaved(False)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
camRgb.setFps(40)

# testing MobileNet DetectionNetwork
detectionNetwork.setBlobPath(args.nnPath)
detectionNetwork.setConfidenceThreshold(0.5)
detectionNetwork.input.setBlocking(False)

objectTracker.setDetectionLabelsToTrack([15])  # track only person
# possible tracking types: ZERO_TERM_COLOR_HISTOGRAM, ZERO_TERM_IMAGELESS, SHORT_TERM_IMAGELESS, SHORT_TERM_KCF
objectTracker.setTrackerType(dai.TrackerType.ZERO_TERM_COLOR_HISTOGRAM)
# take the smallest ID when new object is tracked, possible options: SMALLEST_ID, UNIQUE_ID
objectTracker.setTrackerIdAssignmentPolicy(dai.TrackerIdAssignmentPolicy.SMALLEST_ID)

# Linking
camRgb.preview.link(detectionNetwork.input)
objectTracker.passthroughTrackerFrame.link(xlinkOut.input)

if fullFrameTracking:
    # Track on the full 1080p video stream instead of the 300x300 preview.
    camRgb.video.link(objectTracker.inputTrackerFrame)
else:
    detectionNetwork.passthrough.link(objectTracker.inputTrackerFrame)

detectionNetwork.passthrough.link(objectTracker.inputDetectionFrame)
detectionNetwork.out.link(objectTracker.inputDetections)
objectTracker.out.link(trackerOut.input)

# set tracking parameters
objectTracker.setOcclusionRatioThreshold(0.4)
objectTracker.setTrackletMaxLifespan(120)
objectTracker.setTrackletBirthThreshold(3)

xinTrackerConfig.out.link(objectTracker.inputConfig)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    preview = device.getOutputQueue("preview", 4, False)
    tracklets = device.getOutputQueue("tracklets", 4, False)
    trackerConfigQueue = device.getInputQueue("trackerConfig")

    startTime = time.monotonic()
    counter = 0
    fps = 0
    frame = None

    while True:
        # IDs with TRACKED status in the most recent tracklet message only.
        latestTrackedIds = []
        imgFrame = preview.get()
        track = tracklets.get()

        counter += 1
        current_time = time.monotonic()
        if (current_time - startTime) > 1:
            fps = counter / (current_time - startTime)
            counter = 0
            startTime = current_time

        color = (255, 0, 0)
        frame = imgFrame.getCvFrame()
        trackletsData = track.tracklets
        for t in trackletsData:
            # Tracklet ROI is normalized [0..1]; map it to pixel coordinates.
            roi = t.roi.denormalize(frame.shape[1], frame.shape[0])
            x1 = int(roi.topLeft().x)
            y1 = int(roi.topLeft().y)
            x2 = int(roi.bottomRight().x)
            y2 = int(roi.bottomRight().y)

            # Fall back to the raw numeric label if it is outside labelMap.
            # Was a bare `except:`, which also swallows KeyboardInterrupt etc.
            try:
                label = labelMap[t.label]
            except (IndexError, TypeError):
                label = t.label

            cv2.putText(frame, str(label), (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            # Was f"ID: {[t.id]}", which rendered the ID inside list brackets.
            cv2.putText(frame, f"ID: {t.id}", (x1 + 10, y1 + 35), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
            # Was cv2.FONT_HERSHEY_SIMPLEX (a font enum, value 0) passed as the
            # thickness argument by mistake; use an explicit line thickness.
            cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)

            if t.status == dai.Tracklet.TrackingStatus.TRACKED:
                latestTrackedIds.append(t.id)

        cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color)

        cv2.imshow("tracker", frame)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('g'):
            # send tracker config to device
            config = dai.ObjectTrackerConfig()

            # take a random ID from the latest tracked IDs
            if len(latestTrackedIds) > 0:
                # np.random.choice returns a NumPy scalar; pass a plain int.
                idToRemove = int(np.random.choice(latestTrackedIds))
                print(f"Force removing ID: {idToRemove}")
                config.forceRemoveID(idToRemove)
                trackerConfigQueue.send(config)
            else:
                print("No tracked IDs available to force remove")
```

#### C++

```cpp
#include <atomic>
#include <chrono>
#include <iomanip>
#include <iostream>
#include <sstream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"

// MobileNet-SSD class labels; index == label id reported by the network.
static const std::vector<std::string> labelMap = {"background", "aeroplane", "bicycle",     "bird",  "boat",        "bottle", "bus",
                                                  "car",        "cat",       "chair",       "cow",   "diningtable", "dog",    "horse",
                                                  "motorbike",  "person",    "pottedplant", "sheep", "sofa",        "train",  "tvmonitor"};

// When true, tracking runs on the full 1080p video stream instead of the
// 300x300 preview that feeds the detector.
static std::atomic<bool> fullFrameTracking{false};

int main(int argc, char** argv) {
    using namespace std;
    using namespace std::chrono;
    std::string nnPath(BLOB_PATH);

    // If path to blob specified, use that
    if(argc > 1) {
        nnPath = std::string(argv[1]);
    }

    // Print which blob we are using
    printf("Using blob at path: %s\n", nnPath.c_str());

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto detectionNetwork = pipeline.create<dai::node::MobileNetDetectionNetwork>();
    auto objectTracker = pipeline.create<dai::node::ObjectTracker>();

    auto xlinkOut = pipeline.create<dai::node::XLinkOut>();
    auto trackerOut = pipeline.create<dai::node::XLinkOut>();

    xlinkOut->setStreamName("preview");
    trackerOut->setStreamName("tracklets");

    // Properties: 300x300 BGR planar preview matches the MobileNet-SSD input.
    camRgb->setPreviewSize(300, 300);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
    camRgb->setFps(40);

    // testing MobileNet DetectionNetwork
    detectionNetwork->setBlobPath(nnPath);
    detectionNetwork->setConfidenceThreshold(0.5f);
    detectionNetwork->input.setBlocking(false);

    objectTracker->setDetectionLabelsToTrack({15});  // track only person
    // possible tracking types: ZERO_TERM_COLOR_HISTOGRAM, ZERO_TERM_IMAGELESS, SHORT_TERM_IMAGELESS, SHORT_TERM_KCF
    objectTracker->setTrackerType(dai::TrackerType::ZERO_TERM_COLOR_HISTOGRAM);
    // take the smallest ID when new object is tracked, possible options: SMALLEST_ID, UNIQUE_ID
    objectTracker->setTrackerIdAssignmentPolicy(dai::TrackerIdAssignmentPolicy::SMALLEST_ID);

    // Linking
    camRgb->preview.link(detectionNetwork->input);
    objectTracker->passthroughTrackerFrame.link(xlinkOut->input);

    if(fullFrameTracking) {
        camRgb->video.link(objectTracker->inputTrackerFrame);
    } else {
        detectionNetwork->passthrough.link(objectTracker->inputTrackerFrame);
    }

    detectionNetwork->passthrough.link(objectTracker->inputDetectionFrame);
    detectionNetwork->out.link(objectTracker->inputDetections);
    objectTracker->out.link(trackerOut->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    auto preview = device.getOutputQueue("preview", 4, false);
    auto tracklets = device.getOutputQueue("tracklets", 4, false);

    auto startTime = steady_clock::now();
    int counter = 0;
    float fps = 0;

    while(true) {
        auto imgFrame = preview->get<dai::ImgFrame>();
        auto track = tracklets->get<dai::Tracklets>();

        counter++;
        auto currentTime = steady_clock::now();
        auto elapsed = duration_cast<duration<float>>(currentTime - startTime);
        if(elapsed > seconds(1)) {
            fps = counter / elapsed.count();
            counter = 0;
            startTime = currentTime;
        }

        auto color = cv::Scalar(255, 0, 0);
        cv::Mat frame = imgFrame->getCvFrame();
        auto trackletsData = track->tracklets;
        for(auto& t : trackletsData) {
            // Tracklet ROI is normalized [0..1]; map it to pixel coordinates.
            auto roi = t.roi.denormalize(frame.cols, frame.rows);
            int x1 = roi.topLeft().x;
            int y1 = roi.topLeft().y;
            int x2 = roi.bottomRight().x;
            int y2 = roi.bottomRight().y;

            // Fall back to the raw numeric label if it is outside labelMap.
            uint32_t labelIndex = t.label;
            std::string labelStr = to_string(labelIndex);
            if(labelIndex < labelMap.size()) {
                labelStr = labelMap[labelIndex];
            }
            cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);

            std::stringstream idStr;
            idStr << "ID: " << t.id;
            cv::putText(frame, idStr.str(), cv::Point(x1 + 10, y1 + 40), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);
            std::stringstream statusStr;
            statusStr << "Status: " << t.status;
            cv::putText(frame, statusStr.str(), cv::Point(x1 + 10, y1 + 60), cv::FONT_HERSHEY_TRIPLEX, 0.5, color);

            // Was cv::FONT_HERSHEY_SIMPLEX (a font enum, value 0) passed as the
            // thickness argument by mistake; use an explicit line thickness.
            cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, 2);
        }

        std::stringstream fpsStr;
        fpsStr << "NN fps:" << std::fixed << std::setprecision(2) << fps;
        cv::putText(frame, fpsStr.str(), cv::Point(2, imgFrame->getHeight() - 4), cv::FONT_HERSHEY_TRIPLEX, 0.4, color);

        cv::imshow("tracker", frame);

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}
```

## Pipeline

### examples/object_tracker.pipeline.json

```json
{"pipeline": {"connections": [{"node1Id": 7, "node1Output": "passthroughTrackerFrame", "node1OutputGroup": "", "node2Id": 10, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 7, "node1Output": "out", "node1OutputGroup": "", "node2Id": 8, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 4, "node1Output": "out", "node1OutputGroup": "", "node2Id": 7, "node2Input": "inputDetections", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "passthrough", "node1OutputGroup": "", "node2Id": 7, "node2Input": "inputDetectionFrame", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "passthrough", "node1OutputGroup": "", "node2Id": 7, "node2Input": "inputTrackerFrame", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "passthrough", "node1OutputGroup": "", "node2Id": 3, "node2Input": "inputAlignTo", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "passthrough", "node1OutputGroup": "", "node2Id": 6, "node2Input": "imageIn", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "passthrough", "node1OutputGroup": "", "node2Id": 4, "node2Input": "inputImg", "node2InputGroup": ""}, {"node1Id": 5, "node1Output": "out", "node1OutputGroup": "", "node2Id": 6, "node2Input": "in", "node2InputGroup": ""}, {"node1Id": 6, "node1Output": "out", "node1OutputGroup": "", "node2Id": 4, "node2Input": "inputDetections", "node2InputGroup": ""}, {"node1Id": 3, "node1Output": "depth", "node1OutputGroup": "", "node2Id": 4, "node2Input": "inputDepth", "node2InputGroup": ""}, {"node1Id": 2, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 3, "node2Input": "right", "node2InputGroup": ""}, {"node1Id": 1, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 3, "node2Input": "left", "node2InputGroup": ""}, {"node1Id": 0, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 5, "node2Input": "in", "node2InputGroup": ""}], "globalProperties": {"calibData": null, "cameraTuningBlobSize": null, "cameraTuningBlobUri": "", 
"leonCssFrequencyHz": 700000000.0, "leonMssFrequencyHz": 700000000.0, "pipelineName": null, "pipelineVersion": null, "sippBufferSize": 18432, "sippDmaBufferSize": 16384, "xlinkChunkSize": -1}, "nodes": [[10, {"alias": "", "id": 10, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 50, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_7_passthroughTrackerFrame"}}], [8, {"alias": "", "id": 8, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 49, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_7_out"}}], [7, {"alias": "", "id": 7, "ioInfo": [[["", "passthroughDetectionFrame"], {"blocking": false, "group": "", "id": 47, "name": "passthroughDetectionFrame", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "passthroughDetections"], {"blocking": false, "group": "", "id": 48, "name": "passthroughDetections", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "passthroughTrackerFrame"], {"blocking": false, "group": "", "id": 46, "name": "passthroughTrackerFrame", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "out"], {"blocking": false, "group": "", "id": 45, "name": "out", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputDetectionFrame"], {"blocking": false, "group": "", "id": 43, "name": "inputDetectionFrame", "queueSize": 4, "type": 3, "waitForMessage": true}], [["", "inputDetections"], {"blocking": false, "group": "", "id": 44, "name": "inputDetections", "queueSize": 4, "type": 3, "waitForMessage": true}], [["", "inputTrackerFrame"], {"blocking": false, "group": "", "id": 42, "name": "inputTrackerFrame", "queueSize": 4, "type": 3, "waitForMessage": true}]], "logLevel": 3, "name": "ObjectTracker", 
"parentId": -1, "properties": {"detectionLabelsToTrack": [0], "maxObjectsToTrack": 60, "trackerIdAssignmentPolicy": 1, "trackerThreshold": 0.0, "trackerType": 6, "trackingPerClass": true}}], [6, {"alias": "detectionParser", "id": 6, "ioInfo": [[["", "out"], {"blocking": false, "group": "", "id": 41, "name": "out", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "imageIn"], {"blocking": false, "group": "", "id": 40, "name": "imageIn", "queueSize": 1, "type": 3, "waitForMessage": true}], [["", "in"], {"blocking": true, "group": "", "id": 39, "name": "in", "queueSize": 1, "type": 3, "waitForMessage": true}]], "logLevel": 3, "name": "DetectionParser", "parentId": 4, "properties": {"networkInputs": {"images": {"dataType": 1, "dims": [416, 416, 3, 1], "name": "images", "numDimensions": 4, "offset": 0, "order": 17185, "qpScale": 1.0, "qpZp": 0.0, "quantization": false, "strides": []}}, "numFramesPool": 8, "parser": {"anchorMasks": {}, "anchors": [], "anchorsV2": [], "classNames": ["person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"], "classes": 80, "confidenceThreshold": 0.5, "coordinates": 4, "iouThreshold": 0.5, "nnFamily": 0, "subtype": "yolov6"}}}], [5, {"alias": 
"neuralNetwork", "id": 5, "ioInfo": [[["", "passthrough"], {"blocking": false, "group": "", "id": 38, "name": "passthrough", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "out"], {"blocking": false, "group": "", "id": 37, "name": "out", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "in"], {"blocking": false, "group": "", "id": 36, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": true}]], "logLevel": 3, "name": "NeuralNetwork", "parentId": 4, "properties": {"backend": "", "backendProperties": {}, "blobSize": 8689834, "blobUri": "asset:__blob", "modelSource": 0, "modelUri": "", "numFrames": 8, "numNCEPerThread": 0, "numShavesPerThread": 0, "numThreads": 0}}], [4, {"alias": "", "id": 4, "ioInfo": [[["", "passthroughDepth"], {"blocking": false, "group": "", "id": 34, "name": "passthroughDepth", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "boundingBoxMapping"], {"blocking": false, "group": "", "id": 33, "name": "boundingBoxMapping", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "spatialLocationCalculatorOutput"], {"blocking": false, "group": "", "id": 35, "name": "spatialLocationCalculatorOutput", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "out"], {"blocking": false, "group": "", "id": 32, "name": "out", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputDetections"], {"blocking": true, "group": "", "id": 31, "name": "inputDetections", "queueSize": 1, "type": 3, "waitForMessage": true}], [["", "inputImg"], {"blocking": true, "group": "", "id": 30, "name": "inputImg", "queueSize": 2, "type": 3, "waitForMessage": true}], [["", "inputDepth"], {"blocking": false, "group": "", "id": 29, "name": "inputDepth", "queueSize": 4, "type": 3, "waitForMessage": true}]], "logLevel": 3, "name": "SpatialDetectionNetwork", "parentId": -1, "properties": {"calculationAlgorithm": 4, "depthThresholds": {"lowerThreshold": 100, "upperThreshold": 5000}, "detectedBBScaleFactor": 0.5}}], [3, 
{"alias": "", "id": 3, "ioInfo": [[["", "confidenceMap"], {"blocking": false, "group": "", "id": 28, "name": "confidenceMap", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispCostDump"], {"blocking": false, "group": "", "id": 27, "name": "debugDispCostDump", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugExtDispLrCheckIt2"], {"blocking": false, "group": "", "id": 26, "name": "debugExtDispLrCheckIt2", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispLrCheckIt2"], {"blocking": false, "group": "", "id": 24, "name": "debugDispLrCheckIt2", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugExtDispLrCheckIt1"], {"blocking": false, "group": "", "id": 25, "name": "debugExtDispLrCheckIt1", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "debugDispLrCheckIt1"], {"blocking": false, "group": "", "id": 23, "name": "debugDispLrCheckIt1", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "outConfig"], {"blocking": false, "group": "", "id": 22, "name": "outConfig", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "rectifiedRight"], {"blocking": false, "group": "", "id": 21, "name": "rectifiedRight", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "rectifiedLeft"], {"blocking": false, "group": "", "id": 20, "name": "rectifiedLeft", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "depth"], {"blocking": false, "group": "", "id": 16, "name": "depth", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "right"], {"blocking": true, "group": "", "id": 15, "name": "right", "queueSize": 3, "type": 3, "waitForMessage": false}], [["", "left"], {"blocking": true, "group": "", "id": 14, "name": "left", "queueSize": 3, "type": 3, "waitForMessage": false}], [["", "syncedRight"], {"blocking": false, "group": "", "id": 19, "name": "syncedRight", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "syncedLeft"], {"blocking": false, "group": "", "id": 
18, "name": "syncedLeft", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputAlignTo"], {"blocking": false, "group": "", "id": 13, "name": "inputAlignTo", "queueSize": 1, "type": 3, "waitForMessage": true}], [["", "disparity"], {"blocking": false, "group": "", "id": 17, "name": "disparity", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "inputConfig"], {"blocking": true, "group": "", "id": 12, "name": "inputConfig", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "StereoDepth", "parentId": -1, "properties": {"alphaScaling": null, "baseline": null, "depthAlignCamera": -1, "depthAlignmentUseSpecTranslation": null, "disparityToDepthUseSpecTranslation": null, "enableFrameSync": true, "enableRectification": true, "enableRuntimeStereoModeSwitch": false, "focalLength": null, "focalLengthFromCalibration": true, "height": null, "initialConfig": {"algorithmControl": {"centerAlignmentShiftFactor": null, "customDepthUnitMultiplier": 1000.0, "depthAlign": 1, "depthUnit": 2, "disparityShift": 0, "enableExtended": false, "enableLeftRightCheck": true, "enableSubpixel": true, "enableSwLeftRightCheck": false, "leftRightCheckThreshold": 10, "numInvalidateEdgePixels": 0, "subpixelFractionalBits": 5}, "censusTransform": {"enableMeanMode": true, "kernelMask": 0, "kernelSize": -1, "noiseThresholdOffset": 1, "noiseThresholdScale": 1, "threshold": 0}, "confidenceMetrics": {"flatnessConfidenceThreshold": 2, "flatnessConfidenceWeight": 8, "flatnessOverride": false, "motionVectorConfidenceThreshold": 1, "motionVectorConfidenceWeight": 4, "occlusionConfidenceWeight": 20}, "costAggregation": {"divisionFactor": 1, "horizontalPenaltyCostP1": 250, "horizontalPenaltyCostP2": 500, "p1Config": {"defaultValue": 11, "edgeThreshold": 15, "edgeValue": 10, "enableAdaptive": true, "smoothThreshold": 5, "smoothValue": 22}, "p2Config": {"defaultValue": 33, "edgeValue": 22, "enableAdaptive": true, "smoothValue": 63}, "verticalPenaltyCostP1": 250, 
"verticalPenaltyCostP2": 500}, "costMatching": {"confidenceThreshold": 55, "disparityWidth": 1, "enableCompanding": false, "enableSwConfidenceThresholding": false, "invalidDisparityValue": 0, "linearEquationParameters": {"alpha": 0, "beta": 2, "threshold": 127}}, "filtersBackend": 2, "postProcessing": {"adaptiveMedianFilter": {"confidenceThreshold": 200, "enable": true}, "bilateralSigmaValue": 0, "brightnessFilter": {"maxBrightness": 256, "minBrightness": 0}, "decimationFilter": {"decimationFactor": 1, "decimationMode": 0}, "filteringOrder": [3, 1, 2, 4, 5], "holeFilling": {"enable": true, "fillConfidenceThreshold": 200, "highConfidenceThreshold": 210, "invalidateDisparities": true, "minValidDisparity": 1}, "median": 0, "spatialFilter": {"alpha": 0.5, "delta": 0, "enable": false, "holeFillingRadius": 2, "numIterations": 1}, "speckleFilter": {"differenceThreshold": 2, "enable": false, "speckleRange": 50}, "temporalFilter": {"alpha": 0.4000000059604645, "delta": 0, "enable": false, "persistencyMode": 3}, "thresholdFilter": {"maxRange": 65535, "minRange": 0}}}, "mesh": {"meshLeftUri": "", "meshRightUri": "", "meshSize": null, "stepHeight": 16, "stepWidth": 16}, "numFramesPool": 3, "numPostProcessingMemorySlices": -1, "numPostProcessingShaves": -1, "outHeight": null, "outKeepAspectRatio": true, "outWidth": null, "rectificationUseSpecTranslation": null, "rectifyEdgeFillColor": 0, "useHomographyRectification": null, "width": null}}], [2, {"alias": "", "id": 2, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 11, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 10, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 9, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 8, "name": "inputControl", "queueSize": 
3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 2, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 1734960488, "aeRegion": {"height": 16330, "priority": 2760884917, "width": 29552, "x": 26989, "y": 18030}, "afRegion": {"height": 26999, "priority": 3446174820, "width": 42240, "x": 31092, "y": 25968}, "antiBandingMode": 134, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 15, "brightness": 69, "captureIntent": 163, "chromaDenoise": 110, "cmdMask": 0, "contrast": 61, "controlMode": 102, "effectMode": 111, "enableHdr": false, "expCompensation": -54, "expManual": {"exposureTimeUs": 1182294381, "frameDurationUs": 2785018096, "sensitivityIso": 1103786864}, "frameSyncMode": 118, "lensPosAutoInfinity": 205, "lensPosAutoMacro": 11, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 97, "lowPowerNumFramesDiscard": 108, "lumaDenoise": 170, "miscControls": [], "saturation": -64, "sceneMode": 216, "sharpness": 0, "strobeConfig": {"activeLevel": 173, "enable": 134, "gpioNumber": 104}, "strobeTimings": {"durationUs": 2797757541, "exposureBeginOffsetUs": 1836216142, "exposureEndOffsetUs": 2053729377}, "wbColorTemp": 28018}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [640, 400]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}], [1, {"alias": "", "id": 1, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 7, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 6, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", 
"mockIsp"], {"blocking": true, "group": "", "id": 5, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 4, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 1, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 537120116, "aeRegion": {"height": 28526, "priority": 1818324338, "width": 43520, "x": 17610, "y": 49227}, "afRegion": {"height": 26999, "priority": 3395843172, "width": 42434, "x": 31337, "y": 25701}, "antiBandingMode": 224, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 68, "brightness": 120, "captureIntent": 0, "chromaDenoise": 67, "cmdMask": 0, "contrast": 70, "controlMode": 161, "effectMode": 120, "enableHdr": false, "expCompensation": 97, "expManual": {"exposureTimeUs": 1684372073, "frameDurationUs": 1952999273, "sensitivityIso": 1701357250}, "frameSyncMode": 202, "lensPosAutoInfinity": 114, "lensPosAutoMacro": 109, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 166, "lowPowerNumFramesDiscard": 109, "lumaDenoise": 202, "miscControls": [], "saturation": 112, "sceneMode": 161, "sharpness": 115, "strobeConfig": {"activeLevel": 0, "enable": 0, "gpioNumber": 0}, "strobeTimings": {"durationUs": 1751607653, "exposureBeginOffsetUs": 51833, "exposureEndOffsetUs": 1755709440}, "wbColorTemp": 39577}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [640, 400]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}], [0, {"alias": "", "id": 0, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": 
"dynamicOutputs", "id": 3, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 2, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 1, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 0, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 0, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 100663297, "aeRegion": {"height": 0, "priority": 1195433728, "width": 0, "x": 4163, "y": 1}, "afRegion": {"height": 0, "priority": 0, "width": 0, "x": 24767, "y": 0}, "antiBandingMode": 64, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 72, "brightness": -41, "captureIntent": 71, "chromaDenoise": 0, "cmdMask": 0, "contrast": -40, "controlMode": 191, "effectMode": 96, "enableHdr": false, "expCompensation": -55, "expManual": {"exposureTimeUs": 24767, "frameDurationUs": 24767, "sensitivityIso": 1195433836}, "frameSyncMode": 0, "lensPosAutoInfinity": 128, "lensPosAutoMacro": 223, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 0, "lowPowerNumFramesDiscard": 0, "lumaDenoise": 0, "miscControls": [], "saturation": -117, "sceneMode": 223, "sharpness": 0, "strobeConfig": {"activeLevel": 168, "enable": 0, "gpioNumber": -27}, "strobeTimings": {"durationUs": 20, "exposureBeginOffsetUs": 24767, "exposureEndOffsetUs": 17}, "wbColorTemp": 0}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [416, 416]}}, "type": 8}], 
"resolutionHeight": -1, "resolutionWidth": -1}}]]}}
```

### Need assistance?

Head over to [Discussion Forum](https://discuss.luxonis.com/) for technical support or any other questions you might have.
