# Object Tracker Remap

This example demonstrates running YOLOv6-nano detection with the
[ObjectTracker](https://docs.luxonis.com/software-v3/depthai/depthai-components/nodes/object_tracker.md) node and remapping
bounding boxes onto a colorized depth frame using transformation metadata, ensuring accurate alignment across streams (RGB ↔
Depth).

## Demo

This example requires the DepthAI v3 API; see the [installation instructions](https://docs.luxonis.com/software-v3/depthai.md).

## Pipeline

### examples/object_tracker_remap(daiv3).pipeline.json

```json
{
  "pipeline": {
    "connections": [
      {
        "node1Id": 0,
        "node1Output": "0",
        "node1OutputGroup": "dynamicOutputs",
        "node2Id": 2,
        "node2Input": "in",
        "node2InputGroup": ""
      },
      {
        "node1Id": 2,
        "node1Output": "out",
        "node1OutputGroup": "",
        "node2Id": 3,
        "node2Input": "in",
        "node2InputGroup": ""
      },
      {
        "node1Id": 2,
        "node1Output": "passthrough",
        "node1OutputGroup": "",
        "node2Id": 4,
        "node2Input": "inputDetectionFrame",
        "node2InputGroup": ""
      },
      {
        "node1Id": 2,
        "node1Output": "passthrough",
        "node1OutputGroup": "",
        "node2Id": 4,
        "node2Input": "inputTrackerFrame",
        "node2InputGroup": ""
      },
      {
        "node1Id": 3,
        "node1Output": "out",
        "node1OutputGroup": "",
        "node2Id": 4,
        "node2Input": "inputDetections",
        "node2InputGroup": ""
      },
      {
        "node1Id": 5,
        "node1Output": "0",
        "node1OutputGroup": "dynamicOutputs",
        "node2Id": 7,
        "node2Input": "left",
        "node2InputGroup": ""
      },
      {
        "node1Id": 6,
        "node1Output": "0",
        "node1OutputGroup": "dynamicOutputs",
        "node2Id": 7,
        "node2Input": "right",
        "node2InputGroup": ""
      }
    ],
    "globalProperties": {
      "calibData": null,
      "cameraTuningBlobSize": null,
      "cameraTuningBlobUri": "",
      "eepromId": 0,
      "leonCssFrequencyHz": 700000000.0,
      "leonMssFrequencyHz": 700000000.0,
      "pipelineName": null,
      "pipelineVersion": null,
      "sippBufferSize": 18432,
      "sippDmaBufferSize": 16384,
      "xlinkChunkSize": -1
    },
    "nodes": [
      [
        0,
        {
          "alias": "",
          "id": 0,
          "ioInfo": [
            [
              [
                "",
                "inputControl"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 0,
                "name": "inputControl",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "dynamicOutputs",
                "0"
              ],
              {
                "blocking": false,
                "group": "dynamicOutputs",
                "id": 3,
                "name": "0",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "mockIsp"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 1,
                "name": "mockIsp",
                "queueSize": 8,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "raw"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 2,
                "name": "raw",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "Camera",
          "parentId": -1,
          "properties": {
            "boardSocket": 0,
            "cameraName": "",
            "fps": -1.0,
            "imageOrientation": -1,
            "initialControl": {
              "aeLockMode": false,
              "aeMaxExposureTimeUs": 0,
              "aeRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "afRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "antiBandingMode": 0,
              "autoFocusMode": 3,
              "awbLockMode": false,
              "awbMode": 0,
              "brightness": 0,
              "captureIntent": 0,
              "chromaDenoise": 0,
              "cmdMask": 0,
              "contrast": 0,
              "controlMode": 0,
              "effectMode": 0,
              "enableHdr": false,
              "expCompensation": 0,
              "expManual": {
                "exposureTimeUs": 0,
                "frameDurationUs": 0,
                "sensitivityIso": 0
              },
              "frameSyncMode": 0,
              "lensPosAutoInfinity": 0,
              "lensPosAutoMacro": 0,
              "lensPosition": 0,
              "lensPositionRaw": 0.0,
              "lowPowerNumFramesBurst": 0,
              "lowPowerNumFramesDiscard": 0,
              "lumaDenoise": 0,
              "miscControls": [],
              "saturation": 0,
              "sceneMode": 0,
              "sharpness": 0,
              "strobeConfig": {
                "activeLevel": 0,
                "enable": 0,
                "gpioNumber": 0
              },
              "strobeTimings": {
                "durationUs": 0,
                "exposureBeginOffsetUs": 0,
                "exposureEndOffsetUs": 0
              },
              "wbColorTemp": 0
            },
            "isp3aFps": 0,
            "mockIspHeight": -1,
            "mockIspWidth": -1,
            "numFramesPoolIsp": 3,
            "numFramesPoolPreview": 4,
            "numFramesPoolRaw": 3,
            "numFramesPoolStill": 4,
            "numFramesPoolVideo": 4,
            "outputRequests": [
              {
                "enableUndistortion": null,
                "fps": {
                  "value": null
                },
                "resizeMode": 0,
                "size": {
                  "value": {
                    "index": 0,
                    "value": [
                      512,
                      288
                    ]
                  }
                },
                "type": 10
              }
            ],
            "resolutionHeight": -1,
            "resolutionWidth": -1,
            "sensorType": -1
          }
        }
      ],
      [
        2,
        {
          "alias": "neuralNetwork",
          "id": 2,
          "ioInfo": [
            [
              [
                "",
                "in"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 4,
                "name": "in",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "out"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 5,
                "name": "out",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "passthrough"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 6,
                "name": "passthrough",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "NeuralNetwork",
          "parentId": 1,
          "properties": {
            "backend": "",
            "backendProperties": {},
            "blobSize": null,
            "blobUri": "",
            "modelSource": 1,
            "modelUri": "asset:__model",
            "numFrames": 8,
            "numNCEPerThread": 0,
            "numShavesPerThread": 0,
            "numThreads": 0
          }
        }
      ],
      [
        3,
        {
          "alias": "detectionParser",
          "id": 3,
          "ioInfo": [
            [
              [
                "",
                "in"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 7,
                "name": "in",
                "queueSize": 1,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "out"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 8,
                "name": "out",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "DetectionParser",
          "parentId": 1,
          "properties": {
            "networkInputs": {},
            "numFramesPool": 8,
            "parser": {
              "anchorMasks": {},
              "anchors": [],
              "anchorsV2": [],
              "classNames": [
                "person",
                "bicycle",
                "car",
                "motorcycle",
                "airplane",
                "bus",
                "train",
                "truck",
                "boat",
                "traffic light",
                "fire hydrant",
                "stop sign",
                "parking meter",
                "bench",
                "bird",
                "cat",
                "dog",
                "horse",
                "sheep",
                "cow",
                "elephant",
                "bear",
                "zebra",
                "giraffe",
                "backpack",
                "umbrella",
                "handbag",
                "tie",
                "suitcase",
                "frisbee",
                "skis",
                "snowboard",
                "sports ball",
                "kite",
                "baseball bat",
                "baseball glove",
                "skateboard",
                "surfboard",
                "tennis racket",
                "bottle",
                "wine glass",
                "cup",
                "fork",
                "knife",
                "spoon",
                "bowl",
                "banana",
                "apple",
                "sandwich",
                "orange",
                "broccoli",
                "carrot",
                "hot dog",
                "pizza",
                "donut",
                "cake",
                "chair",
                "couch",
                "potted plant",
                "bed",
                "dining table",
                "toilet",
                "tv",
                "laptop",
                "mouse",
                "remote",
                "keyboard",
                "cell phone",
                "microwave",
                "oven",
                "toaster",
                "sink",
                "refrigerator",
                "book",
                "clock",
                "vase",
                "scissors",
                "teddy bear",
                "hair drier",
                "toothbrush"
              ],
              "classes": 80,
              "confidenceThreshold": 0.5,
              "coordinates": 4,
              "iouThreshold": 0.5,
              "nnFamily": 0,
              "subtype": "yolov6r2"
            }
          }
        }
      ],
      [
        4,
        {
          "alias": "",
          "id": 4,
          "ioInfo": [
            [
              [
                "",
                "inputTrackerFrame"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 9,
                "name": "inputTrackerFrame",
                "queueSize": 4,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "inputDetectionFrame"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 10,
                "name": "inputDetectionFrame",
                "queueSize": 4,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "inputDetections"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 11,
                "name": "inputDetections",
                "queueSize": 4,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "inputConfig"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 12,
                "name": "inputConfig",
                "queueSize": 4,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "passthroughDetections"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 16,
                "name": "passthroughDetections",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "out"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 13,
                "name": "out",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "passthroughTrackerFrame"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 14,
                "name": "passthroughTrackerFrame",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "passthroughDetectionFrame"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 15,
                "name": "passthroughDetectionFrame",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "ObjectTracker",
          "parentId": -1,
          "properties": {
            "detectionLabelsToTrack": [],
            "maxObjectsToTrack": 60,
            "occlusionRatioThreshold": 0.20000000298023224,
            "trackerIdAssignmentPolicy": 0,
            "trackerThreshold": 0.0,
            "trackerType": 3,
            "trackingPerClass": true,
            "trackletBirthThreshold": 3,
            "trackletMaxLifespan": 120
          }
        }
      ],
      [
        5,
        {
          "alias": "",
          "id": 5,
          "ioInfo": [
            [
              [
                "",
                "inputControl"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 17,
                "name": "inputControl",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "dynamicOutputs",
                "0"
              ],
              {
                "blocking": false,
                "group": "dynamicOutputs",
                "id": 20,
                "name": "0",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "mockIsp"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 18,
                "name": "mockIsp",
                "queueSize": 8,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "raw"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 19,
                "name": "raw",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "Camera",
          "parentId": -1,
          "properties": {
            "boardSocket": 1,
            "cameraName": "",
            "fps": -1.0,
            "imageOrientation": -1,
            "initialControl": {
              "aeLockMode": false,
              "aeMaxExposureTimeUs": 0,
              "aeRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "afRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "antiBandingMode": 0,
              "autoFocusMode": 3,
              "awbLockMode": false,
              "awbMode": 0,
              "brightness": 0,
              "captureIntent": 0,
              "chromaDenoise": 0,
              "cmdMask": 0,
              "contrast": 0,
              "controlMode": 0,
              "effectMode": 0,
              "enableHdr": false,
              "expCompensation": 0,
              "expManual": {
                "exposureTimeUs": 0,
                "frameDurationUs": 0,
                "sensitivityIso": 0
              },
              "frameSyncMode": 0,
              "lensPosAutoInfinity": 0,
              "lensPosAutoMacro": 0,
              "lensPosition": 0,
              "lensPositionRaw": 0.0,
              "lowPowerNumFramesBurst": 0,
              "lowPowerNumFramesDiscard": 0,
              "lumaDenoise": 0,
              "miscControls": [],
              "saturation": 0,
              "sceneMode": 0,
              "sharpness": 0,
              "strobeConfig": {
                "activeLevel": 0,
                "enable": 0,
                "gpioNumber": 0
              },
              "strobeTimings": {
                "durationUs": 0,
                "exposureBeginOffsetUs": 0,
                "exposureEndOffsetUs": 0
              },
              "wbColorTemp": 0
            },
            "isp3aFps": 0,
            "mockIspHeight": -1,
            "mockIspWidth": -1,
            "numFramesPoolIsp": 3,
            "numFramesPoolPreview": 4,
            "numFramesPoolRaw": 3,
            "numFramesPoolStill": 4,
            "numFramesPoolVideo": 4,
            "outputRequests": [
              {
                "enableUndistortion": null,
                "fps": {
                  "value": null
                },
                "resizeMode": 0,
                "size": {
                  "value": {
                    "index": 0,
                    "value": [
                      1280,
                      720
                    ]
                  }
                },
                "type": 22
              }
            ],
            "resolutionHeight": -1,
            "resolutionWidth": -1,
            "sensorType": -1
          }
        }
      ],
      [
        6,
        {
          "alias": "",
          "id": 6,
          "ioInfo": [
            [
              [
                "",
                "inputControl"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 21,
                "name": "inputControl",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "dynamicOutputs",
                "0"
              ],
              {
                "blocking": false,
                "group": "dynamicOutputs",
                "id": 24,
                "name": "0",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "mockIsp"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 22,
                "name": "mockIsp",
                "queueSize": 8,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "raw"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 23,
                "name": "raw",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "Camera",
          "parentId": -1,
          "properties": {
            "boardSocket": 2,
            "cameraName": "",
            "fps": -1.0,
            "imageOrientation": -1,
            "initialControl": {
              "aeLockMode": false,
              "aeMaxExposureTimeUs": 0,
              "aeRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "afRegion": {
                "height": 0,
                "priority": 0,
                "width": 0,
                "x": 0,
                "y": 0
              },
              "antiBandingMode": 0,
              "autoFocusMode": 3,
              "awbLockMode": false,
              "awbMode": 0,
              "brightness": 0,
              "captureIntent": 0,
              "chromaDenoise": 0,
              "cmdMask": 0,
              "contrast": 0,
              "controlMode": 0,
              "effectMode": 0,
              "enableHdr": false,
              "expCompensation": 0,
              "expManual": {
                "exposureTimeUs": 0,
                "frameDurationUs": 0,
                "sensitivityIso": 0
              },
              "frameSyncMode": 0,
              "lensPosAutoInfinity": 0,
              "lensPosAutoMacro": 0,
              "lensPosition": 0,
              "lensPositionRaw": 0.0,
              "lowPowerNumFramesBurst": 0,
              "lowPowerNumFramesDiscard": 0,
              "lumaDenoise": 0,
              "miscControls": [],
              "saturation": 0,
              "sceneMode": 0,
              "sharpness": 0,
              "strobeConfig": {
                "activeLevel": 0,
                "enable": 0,
                "gpioNumber": 0
              },
              "strobeTimings": {
                "durationUs": 0,
                "exposureBeginOffsetUs": 0,
                "exposureEndOffsetUs": 0
              },
              "wbColorTemp": 0
            },
            "isp3aFps": 0,
            "mockIspHeight": -1,
            "mockIspWidth": -1,
            "numFramesPoolIsp": 3,
            "numFramesPoolPreview": 4,
            "numFramesPoolRaw": 3,
            "numFramesPoolStill": 4,
            "numFramesPoolVideo": 4,
            "outputRequests": [
              {
                "enableUndistortion": null,
                "fps": {
                  "value": null
                },
                "resizeMode": 0,
                "size": {
                  "value": {
                    "index": 0,
                    "value": [
                      1280,
                      720
                    ]
                  }
                },
                "type": 22
              }
            ],
            "resolutionHeight": -1,
            "resolutionWidth": -1,
            "sensorType": -1
          }
        }
      ],
      [
        7,
        {
          "alias": "",
          "id": 7,
          "ioInfo": [
            [
              [
                "",
                "inputAlignTo"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 26,
                "name": "inputAlignTo",
                "queueSize": 1,
                "type": 3,
                "waitForMessage": true
              }
            ],
            [
              [
                "",
                "inputConfig"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 25,
                "name": "inputConfig",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "left"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 27,
                "name": "left",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "debugExtDispLrCheckIt1"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 38,
                "name": "debugExtDispLrCheckIt1",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "right"
              ],
              {
                "blocking": true,
                "group": "",
                "id": 28,
                "name": "right",
                "queueSize": 3,
                "type": 3,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "depth"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 29,
                "name": "depth",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "disparity"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 30,
                "name": "disparity",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "syncedLeft"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 31,
                "name": "syncedLeft",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "syncedRight"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 32,
                "name": "syncedRight",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "rectifiedLeft"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 33,
                "name": "rectifiedLeft",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "rectifiedRight"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 34,
                "name": "rectifiedRight",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "confidenceMap"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 41,
                "name": "confidenceMap",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "outConfig"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 35,
                "name": "outConfig",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "debugDispLrCheckIt1"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 36,
                "name": "debugDispLrCheckIt1",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "debugDispLrCheckIt2"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 37,
                "name": "debugDispLrCheckIt2",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "debugDispCostDump"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 40,
                "name": "debugDispCostDump",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ],
            [
              [
                "",
                "debugExtDispLrCheckIt2"
              ],
              {
                "blocking": false,
                "group": "",
                "id": 39,
                "name": "debugExtDispLrCheckIt2",
                "queueSize": 8,
                "type": 0,
                "waitForMessage": false
              }
            ]
          ],
          "logLevel": 3,
          "name": "StereoDepth",
          "parentId": -1,
          "properties": {
            "alphaScaling": null,
            "baseline": null,
            "depthAlignCamera": -1,
            "depthAlignmentUseSpecTranslation": null,
            "disparityToDepthUseSpecTranslation": null,
            "enableFrameSync": true,
            "enableRectification": true,
            "enableRuntimeStereoModeSwitch": false,
            "focalLength": null,
            "focalLengthFromCalibration": true,
            "height": null,
            "initialConfig": {
              "algorithmControl": {
                "centerAlignmentShiftFactor": null,
                "customDepthUnitMultiplier": 1000.0,
                "depthAlign": 1,
                "depthUnit": 2,
                "disparityShift": 0,
                "enableExtended": true,
                "enableLeftRightCheck": true,
                "enableSubpixel": true,
                "enableSwLeftRightCheck": false,
                "leftRightCheckThreshold": 10,
                "numInvalidateEdgePixels": 0,
                "subpixelFractionalBits": 5
              },
              "censusTransform": {
                "enableMeanMode": true,
                "kernelMask": 0,
                "kernelSize": -1,
                "noiseThresholdOffset": 1,
                "noiseThresholdScale": 1,
                "threshold": 0
              },
              "confidenceMetrics": {
                "flatnessConfidenceThreshold": 2,
                "flatnessConfidenceWeight": 8,
                "flatnessOverride": false,
                "motionVectorConfidenceThreshold": 1,
                "motionVectorConfidenceWeight": 4,
                "occlusionConfidenceWeight": 20
              },
              "costAggregation": {
                "divisionFactor": 1,
                "horizontalPenaltyCostP1": 250,
                "horizontalPenaltyCostP2": 500,
                "p1Config": {
                  "defaultValue": 11,
                  "edgeThreshold": 15,
                  "edgeValue": 10,
                  "enableAdaptive": true,
                  "smoothThreshold": 5,
                  "smoothValue": 22
                },
                "p2Config": {
                  "defaultValue": 33,
                  "edgeValue": 22,
                  "enableAdaptive": true,
                  "smoothValue": 63
                },
                "verticalPenaltyCostP1": 250,
                "verticalPenaltyCostP2": 500
              },
              "costMatching": {
                "confidenceThreshold": 55,
                "disparityWidth": 1,
                "enableCompanding": false,
                "enableSwConfidenceThresholding": false,
                "invalidDisparityValue": 0,
                "linearEquationParameters": {
                  "alpha": 0,
                  "beta": 2,
                  "threshold": 127
                }
              },
              "filtersBackend": 2,
              "postProcessing": {
                "adaptiveMedianFilter": {
                  "confidenceThreshold": 200,
                  "enable": true
                },
                "bilateralSigmaValue": 0,
                "brightnessFilter": {
                  "maxBrightness": 256,
                  "minBrightness": 0
                },
                "decimationFilter": {
                  "decimationFactor": 1,
                  "decimationMode": 0
                },
                "filteringOrder": [
                  3,
                  1,
                  2,
                  4,
                  5
                ],
                "holeFilling": {
                  "enable": true,
                  "fillConfidenceThreshold": 200,
                  "highConfidenceThreshold": 210,
                  "invalidateDisparities": true,
                  "minValidDisparity": 1
                },
                "median": 0,
                "spatialFilter": {
                  "alpha": 0.5,
                  "delta": 3,
                  "enable": false,
                  "holeFillingRadius": 2,
                  "numIterations": 1
                },
                "speckleFilter": {
                  "differenceThreshold": 2,
                  "enable": false,
                  "speckleRange": 50
                },
                "temporalFilter": {
                  "alpha": 0.4000000059604645,
                  "delta": 3,
                  "enable": false,
                  "persistencyMode": 3
                },
                "thresholdFilter": {
                  "maxRange": 65535,
                  "minRange": 0
                }
              }
            },
            "mesh": {
              "meshLeftUri": "",
              "meshRightUri": "",
              "meshSize": null,
              "stepHeight": 16,
              "stepWidth": 16
            },
            "numFramesPool": 3,
            "numPostProcessingMemorySlices": -1,
            "numPostProcessingShaves": -1,
            "outHeight": null,
            "outKeepAspectRatio": true,
            "outWidth": null,
            "rectificationUseSpecTranslation": null,
            "rectifyEdgeFillColor": 0,
            "useHomographyRectification": null,
            "width": null
          }
        }
      ]
    ]
  }
}
```

## Source code

#### Python

```python
#!/usr/bin/env python3

import cv2
import depthai as dai
import numpy as np

def colorizeDepth(frameDepth):
    """Colorize a depth frame for visualization.

    Valid (non-zero) depth values are mapped through a log scale clipped to
    the 3rd..95th percentile range, then run through the JET colormap.
    Invalid (zero-depth) pixels are rendered black.

    Args:
        frameDepth: 2D numpy array of depth values; 0 marks invalid pixels.

    Returns:
        BGR uint8 image with the same spatial size as ``frameDepth``.
    """
    invalidMask = frameDepth == 0
    try:
        validDepth = frameDepth[frameDepth != 0]
        # Percentile clipping suppresses outliers at both ends of the range
        minDepth = np.percentile(validDepth, 3)
        maxDepth = np.percentile(validDepth, 95)
        logDepth = np.zeros_like(frameDepth, dtype=np.float32)
        np.log(frameDepth, where=frameDepth != 0, out=logDepth)
        logMinDepth = np.log(minDepth)
        logMaxDepth = np.log(maxDepth)
        np.nan_to_num(logDepth, copy=False, nan=logMinDepth)
        # Clip the values to the valid log range before interpolation
        logDepth = np.clip(logDepth, logMinDepth, logMaxDepth)

        # Interpolate log-depth into the 0-255 range for the colormap
        depthFrameColor = np.interp(logDepth, (logMinDepth, logMaxDepth), (0, 255))
        depthFrameColor = np.nan_to_num(depthFrameColor)
        depthFrameColor = depthFrameColor.astype(np.uint8)
        depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_JET)
        # Set invalid depth pixels to black
        depthFrameColor[invalidMask] = 0
    except IndexError:
        # Frame has no valid depth pixels (percentile on an empty selection)
        depthFrameColor = np.zeros((frameDepth.shape[0], frameDepth.shape[1], 3), dtype=np.uint8)
    return depthFrameColor

# Create pipeline: RGB detection + tracking, stereo depth, and remapping of
# tracklet boxes from the RGB stream onto the depth stream.
with dai.Pipeline() as pipeline:
    cameraNode = pipeline.create(dai.node.Camera).build()
    detectionNetwork = pipeline.create(dai.node.DetectionNetwork).build(cameraNode, dai.NNModelDescription("yolov6-nano"))
    objectTracker = pipeline.create(dai.node.ObjectTracker)
    labelMap = detectionNetwork.getClasses()
    monoLeft = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_B)
    monoRight = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_C)
    stereo = pipeline.create(dai.node.StereoDepth)

    # Linking: stereo pair at 1280x720
    monoLeftOut = monoLeft.requestOutput((1280, 720))
    monoRightOut = monoRight.requestOutput((1280, 720))
    monoLeftOut.link(stereo.left)
    monoRightOut.link(stereo.right)

    # Tracker consumes the detections plus the frame they were computed on
    detectionNetwork.out.link(objectTracker.inputDetections)
    detectionNetwork.passthrough.link(objectTracker.inputDetectionFrame)
    detectionNetwork.passthrough.link(objectTracker.inputTrackerFrame)

    stereo.setRectification(True)
    stereo.setExtendedDisparity(True)
    stereo.setLeftRightCheck(True)
    stereo.setSubpixel(True)

    qRgb = detectionNetwork.passthrough.createOutputQueue()
    qTrack = objectTracker.out.createOutputQueue()
    qDepth = stereo.disparity.createOutputQueue()

    pipeline.start()

    def displayFrame(name: str, frame: dai.ImgFrame, tracklets: dai.Tracklets):
        """Draw tracklet boxes remapped onto ``frame`` and show the result.

        The tracklet ROIs are normalized to the detection frame; they are
        denormalized there and then remapped onto ``frame`` using the
        transformation metadata carried by both messages.
        """
        color = (0, 255, 0)
        assert tracklets.getTransformation() is not None
        cvFrame = frame.getFrame() if frame.getType() == dai.ImgFrame.Type.RAW16 else frame.getCvFrame()
        if(frame.getType() == dai.ImgFrame.Type.RAW16):
            # RAW16 frames are depth/disparity: colorize for display
            cvFrame = colorizeDepth(cvFrame)
        for tracklet in tracklets.tracklets:
            # Get the shape of the frame from which the detections originated for denormalization
            normShape = tracklets.getTransformation().getSize()

            # Create rotated rectangle to remap
            # Here we use an intermediate dai.Rect to create a dai.RotatedRect to simplify construction and denormalization
            rotRect = dai.RotatedRect(tracklet.roi.denormalize(normShape[0], normShape[1]), 0)
            # Remap the detection rectangle to target frame
            remapped = tracklets.getTransformation().remapRectTo(frame.getTransformation(), rotRect)
            # Remapped rectangle could be rotated, so we get the bounding box
            bbox = [int(l) for l in remapped.getOuterRect()]
            cv2.putText(
                cvFrame,
                labelMap[tracklet.label],
                (bbox[0] + 10, bbox[1] + 20),
                cv2.FONT_HERSHEY_TRIPLEX,
                0.5,
                255,
            )
            cv2.putText(
                cvFrame,
                f"{int(tracklet.srcImgDetection.confidence * 100)}%",
                (bbox[0] + 10, bbox[1] + 40),
                cv2.FONT_HERSHEY_TRIPLEX,
                0.5,
                255,
            )
            cv2.rectangle(cvFrame, (bbox[0], bbox[1]), (bbox[2], bbox[3]), color, 2)
        # Show the frame
        cv2.imshow(name, cvFrame)

    while pipeline.isRunning():
        inRgb: dai.ImgFrame = qRgb.get()
        inTrack: dai.Tracklets = qTrack.get()
        inDepth: dai.ImgFrame = qDepth.get()
        hasRgb = inRgb is not None
        hasDepth = inDepth is not None
        hasTrack = inTrack is not None
        # Only draw when tracklets are available; displayFrame dereferences
        # the tracklets' transformation, so a missing message would crash.
        if hasRgb and hasTrack:
            displayFrame("rgb", inRgb, inTrack)
        if hasDepth and hasTrack:
            displayFrame("depth", inDepth, inTrack)
        if cv2.waitKey(1) == ord("q"):
            pipeline.stop()
            break
```

#### C++

```cpp
#include <algorithm>  // Required for std::sort, std::copy_if, std::transform
#include <cmath>      // Required for std::log, std::isnan, std::isinf
#include <iostream>
#include <opencv2/opencv.hpp>
#include <string>
#include <vector>

#include "depthai/depthai.hpp"
#include "xtensor/containers/xadapt.hpp"
#include "xtensor/core/xmath.hpp"

/// Colorize a depth frame for visualization.
///
/// Valid (non-zero) depth values are mapped through a log scale clipped to the
/// 3rd..95th percentile range, then run through the JET colormap. Invalid
/// (zero-depth) pixels are rendered black. Returns an all-black image if the
/// frame contains no valid depth or an error occurs.
cv::Mat colorizeDepth(cv::Mat frameDepth) {
    cv::Mat invalidMask = frameDepth == 0;
    cv::Mat depthFrameColor;

    try {
        cv::Mat frameDepthFloat;
        frameDepth.convertTo(frameDepthFloat, CV_32F);
        xt::xtensor<float, 2> depth =
            xt::adapt((float*)frameDepthFloat.data, {static_cast<size_t>(frameDepthFloat.rows), static_cast<size_t>(frameDepthFloat.cols)});

        // Get valid depth values (non-zero)
        std::vector<float> validDepth;
        validDepth.reserve(depth.size());
        std::copy_if(depth.begin(), depth.end(), std::back_inserter(validDepth), [](float x) { return x != 0; });

        if(validDepth.empty()) {
            return cv::Mat::zeros(frameDepth.rows, frameDepth.cols, CV_8UC3);
        }

        // Calculate percentiles (3rd and 95th) to suppress outliers
        std::sort(validDepth.begin(), validDepth.end());
        float minDepth = validDepth[static_cast<size_t>(validDepth.size() * 0.03)];
        float maxDepth = validDepth[static_cast<size_t>(validDepth.size() * 0.95)];

        // Take log of depth values
        auto logDepth = xt::eval(xt::log(depth));
        float logMinDepth = std::log(minDepth);
        float logMaxDepth = std::log(maxDepth);

        // Replace invalid values with logMinDepth using a naive implementation
        auto logDepthData = logDepth.data();
        auto depthData = depth.data();
        const size_t size = depth.size();
        for(size_t i = 0; i < size; i++) {
            if(std::isnan(logDepthData[i]) || std::isinf(logDepthData[i]) || depthData[i] == 0.0f) {
                logDepthData[i] = logMinDepth;
            }
        }

        // Clip values
        logDepth = xt::clip(logDepth, logMinDepth, logMaxDepth);

        // Normalize to 0-255 range. Guard against a degenerate range
        // (near-constant depth), which would otherwise divide by zero and
        // produce inf -> undefined behavior on the cast to uchar below.
        const float logRange = logMaxDepth - logMinDepth;
        xt::xtensor<float, 2> normalizedDepth;
        if(logRange > 0.0f) {
            normalizedDepth = (logDepth - logMinDepth) / logRange * 255.0f;
        } else {
            normalizedDepth = xt::zeros_like(logDepth);
        }

        // Convert to CV_8UC1
        cv::Mat depthMat(frameDepth.rows, frameDepth.cols, CV_8UC1);
        std::transform(normalizedDepth.begin(), normalizedDepth.end(), depthMat.data, [](float x) { return static_cast<uchar>(x); });

        // Apply colormap
        cv::applyColorMap(depthMat, depthFrameColor, cv::COLORMAP_JET);

        // Set invalid pixels to black
        depthFrameColor.setTo(cv::Scalar(0, 0, 0), invalidMask);

    } catch(const std::exception& e) {
        std::cerr << "Error in colorizeDepth: " << e.what() << std::endl;
        return cv::Mat::zeros(frameDepth.rows, frameDepth.cols, CV_8UC3);
    }

    return depthFrameColor;
}

/// Draw tracklet bounding boxes remapped onto `frame` and show the result.
///
/// Tracklet ROIs are normalized to the detection frame; they are denormalized
/// there and remapped onto `frame` via the transformation metadata carried by
/// both messages. RAW16 frames (depth/disparity) are colorized first.
void displayFrame(const std::string& name,
                  std::shared_ptr<dai::ImgFrame> frame,
                  std::shared_ptr<dai::Tracklets> tracklets,
                  const std::vector<std::string>& labelMap) {
    cv::Scalar color(0, 255, 0);
    cv::Mat cvFrame;

    if(frame->getType() == dai::ImgFrame::Type::RAW16) {
        cvFrame = colorizeDepth(frame->getFrame());
    } else {
        cvFrame = frame->getCvFrame();
    }

    if(!tracklets) {
        // No tracking data for this frame: show it without annotations.
        cv::imshow(name, cvFrame);
        return;
    }

    const auto& sourceTransform = tracklets->transformation;
    const auto& targetTransform = frame->transformation;

    for(const auto& tracklet : tracklets->tracklets) {
        // Shape of the frame the detections originated from, for denormalization
        auto normShape = sourceTransform.getSize();

        dai::Rect rect = tracklet.roi;
        rect = rect.denormalize(static_cast<float>(normShape.first), static_cast<float>(normShape.second));
        dai::RotatedRect rotRect(rect, 0);

        // Remap onto the target frame; the result may be rotated, so take its
        // axis-aligned outer rectangle for drawing.
        auto remapped = sourceTransform.remapRectTo(targetTransform, rotRect);
        auto bbox = remapped.getOuterRect();

        // Bounds-check the label: labelMap may be empty (main builds it with
        // value_or({})), and an unchecked index would be out-of-bounds UB.
        const auto labelIndex = static_cast<size_t>(tracklet.label);
        const std::string labelText =
            (tracklet.label >= 0 && labelIndex < labelMap.size()) ? labelMap[labelIndex] : std::to_string(tracklet.label);

        cv::putText(cvFrame,
                    labelText,
                    cv::Point(static_cast<int>(bbox[0]) + 10, static_cast<int>(bbox[1]) + 20),
                    cv::FONT_HERSHEY_TRIPLEX,
                    0.5,
                    cv::Scalar(255, 255, 255));
        cv::putText(cvFrame,
                    std::to_string(static_cast<int>(tracklet.srcImgDetection.confidence * 100)) + "%",
                    cv::Point(static_cast<int>(bbox[0]) + 10, static_cast<int>(bbox[1]) + 40),
                    cv::FONT_HERSHEY_TRIPLEX,
                    0.5,
                    cv::Scalar(255, 255, 255));
        cv::rectangle(cvFrame,
                      cv::Point(static_cast<int>(bbox[0]), static_cast<int>(bbox[1])),
                      cv::Point(static_cast<int>(bbox[2]), static_cast<int>(bbox[3])),
                      color,
                      2);
    }
    cv::imshow(name, cvFrame);
}

int main() {
    dai::Pipeline pipeline;

    auto cameraNode = pipeline.create<dai::node::Camera>();
    cameraNode->build();

    auto detectionNetwork = pipeline.create<dai::node::DetectionNetwork>();
    dai::NNModelDescription modelDescription;
    modelDescription.model = "yolov6-nano";
    detectionNetwork->build(cameraNode, modelDescription);
    auto objectTracker = pipeline.create<dai::node::ObjectTracker>();
    auto labelMap = detectionNetwork->getClasses().value_or(std::vector<std::string>{});

    auto monoLeft = pipeline.create<dai::node::Camera>();
    monoLeft->build(dai::CameraBoardSocket::CAM_B);
    auto monoRight = pipeline.create<dai::node::Camera>();
    monoRight->build(dai::CameraBoardSocket::CAM_C);
    auto stereo = pipeline.create<dai::node::StereoDepth>();

    // Linking
    auto monoLeftOut = monoLeft->requestOutput(std::make_pair(1280, 720));
    auto monoRightOut = monoRight->requestOutput(std::make_pair(1280, 720));
    monoLeftOut->link(stereo->left);
    monoRightOut->link(stereo->right);

    detectionNetwork->out.link(objectTracker->inputDetections);
    detectionNetwork->passthrough.link(objectTracker->inputDetectionFrame);
    detectionNetwork->passthrough.link(objectTracker->inputTrackerFrame);

    stereo->setRectification(true);
    stereo->setExtendedDisparity(true);
    stereo->setLeftRightCheck(true);
    stereo->setSubpixel(true);

    auto qRgb = detectionNetwork->passthrough.createOutputQueue();
    auto qTrack = objectTracker->out.createOutputQueue();
    auto qDepth = stereo->disparity.createOutputQueue();

    pipeline.start();

    while(pipeline.isRunning()) {
        auto inRgb = qRgb->tryGet<dai::ImgFrame>();
        auto inTrack = qTrack->tryGet<dai::Tracklets>();
        auto inDepth = qDepth->tryGet<dai::ImgFrame>();

        bool hasRgb = inRgb != nullptr;
        bool hasDepth = inDepth != nullptr;
        bool hasTrack = inTrack != nullptr;

        if(hasRgb && hasTrack) {
            displayFrame("rgb", inRgb, inTrack, labelMap);
        }
        if(hasDepth && hasTrack) {
            displayFrame("depth", inDepth, inTrack, labelMap);
        }

        if(cv::waitKey(1) == 'q') {
            pipeline.stop();
            break;
        }
    }

    return 0;
}
```

### Need assistance?

Head over to [Discussion Forum](https://discuss.luxonis.com/) for technical support or any other questions you might have.
