# Host Display

This example demonstrates how to use a HostDisplay node within a DepthAI pipeline to display frames obtained from the device using
OpenCV. The HostDisplay node is a custom host node that receives image frames from the DepthAI pipeline and displays them on the
host machine using OpenCV's imshow function.

This example serves as an alternative to manually calling .get() on pipeline queues to retrieve frames and display them using
OpenCV. Instead, the HostDisplay node handles the frame retrieval and display process, making it easier to visualize the output of
the pipeline. It is the most basic example of a custom host node.

This example requires the DepthAI v3 API, see [installation instructions](https://docs.luxonis.com/software-v3/depthai.md).

## Pipeline

### examples/display.pipeline.json

```json
{"pipeline": {"connections": [{"node1Id": 0, "node1Output": "0", "node1OutputGroup": "dynamicOutputs", "node2Id": 3, "node2Input": "in", "node2InputGroup": ""}], "globalProperties": {"calibData": null, "cameraTuningBlobSize": null, "cameraTuningBlobUri": "", "leonCssFrequencyHz": 700000000.0, "leonMssFrequencyHz": 700000000.0, "pipelineName": null, "pipelineVersion": null, "sippBufferSize": 18432, "sippDmaBufferSize": 16384, "xlinkChunkSize": -1}, "nodes": [[3, {"alias": "", "id": 3, "ioInfo": [[["", "in"], {"blocking": true, "group": "", "id": 4, "name": "in", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "XLinkOut", "parentId": -1, "properties": {"maxFpsLimit": -1.0, "metadataOnly": false, "streamName": "__x_0_dynamicOutputs_0"}}], [0, {"alias": "", "id": 0, "ioInfo": [[["dynamicOutputs", "0"], {"blocking": false, "group": "dynamicOutputs", "id": 3, "name": "0", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "raw"], {"blocking": false, "group": "", "id": 2, "name": "raw", "queueSize": 8, "type": 0, "waitForMessage": false}], [["", "mockIsp"], {"blocking": true, "group": "", "id": 1, "name": "mockIsp", "queueSize": 8, "type": 3, "waitForMessage": false}], [["", "inputControl"], {"blocking": true, "group": "", "id": 0, "name": "inputControl", "queueSize": 3, "type": 3, "waitForMessage": false}]], "logLevel": 3, "name": "Camera", "parentId": -1, "properties": {"boardSocket": 0, "cameraName": "", "fps": -1.0, "imageOrientation": -1, "initialControl": {"aeLockMode": false, "aeMaxExposureTimeUs": 0, "aeRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "afRegion": {"height": 0, "priority": 0, "width": 0, "x": 0, "y": 0}, "antiBandingMode": 0, "autoFocusMode": 3, "awbLockMode": false, "awbMode": 0, "brightness": 0, "captureIntent": 0, "chromaDenoise": 0, "cmdMask": 0, "contrast": 0, "controlMode": 0, "effectMode": 0, "enableHdr": false, "expCompensation": 0, "expManual": {"exposureTimeUs": 0, 
"frameDurationUs": 0, "sensitivityIso": 0}, "frameSyncMode": 0, "lensPosAutoInfinity": 0, "lensPosAutoMacro": 0, "lensPosition": 0, "lensPositionRaw": 0.0, "lowPowerNumFramesBurst": 0, "lowPowerNumFramesDiscard": 0, "lumaDenoise": 0, "miscControls": [], "saturation": 0, "sceneMode": 0, "sharpness": 0, "strobeConfig": {"activeLevel": 0, "enable": 0, "gpioNumber": 0}, "strobeTimings": {"durationUs": 0, "exposureBeginOffsetUs": 0, "exposureEndOffsetUs": 0}, "wbColorTemp": 0}, "isp3aFps": 0, "mockIspHeight": -1, "mockIspWidth": -1, "numFramesPoolIsp": 3, "numFramesPoolPreview": 4, "numFramesPoolRaw": 3, "numFramesPoolStill": 4, "numFramesPoolVideo": 4, "outputRequests": [{"enableUndistortion": null, "fps": {"value": null}, "resizeMode": 0, "size": {"value": {"index": 0, "value": [300, 300]}}, "type": null}], "resolutionHeight": -1, "resolutionWidth": -1}}]]}}
```

## Source Code

#### Python

```python
import depthai as dai
import cv2

class HostDisplay(dai.node.HostNode):
    """Minimal custom host node: shows each incoming frame in an OpenCV window.

    Pressing 'q' in the window stops the whole pipeline.
    """

    def build(self, frameOutput: dai.Node.Output):
        # The outputs linked here must match the parameters of `process`.
        self.link_args(frameOutput)

        # Run `process` inside the pipeline loop (`pipeline.runTasks()` or,
        # implicitly, `pipeline.run()`) rather than on a separate thread:
        # the OpenCV GUI window must be updated from the main thread.
        self.sendProcessingToPipeline(True)
        return self

    def process(self, message: dai.ImgFrame):
        cv2.imshow("HostDisplay", message.getCvFrame())
        pressed = cv2.waitKey(1)
        if pressed == ord('q'):
            print("Detected 'q' - stopping the pipeline...")
            self.stopPipeline()

# Build and run the pipeline: camera -> host display.
with dai.Pipeline() as p:
    camera = p.create(dai.node.Camera).build()
    # Request a 300x300 output and feed it to the custom host node.
    hostDisplay = p.create(HostDisplay).build(camera.requestOutput((300, 300)))

    # Blocks until the pipeline is stopped by someone else
    # (in this case the display node, when 'q' is pressed).
    p.run()
```

#### C++

```cpp
#include <atomic>
#include <csignal>
#include <iostream>
#include <memory>
#include <opencv2/opencv.hpp>

#include "depthai/depthai.hpp"

std::atomic<bool> quitEvent(false);

void signalHandler(int) {
    quitEvent = true;
}

// Minimal custom host node: shows each received frame in an OpenCV window.
// Pressing 'q' in the window (or SIGINT/SIGTERM) stops the whole pipeline.
class HostDisplay : public dai::node::CustomNode<HostDisplay> {
   public:
    HostDisplay() {
        // OpenCV GUI calls must happen on the main thread, so hand the
        // processing over to the pipeline loop instead of a worker thread.
        sendProcessingToPipeline(true);
    }

    std::shared_ptr<dai::Buffer> processGroup(std::shared_ptr<dai::MessageGroup> message) override {
        // Honor a shutdown requested asynchronously by the signal handler.
        if(quitEvent) {
            stopPipeline();
            return nullptr;
        }
        if(message == nullptr) {
            return nullptr;
        }

        auto imgFrame = message->get<dai::ImgFrame>("frame");
        if(imgFrame == nullptr) {
            return nullptr;
        }

        cv::imshow("HostDisplay", imgFrame->getCvFrame());
        const int pressedKey = cv::waitKey(1);
        if(pressedKey == 'q') {
            std::cout << "Detected 'q' - stopping the pipeline..." << std::endl;
            stopPipeline();
        }

        // Nothing is forwarded downstream by this node.
        return nullptr;
    }
};

int main() {
    signal(SIGTERM, signalHandler);
    signal(SIGINT, signalHandler);

    // Create device
    std::shared_ptr<dai::Device> device = std::make_shared<dai::Device>();

    // Create pipeline
    dai::Pipeline pipeline(device);

    // Create nodes
    auto camera = pipeline.create<dai::node::Camera>()->build();
    auto output = camera->requestOutput(std::make_pair(300, 300));

    // Create display node
    auto display = pipeline.create<HostDisplay>();
    output->link(display->inputs["frame"]);

    // Start pipeline
    pipeline.run();

    return 0;
}
```

### Need assistance?

Head over to [Discussion Forum](https://discuss.luxonis.com/) for technical support or any other questions you might have.
