Thermal People Detection

This example uses the thermal sensor found on the OAK Thermal camera to detect people with a neural network. The detected people are then drawn on the thermal camera frames. This example requires the DepthAI v3 API; see the installation instructions.

Source code

Python
#!/usr/bin/env python3

import sys
from pathlib import Path

import cv2
import depthai as dai
import numpy as np

# Path to the YOLOv6n people-detection blob trained on 256x192 thermal frames.
# An alternative blob path can be passed as the first command-line argument.
nnPath = str((Path(__file__).parent / Path('../models/yolov6n_thermal_people_256x192_openvino_2022.1_6shave.blob')).resolve().absolute())
if len(sys.argv) > 1:
    nnPath = sys.argv[1]

if not Path(nnPath).exists():
    raise FileNotFoundError(f'Required file/s not found, please run "{sys.executable} install_requirements.py"')

labels = ["person"]

device = dai.Device()

pipeline = dai.Pipeline()

# YOLO detection network that decodes the single-class (person) model on-device.
nnet = pipeline.create(dai.node.YoloDetectionNetwork)
nnet.setBlobPath(nnPath)
nnet.setConfidenceThreshold(0.5)
nnet.setNumClasses(1)
nnet.setCoordinateSize(4)
nnet.setIouThreshold(0.4)

# The thermal sensor of the OAK Thermal camera sits on board socket CAM_E.
thermalCam = pipeline.create(dai.node.Camera)
thermalCam.setBoardSocket(dai.CameraBoardSocket.CAM_E)
thermalCam.setPreviewSize(256, 192)

# Feed the raw thermal frames into the detection network.
thermalCam.raw.link(nnet.input)

# Stream the camera preview to the host.
rawOut = pipeline.createXLinkOut()
rawOut.setStreamName("preview")
thermalCam.preview.link(rawOut.input)

# Stream the detections to the host.
xoutNn = pipeline.createXLinkOut()
xoutNn.setStreamName("nn")
nnet.out.link(xoutNn.input)

# Stream the frames passed through the network, so detections can be drawn
# on the exact frame they were computed on.
xoutPass = pipeline.createXLinkOut()
xoutPass.setStreamName("pass")
nnet.passthrough.link(xoutPass.input)

device.startPipeline(pipeline)

qNn = device.getOutputQueue(name="nn", maxSize=2, blocking=False)
qPass = device.getOutputQueue(name="pass", maxSize=2, blocking=False)
qPreview = device.getOutputQueue(name="preview", maxSize=2, blocking=False)

cv2.namedWindow("nnet", cv2.WINDOW_NORMAL)
cv2.namedWindow("raw", cv2.WINDOW_NORMAL)
cv2.resizeWindow("nnet", 640, 480)
cv2.resizeWindow("raw", 640, 480)

while True:
    inNn = qNn.get()
    inPass = qPass.tryGet()
    inPreview = qPreview.get()
    if inNn is not None and inPass is not None:
        # Normalize the thermal frame to 8 bits and apply a colormap for display.
        frame = inPass.getCvFrame().astype(np.float32)
        colormappedFrame = cv2.normalize(frame, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
        colormappedFrame = cv2.applyColorMap(colormappedFrame, cv2.COLORMAP_MAGMA)

        # Detections are normalized to <0..1>; scale them to the 256x192 frame.
        for detection in inNn.detections:
            xmin = max(0.0, detection.xmin)
            ymin = max(0.0, detection.ymin)
            xmax = min(1.0, detection.xmax)
            ymax = min(1.0, detection.ymax)
            pt1 = int(xmin * 256), int(ymin * 192)
            pt2 = int(xmax * 256), int(ymax * 192)
            cv2.rectangle(colormappedFrame, pt1, pt2, (0, 255, 0))
            cv2.putText(colormappedFrame, labels[detection.label], pt1, cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2, cv2.LINE_AA)
        cv2.imshow("nnet", colormappedFrame)
    if inPreview is not None:
        cv2.imshow("raw", inPreview.getCvFrame())

    if cv2.waitKey(1) == ord("q"):
        break

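To run the example, connect an OAK Thermal device and launch the script with Python; assuming it is saved as, for example, thermal_people_detection.py (the filename here is only illustrative), an alternative blob path can be given as the first argument: `python3 thermal_people_detection.py [path/to/blob]`. Pressing q in either window quits.
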
Pipeline
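
A textual sketch of the pipeline graph, derived from the source code above:

Camera (thermal, CAM_E)
 ├─ raw ─────→ YoloDetectionNetwork ─┬─ out ─────────→ XLinkOut "nn"
 │                                   └─ passthrough ─→ XLinkOut "pass"
 └─ preview ─────────────────────────────────────────→ XLinkOut "preview"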

Need assistance?

Head over to the Discussion Forum for technical support or any other questions you might have.