Frame syncing on OAK

This example showcases how you can use the Script node to perform message syncing of multiple streams. It uses ImgFrame timestamps to achieve syncing precision. Similar syncing demo scripts (Python) can be found in our depthai-experiments repository, in the gen2-syncing folder.
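
The core idea of the sync logic is simple: two frames are considered a match when the difference between their device timestamps is below half of the frame interval (at 30 FPS that is about 17 ms). Below is a minimal, illustrative sketch of that check; the is_synced helper and the frame_a/frame_b names are not part of the example further down, and it assumes ImgFrame.getTimestamp() returns a datetime.timedelta, as used in the full script below.
Python
from datetime import timedelta
import math

FPS = 30
# Half a frame interval: frames closer together than this are treated as captured at the same moment
MS_THRESHOLD = math.ceil(500 / FPS)

def is_synced(frame_a, frame_b) -> bool:
    # getTimestamp() returns a timedelta, so the difference can be compared to a timedelta directly
    diff = abs(frame_a.getTimestamp() - frame_b.getTimestamp())
    return diff <= timedelta(milliseconds=MS_THRESHOLD)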

Demo

Terminal log after about 13 minutes. Color and disparity streams are perfectly in sync.
Command Line
[1662574807.8811488] Stream rgb, timestamp: 7:26:21.601595, sequence number: 21852
[1662574807.8821492] Stream disp, timestamp: 7:26:21.601401, sequence number: 21852

[1662574807.913144] Stream rgb, timestamp: 7:26:21.634982, sequence number: 21853
[1662574807.9141443] Stream disp, timestamp: 7:26:21.634730, sequence number: 21853

[1662574807.9451444] Stream rgb, timestamp: 7:26:21.668243, sequence number: 21854
[1662574807.946151] Stream disp, timestamp: 7:26:21.668057, sequence number: 21854

Setup

Please run the install script to download all required dependencies. Please note that this script must be run from within the cloned repository, so first download the depthai-python repository and then run the script:
Command Line
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow the installation guide.

Source code

Python
import depthai as dai
import time

FPS = 30

pipeline = dai.Pipeline()

# Define a source - color camera
camRgb = pipeline.create(dai.node.ColorCamera)
# Since we are saving RGB frames in Script node we need to make the
# video pool size larger, otherwise the pipeline will freeze because
# the ColorCamera won't be able to produce new video frames.
camRgb.setVideoNumFramesPool(10)
camRgb.setFps(FPS)

left = pipeline.create(dai.node.MonoCamera)
left.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
left.setCamera("left")
left.setFps(FPS)

right = pipeline.create(dai.node.MonoCamera)
right.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
right.setCamera("right")
right.setFps(FPS)

stereo = pipeline.create(dai.node.StereoDepth)
stereo.initialConfig.setMedianFilter(dai.MedianFilter.KERNEL_7x7)
stereo.setLeftRightCheck(True)
stereo.setExtendedDisparity(False)
stereo.setSubpixel(False)
left.out.link(stereo.left)
right.out.link(stereo.right)

# Script node will sync high-res frames
script = pipeline.create(dai.node.Script)

# Send both streams to the Script node so we can sync them
stereo.disparity.link(script.inputs["disp_in"])
camRgb.video.link(script.inputs["rgb_in"])

script.setScript("""
    FPS=30
    import time
    from datetime import timedelta
    import math

    # Timestamp threshold (in milliseconds) under which frames will be considered synced.
    # Lower number means frames will have less delay between them, which can potentially
    # lead to dropped frames.
    MS_THRESHOLD = math.ceil(500 / FPS)

    def check_sync(queues, timestamp):
        matching_frames = []
        for name, msg_list in queues.items(): # Go through each available stream
            # node.warn(f"List {name}, len {str(len(msg_list))}")
            for i, msg in enumerate(msg_list): # Go through each frame of this stream
                time_diff = abs(msg.getTimestamp() - timestamp)
                if time_diff <= timedelta(milliseconds=MS_THRESHOLD): # If time diff is below threshold, this frame is considered in-sync
                    matching_frames.append(i) # Append the position of the synced frame, so we can later remove all older frames
                    break

        if len(matching_frames) == len(queues):
            # We have all frames synced. Remove the excess ones
            i = 0
            for name, msg_list in queues.items():
                queues[name] = queues[name][matching_frames[i]:] # Remove older (excess) frames
                i += 1
            return True
        else:
            return False # We don't have synced frames yet

    names = ['disp', 'rgb']
    frames = dict() # Dict where we store all received frames
    for name in names:
        frames[name] = []

    while True:
        for name in names:
            f = node.io[name+"_in"].tryGet()
            if f is not None:
                frames[name].append(f) # Save received frame

                if check_sync(frames, f.getTimestamp()): # Check if we have any synced frames
                    # Frames synced!
                    node.info("Synced frame!")
                    # node.warn(f"Queue size. Disp: {len(frames['disp'])}, rgb: {len(frames['rgb'])}")
                    for name, msg_list in frames.items():
                        syncedF = msg_list.pop(0) # We have removed older (excess) frames, so at position 0 of each list we have the synced frame
                        node.info(f"{name}, ts: {str(syncedF.getTimestamp())}, seq {str(syncedF.getSequenceNum())}")
                        node.io[name+'_out'].send(syncedF) # Send synced frames to the host

        time.sleep(0.001)  # Avoid lazy looping
""")

script_out = ['disp', 'rgb']

for name in script_out: # Create XLinkOut for disp/rgb streams
    xout = pipeline.create(dai.node.XLinkOut)
    xout.setStreamName(name)
    script.outputs[name+'_out'].link(xout.input)

with dai.Device(pipeline) as device:
    device.setLogLevel(dai.LogLevel.INFO)
    device.setLogOutputLevel(dai.LogLevel.INFO)
    names = ['rgb', 'disp']
    queues = [device.getOutputQueue(name) for name in names]

    while True:
        for q in queues:
            img: dai.ImgFrame = q.get()
            # Display timestamp/sequence number of two synced frames
            print(f"Time: {time.time()}. Stream {q.getName()}, timestamp: {img.getTimestamp()}, sequence number: {img.getSequenceNum()}")

Need assistance?

Head over to Discussion Forum for technical support or any other questions you might have.