luxonis / depthai-core

DepthAI C++ Library
MIT License
238 stars 128 forks source link

[BUG] Fatal error. Please report to developers. Log: 'PlgSrcMipi' '1100' when using setStartStreaming #671

Open chengguizi opened 1 year ago

chengguizi commented 1 year ago

Describe the bug

run into a reproducible issue of [3.3.4] [14.951] [system] [critical] Fatal error. Please report to developers. Log: 'PlgSrcMipi' '1100'

This occurs when I am trying to use a setup that includes AR0234 sensor, hardware sync and toggling setStopStreaming / setStartStreaming

The issue is not present when only OV928x sensors are present.

Minimal Reproducible Example

The link to the Python code is here.

Here is the reproducible code from my end.

Additional context: I am using an FFC-type board:

chengguizi commented 1 year ago

Code here

#!/usr/bin/env python3

import cv2
import depthai as dai

# --- Reproduction configuration ---
# error occurs if AR0234 is enabled
enable_color = True   # AR0234 color camera on CAM_D — the sensor that triggers the bug
enable_left = True    # second mono camera on CAM_B

enable_fsin = True    # hardware frame sync: right camera drives, others follow
fps = 30              # frame rate applied to every camera
exposure = 5000       # manual exposure shared by all cameras (presumably microseconds — per depthai API)

# Create pipeline
pipeline = dai.Pipeline()

if enable_left:

    # Left mono camera (CAM_B), streamed to the host as 'left'.
    xoutLeft = pipeline.create(dai.node.XLinkOut)
    xoutLeft.setStreamName('left')

    monoLeft = pipeline.create(dai.node.MonoCamera)
    monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_800_P)
    monoLeft.out.link(xoutLeft.input)

    monoLeft.setBoardSocket(dai.CameraBoardSocket.CAM_B)
    if enable_fsin:
        # Frame-sync INPUT: this sensor follows the external sync signal
        # (the right camera is configured as the OUTPUT side below).
        monoLeft.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
    monoLeft.setFps(fps)
    # Fixed manual exposure so frame timing is deterministic for the sync test.
    monoLeft.initialControl.setManualExposure(exposure, 100)
    monoLeft.initialControl.setBrightness(10)

if enable_color:
    # Color camera (AR0234 on CAM_D), streamed to the host as 'video'.
    # Enabling this path is what reproduces the 'PlgSrcMipi' fatal error.
    camRgb = pipeline.create(dai.node.ColorCamera)
    xoutVideo = pipeline.create(dai.node.XLinkOut)

    xoutVideo.setStreamName("video")

    # Properties
    camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_D)
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
    camRgb.setVideoSize(1920, 1200)
    # camRgb.initialControl.setAutoExposureEnable()
    # Same fixed exposure as the mono cameras (different ISO/sensitivity value).
    camRgb.initialControl.setManualExposure(exposure, 100)

    if enable_fsin:
        # Follows the hardware frame-sync signal driven by the right camera.
        camRgb.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)

    camRgb.setFps(fps)

    # Drop stale frames on the host link rather than blocking the device.
    xoutVideo.input.setBlocking(False)
    xoutVideo.input.setQueueSize(1)

    # convert to monochrome
    # manip = pipeline.create(dai.node.ImageManip)
    # manip.initialConfig.setFrameType(dai.ImgFrame.Type.RAW8)
    # camRgb.video.link(manip.inputImage)
    # manip.out.link(xoutVideo.input)
    # manip.setMaxOutputFrameSize(camRgb.getVideoWidth() * camRgb.getVideoHeight() * 3)

    camRgb.video.link(xoutVideo.input)

# Define sources and outputs

# Right mono camera (CAM_C), streamed to the host as 'right'.
# Always enabled; it is the frame-sync master when enable_fsin is set.
xoutRight = pipeline.create(dai.node.XLinkOut)
xoutRight.setStreamName('right')

monoRight = pipeline.create(dai.node.MonoCamera)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_800_P)
monoRight.out.link(xoutRight.input)

# Properties

monoRight.setBoardSocket(dai.CameraBoardSocket.CAM_C)
if enable_fsin:
    # Frame-sync OUTPUT: this sensor generates the sync signal the others follow.
    monoRight.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.OUTPUT)
monoRight.setFps(fps)

# NOTE(review): sensitivity 200 here vs 100 on the other cameras — confirm
# whether the asymmetry is intentional for the repro.
monoRight.initialControl.setManualExposure(exposure, 200)

# Camera Controls

# Single XLinkIn control stream fanned out to every enabled camera's
# inputControl, so one CameraControl message (e.g. start/stop streaming)
# reaches all sensors at once.
controlInAll = pipeline.create(dai.node.XLinkIn)
controlInAll.setStreamName('control_all')

controlInAll.out.link(monoRight.inputControl)
if enable_left:
    controlInAll.out.link(monoLeft.inputControl)
if enable_color:
    controlInAll.out.link(camRgb.inputControl)

# GPIO workaround

# On-device script (runs on LEON_CSS) that configures GPIO 46 as an input —
# presumably part of the FSIN wiring workaround for this FFC board; confirm
# against the board schematic.
script = pipeline.create(dai.node.Script)
script.setProcessor(dai.ProcessorType.LEON_CSS)
script.setScript("""
    import GPIO
    GPIO.setup(46, GPIO.IN)
""")

config = dai.Device.Config()
# set as output just in case, as input doesn't work yet...
config.board.gpio[46] = dai.BoardConfig.GPIO(dai.BoardConfig.GPIO.OUTPUT,
                                            dai.BoardConfig.GPIO.Level.LOW)

# Host-side flag tracking whether the cameras are currently streaming;
# toggled by the spacebar handler in the main loop.
streaming = True

# Main loop: display frames from every enabled camera and toggle
# start/stop streaming on spacebar — the toggle is what reproduces the
# 'PlgSrcMipi' '1100' fatal error when the AR0234 is enabled.
with dai.Device(config) as device:
    device.startPipeline(pipeline)

    # Host output queues: shallow and non-blocking so stale frames are
    # dropped instead of backing up the device.
    if enable_color:
        video = device.getOutputQueue(name="video", maxSize=4, blocking=False)

    if enable_left:
        qLeft = device.getOutputQueue(name="left", maxSize=4, blocking=False)
    qRight = device.getOutputQueue(name="right", maxSize=4, blocking=False)

    # Single control queue fanned out to all cameras' inputControl.
    controlQueue = device.getInputQueue(controlInAll.getStreamName())

    while True:
        # Always bind the frame variables (None when a camera is disabled)
        # so the display checks below never touch an unbound name.
        inLeft = qLeft.tryGet() if enable_left else None
        videoIn = video.tryGet() if enable_color else None
        inRight = qRight.tryGet()

        # Get BGR frame from NV12 encoded video frame to show with opencv.
        # Visualizing the frame on slower hosts might have overhead.
        # tryGet() returns None when no new frame has arrived.
        if inLeft is not None:
            cv2.imshow("left", inLeft.getCvFrame())
            # print(f"left {inLeft.getTimestamp().total_seconds()}")

        if videoIn is not None:
            cv2.imshow("video", videoIn.getCvFrame())
            # print(f"video {videoIn.getTimestamp().total_seconds()}")

        if inRight is not None:
            cv2.imshow("right", inRight.getCvFrame())
            # print(f"right {inRight.getTimestamp().total_seconds()}")

        key = cv2.waitKey(1)
        if key == ord(' '):
            # Broadcast a stop/start toggle to every camera at once —
            # this is the action that triggers the reported crash.
            ctrl = dai.CameraControl()
            if streaming:
                ctrl.setStopStreaming()
            else:
                ctrl.setStartStreaming()
            controlQueue.send(ctrl)
            streaming = not streaming
            print("started streaming" if streaming else "stopped streaming")
        elif key == ord('q'):
            break

# Release the OpenCV display windows on exit.
cv2.destroyAllWindows()