awslabs / amazon-kinesis-video-streams-producer-c

https://awslabs.github.io/amazon-kinesis-video-streams-producer-c/group__PublicMemberFunctions.html
Apache License 2.0

[QUESTION]AivHeap.c:666: splitFreeBlock: Assertion `0' failed. #373

Closed fugudong closed 10 months ago

fugudong commented 1 year ago

I have a case where I push video streams to AWS with the API putKinesisVideoFrame on the same streamHandle: I push the stream for a while, then stop pushing, and repeat the process many times. After enough repetitions the KVS SDK produces this error log:

AmbaKvsServer_FrameShare: /home/gbfeng/workspace/Code_NB4G_GitLab/sdk8/ambalink/output/ambalink/build/amba_kvs-1.0/third_party/amazon-kinesis-video-streams-producer-c-1.3.0/dependency/libkvspic/kvspic-src/src/heap/src/AivHeap.c:666: splitFreeBlock: Assertion `0' failed.

Can you give me some advice, thanks.
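For reference, a minimal sketch of the start/stop cycle described above, assuming the public producer-c client APIs (createKinesisVideoStreamSync, putKinesisVideoFrame, stopKinesisVideoStreamSync, freeKinesisVideoStream); the frame payload, frame rate, and loop count are placeholders, and client/stream-info setup is omitted:

```c
#include <com/amazonaws/kinesis/video/cproducer/Include.h>

// Hypothetical repro sketch of one start/stop cycle; the whole function
// would be called repeatedly. Error handling is reduced to CHK_STATUS.
STATUS pushForAWhileThenStop(CLIENT_HANDLE clientHandle, PStreamInfo pStreamInfo,
                             PBYTE frameData, UINT32 frameSize)
{
    STATUS retStatus = STATUS_SUCCESS;
    STREAM_HANDLE streamHandle = INVALID_STREAM_HANDLE_VALUE;
    Frame frame;
    UINT64 ts = 0;

    CHK_STATUS(createKinesisVideoStreamSync(clientHandle, pStreamInfo, &streamHandle));

    // Push frames for a while...
    for (UINT32 i = 0; i < 100; i++) {
        frame.version = FRAME_CURRENT_VERSION;
        frame.trackId = DEFAULT_VIDEO_TRACK_ID;
        frame.index = i;
        frame.flags = (i % 30 == 0) ? FRAME_FLAG_KEY_FRAME : FRAME_FLAG_NONE;
        frame.decodingTs = frame.presentationTs = ts;
        frame.duration = 40 * HUNDREDS_OF_NANOS_IN_A_MILLISECOND; // placeholder 25 fps
        frame.frameData = frameData;
        frame.size = frameSize;
        CHK_STATUS(putKinesisVideoFrame(streamHandle, &frame));
        ts += frame.duration;
    }

    // ...then stop pushing until the next cycle.
    CHK_STATUS(stopKinesisVideoStreamSync(streamHandle));

CleanUp:
    if (streamHandle != INVALID_STREAM_HANDLE_VALUE) {
        freeKinesisVideoStream(&streamHandle);
    }
    return retStatus;
}
```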

disa6302 commented 1 year ago

@fugudong ,

Are you using the sample provided in this repository or are there any changes from your end?

fugudong commented 1 year ago

@disa6302 We made many changes to the sample code: we create three Kinesis streams, and every stream pushes both video and audio. Please refer to the following code.

UINT32 amazon_kvs_stream_start(int streamId, CHAR *streamName, VIDEO_CODEC_ID videoCodecId)
{
    STREAM_HANDLE streamHandle = INVALID_STREAM_HANDLE_VALUE;
    UINT32 streamHandleId = INVALID_STREAM_HANDLE_ID;
    STATUS retStatus = STATUS_SUCCESS;
    PTrackInfo pAudioTrack = NULL;
    BYTE alawAudioCpd[KVS_PCM_CPD_SIZE_BYTE];
    CHAR streamName[MAX_STREAM_NAME_LEN];
    int i, retry = 0;
    for(i = 0; i < MAX_STREAM_NUM; i++)
    {
        if(data[i].streamHandle == INVALID_STREAM_HANDLE_VALUE)
        {
            data[i].startTime = GETTIME();
            data[i].firstFrame = TRUE;

            CHK_STATUS(createRealtimeAudioVideoStreamInfoProviderWithCodecs(streamName, RETENTION_PERIOD(bootstrap->kinesis->retention_period_hours), BUFFER_DURATION(DEFAULT_BUFFER_DURATION_S), videoCodecId, AUDIO_CODEC_ID_AAC, &pStreamInfo[i]));
            // You can use createRealtimeAudioVideoStreamInfoProvider for H.264 and AAC as it uses them by default
            // To specify PCM/G.711 use createRealtimeAudioVideoStreamInfoProviderWithCodecs
            // adjust members of pStreamInfo here if needed

            // set up audio cpd.
            pAudioTrack = pStreamInfo[i]->streamCaps.trackInfoList[0].trackId == DEFAULT_AUDIO_TRACK_ID ? &pStreamInfo[i]->streamCaps.trackInfoList[0]
                                                                                         : &pStreamInfo[i]->streamCaps.trackInfoList[1];
            data[i].aacAudioCpd = (BYTE *)MEMCALLOC(1, KVS_AAC_CPD_SIZE_BYTE * sizeof(BYTE));
            CHK(data[i].aacAudioCpd != NULL, STATUS_NOT_ENOUGH_MEMORY);
            pAudioTrack->codecPrivateData = data[i].aacAudioCpd;
            pAudioTrack->codecPrivateDataSize = KVS_AAC_CPD_SIZE_BYTE;
            CHK_STATUS(mkvgenGenerateAacCpd(AAC_LC, AAC_AUDIO_TRACK_SAMPLING_RATE, AAC_AUDIO_TRACK_CHANNEL_CONFIG, pAudioTrack->codecPrivateData,
                                pAudioTrack->codecPrivateDataSize));
            // use relative time mode. Buffer timestamps start from 0
            pStreamInfo[i]->streamCaps.absoluteFragmentTimes = FALSE;

            CHK_STATUS(createKinesisVideoStreamSync(clientHandle, pStreamInfo[i], &streamHandle));

            data[i].audioIndex = 0;
            data[i].videoIndex = 0;
            data[i].audioTs = 0;
            data[i].videoTs = 0;
            data[i].streamStartTime = 0;
            ATOMIC_STORE_BOOL(&data[i].firstVideoFramePut, FALSE);

            data[i].streamHandle = streamHandle;

            kvs_info("%s: success streamHandleId:[%d] streamHandle:[%lld]\n", __FUNCTION__, i+1, data[i].streamHandle);

            streamHandleId = (UINT32)(i + 1);

            break;
        }
    }

    if(i == MAX_STREAM_NUM)
        kvs_warn("streamHandle is full.\n");

    return streamHandleId;

CleanUp:
    kvs_err("Failed with status 0x%08x\n", retStatus);

    if(data[i].streamHandle != INVALID_STREAM_HANDLE_VALUE)
    {
        freeKinesisVideoStream(&data[i].streamHandle);
    }
    if(pStreamInfo[i] != NULL)
    {
        freeStreamInfoProvider(&pStreamInfo[i]);
        pStreamInfo[i] = NULL;
    }
    MEMSET(&data[i], 0x00, SIZEOF(SampleCustomData));

    return streamHandleId;
}

INT32 amazon_kvs_video_frame_send(UINT32 streamHandleId, UINT8 *frameData, UINT32 frameSize, UINT32 frameType, UINT32 duration)
{
    INT32 rval = AWS_KVS_SUCCESS;
    Frame frame;
    STATUS status;
    UINT64 runningTime;
    DOUBLE startUpLatency;
    UINT32 streamHandleIndex = streamHandleId - 1;
    CHAR *persistentMetadataName = "gps";
    CHAR *persistentMetadataValue;
    RT_FRAMESHARE_KVS_METADATA_ARG_s pMetaData;
    int stream_id = -1;

    pthread_mutex_lock(&mutexPutFrame);

    if(data[streamHandleIndex].streamStartTime == 0)
        data[streamHandleIndex].streamStartTime = defaultGetTime();

    frame.frameData = frameData;
    frame.version = FRAME_CURRENT_VERSION;
    frame.trackId = DEFAULT_VIDEO_TRACK_ID;
    frame.duration = duration * HUNDREDS_OF_NANOS_IN_A_MICROSECOND;
    frame.decodingTs = data[streamHandleIndex].videoTs * HUNDREDS_OF_NANOS_IN_A_MICROSECOND;
    frame.presentationTs = frame.decodingTs;
    frame.index = data[streamHandleIndex].videoIndex;
    frame.flags = frameType == 1 ? FRAME_FLAG_KEY_FRAME : FRAME_FLAG_NONE;
    frame.size = frameSize;

    data[streamHandleIndex].videoIndex++;
    data[streamHandleIndex].videoTs += duration;

    if(frame.flags == FRAME_FLAG_KEY_FRAME) {
        stream_id = kvs_stream_handle_to_stream_id(streamHandleId);
        AmbaFrameShareKvs_GetMetaData(stream_id, &pMetaData);
        if(pMetaData.gps_data_len) {
            persistentMetadataValue = (CHAR *)MEMCALLOC(1, pMetaData.gps_data_len);
            MEMCPY(persistentMetadataValue, pMetaData.gps_data, pMetaData.gps_data_len);
            putKinesisVideoFragmentMetadata(data[streamHandleIndex].streamHandle, persistentMetadataName, persistentMetadataValue, TRUE);
            MEMFREE(persistentMetadataValue);
        }
    }
    status = putKinesisVideoFrame(data[streamHandleIndex].streamHandle, &frame);
    ATOMIC_STORE_BOOL(&data[streamHandleIndex].firstVideoFramePut, TRUE);
    pthread_mutex_unlock(&mutexPutFrame);

CleanUp:
    return rval;
}
niyatim23 commented 1 year ago

@fugudong, can you please share verbose SDK logs with this failure?

fugudong commented 1 year ago

@niyatim23 In our project, we first create a stream with the function amazon_kvs_stream_start, which returns a KVS stream handle. With that stream handle we push video and audio data to the Amazon server via amazon_kvs_video_frame_send and amazon_kvs_audio_frame_send. Usually we only push the stream for a while, stop, and then resume pushing to the Amazon server later. The amazon_kvs_audio_frame_send code is shown below. The logs are available at https://gitlab.com/ambafgd/kvs-bug-list/-/tree/main/kvs%20start%20stop%20cause%20aiv%20heap%20assertion: the file "kvs start stop aiv heap assertion verbose log 20130815" shows our project's KVS SDK settings, and "kvs start stop aiv heap assertion info log 20130815" shows the assertion at AivHeap.c:830: addAllocatedBlock.

INT32 amazon_kvs_audio_frame_send(UINT32 streamHandleId, UINT8 *frameData, UINT32 frameSize, UINT32 frameIndex, UINT32 duration, UINT64 ts)
{
    INT32 rval = AWS_KVS_SUCCESS;

#if !KVS_VIDEO_ONLY_MODE

Frame frame;
STATUS status;
UINT64 runningTime;
UINT32 streamHandleIndex = streamHandleId - 1;

if(get_init_state() == INIT_STATE_NONE || get_init_state() == INIT_STATE_FAILED)
{
    kvs_err("amazon kvs is not initialized\n");
    rval = AWS_KVS_FAILURE;
    goto CleanUp;
}

if(streamHandleIndex >= MAX_STREAM_NUM)
{
    kvs_err("streamHandleId %d [1 ~ %d]\n", streamHandleId, MAX_STREAM_NUM);
    rval = AWS_KVS_FAILURE;
    goto CleanUp;
}
if(data[streamHandleIndex].isStreamStart == 0) {
    rval = AWS_KVS_FAILURE;
    goto CleanUp;
}
if(data[streamHandleIndex].streamHandle == INVALID_STREAM_HANDLE_VALUE)
{
    rval = AWS_KVS_FAILURE;
    goto CleanUp;
}

// no audio can be put until first video frame is put
if (ATOMIC_LOAD_BOOL(&data[streamHandleIndex].firstVideoFramePut)) {
    // synchronize putKinesisVideoFrame to running time
    runningTime = defaultGetTime() - data[streamHandleIndex].streamStartTime;

    frame.frameData = frameData;
    frame.size = frameSize;
    frame.version = FRAME_CURRENT_VERSION;
    frame.trackId = DEFAULT_AUDIO_TRACK_ID;
    frame.duration = duration * HUNDREDS_OF_NANOS_IN_A_MICROSECOND;
    frame.decodingTs = data[streamHandleIndex].audioTs;
    #if USE_CALIB_PTS
    if (abs_ts(frame.decodingTs, runningTime) >= frame.duration / 2) {
        frame.decodingTs = runningTime;
    }
    #endif
    frame.presentationTs = frame.decodingTs; // relative time mode
    frame.index = data[streamHandleIndex].audioIndex;
    frame.flags = FRAME_FLAG_NONE; // audio track is not used to cut fragment

    data[streamHandleIndex].audioIndex++;
    data[streamHandleIndex].audioTs += frame.duration;
    status = putKinesisVideoFrame(data[streamHandleIndex].streamHandle, &frame);
    if (STATUS_FAILED(status)) {
        kvs_err("[%d] audio failed with 0x%08x [%lld] audioTs %lld videoTs %lld\n", streamHandleId, status, ts, data[streamHandleIndex].audioTs, data[streamHandleIndex].videoTs);
        rval = AWS_KVS_FAILURE;
        if ((defaultGetTime() - data[streamHandleIndex].streamPutFailTime) / HUNDREDS_OF_NANOS_IN_A_MILLISECOND > 1000) {
            amazon_kvs_stream_reset(streamHandleId);
            data[streamHandleIndex].streamPutFailTime = defaultGetTime();
        }
        if (status != data[streamHandleIndex].putKvsRetStatus) {
            if (status == STATUS_CLIENT_AUTH_CALL_FAILED || status == STATUS_CURL_PERFORM_FAILED) {
                if (streamCallbacksCb.streamErrorReportFn) {
                    streamCallbacksCb.streamErrorReportFn(0, streamHandleId, 0, 0, 0);
                }
            }
            data[streamHandleIndex].putKvsRetStatus = status;
        }
    }

#if KVS_STREAM_SEND_RATE_CTRL

    kvs_dbg("AUDIO-[%d][%lld]: sleep = (%lld - %lld) *(0.9) = [%lld]\n", streamHandleIndex, data[streamHandleIndex].streamHandle,
                frame.presentationTs, runningTime, (UINT64)((frame.presentationTs - runningTime) * 0.9));
    if (runningTime < frame.presentationTs) {
        THREAD_SLEEP(frame.presentationTs - runningTime);
    }

#endif

}

CleanUp:

#endif

return rval;

}

15013740763 commented 3 months ago

Hello, has this problem been solved? I'm having the same problem when running the example program of the KVS SDK.

Log:

Thread 1 "kvsVideoOnlyRea" received signal SIGBUS, Bus error.
0x0015415c in splitFreeBlock ()
(gdb) bt
#0  0x0015415c in splitFreeBlock ()
#1  0x0015458c in aivHeapAlloc ()
#2  0x0013ac78 in checkForAvailability ()
#3  0x7efcd92c in ?? ()
Backtrace stopped: previous frame identical to this frame (corrupt stack?)
(gdb) where
#0  0x0015415c in splitFreeBlock ()
#1  0x0015458c in aivHeapAlloc ()
#2  0x0013ac78 in checkForAvailability ()
#3  0x7efcd92c in ?? ()
Backtrace stopped: previous frame identical to this frame (corrupt stack?)
(gdb)

MushMal commented 3 months ago

The low-level heap assertion indicates corrupt memory pointers. It is likely that a piece of higher-level code has a heap overflow, a write-after-free, or in some cases a stack overflow. I recommend using the stock sample application with no modifications and adding minor changes on top until the issue reproduces, which will pinpoint the high-level code responsible.
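To illustrate the kind of higher-level bug described here (a sketch only, not a diagnosis of either report above): a write-after-free can occur if one thread frees or resets a stream while another thread is still calling putKinesisVideoFrame with the same handle, and it typically only surfaces later as a low-level heap assertion. A minimal sketch of serializing the two paths with one mutex follows; the mutex, wrapper names, and per-stream layout are assumptions, not SDK APIs.

```c
#include <pthread.h>
#include <com/amazonaws/kinesis/video/cproducer/Include.h>

// Hypothetical wrappers: one lock guards both the put path and the teardown
// path so a STREAM_HANDLE is never used after freeKinesisVideoStream has run.
static pthread_mutex_t streamLock = PTHREAD_MUTEX_INITIALIZER; // assumed per-stream

STATUS safePutFrame(STREAM_HANDLE *pStreamHandle, PFrame pFrame)
{
    STATUS status = STATUS_SUCCESS;
    pthread_mutex_lock(&streamLock);
    if (*pStreamHandle != INVALID_STREAM_HANDLE_VALUE) {
        status = putKinesisVideoFrame(*pStreamHandle, pFrame);
    }
    pthread_mutex_unlock(&streamLock);
    return status;
}

VOID safeFreeStream(STREAM_HANDLE *pStreamHandle)
{
    pthread_mutex_lock(&streamLock);
    if (*pStreamHandle != INVALID_STREAM_HANDLE_VALUE) {
        freeKinesisVideoStream(pStreamHandle);
        *pStreamHandle = INVALID_STREAM_HANDLE_VALUE; // make any stale use detectable
    }
    pthread_mutex_unlock(&streamLock);
}
```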