fan-chao / nvjmi

A codec library that wraps the Jetson Multimedia API. It is modified from https://github.com/jocover/jetson-ffmpeg, is not integrated into ffmpeg, and can be used standalone.
MIT License

Hello, could you provide a simple demo of how to call this library? #1

Closed mahxn0 closed 3 years ago

mahxn0 commented 3 years ago

Hello, I wrote an RTSP client that parses the RTSP stream. If I write the data coming out of the callback to a file and decode it with the official 00_video_decode sample, everything works fine. But when I call the interface you wrapped, I always get a segmentation fault. Am I calling it incorrectly somewhere? The program output and the gdb error information are as follows:

写入数据长度=298781
fps=0
video_type=51
height=0
width=0
frame_timestamp_usec=0
channelid=0
payload_size=298781
pts=0
ctx[0]=0x55555979d0

Thread 12 "test" received signal SIGSEGV, Segmentation fault. [Switching to Thread 0x7fa37fdf30 (LWP 4923)] 0x0000007fb7f73024 in jmi::nvjmi_decoder_put_packet (ctx=0x55555979d0, packet=0x0) at nvjmi_dec.cpp:485 warning: Source file is more recent than executable. 485 if (packet->payload_size == 0){ (gdb) bt

0 0x0000007fb7f73024 in jmi::nvjmi_decoder_put_packet (ctx=0x55555979d0, packet=0x0) at nvjmi_dec.cpp:485

1 0x0000005555556594 in CallBackStreamFun(void, int, int, int, char, RTSP_FRAME_INFO, void*) ()

2 0x0000007fb7f2a5e4 in CRtspParent::RecvData (this=this@entry=0x5555598fc0, pCRtp=0x5555c30030, pUserData=pUserData@entry=0x5555598fc0)

at /home/pes/mxj/data/nfs/mahxn0/workspace/multimedia_api/librtspclient/src/CRtspParent.cpp:1142

3 0x0000007fb7f2b500 in CRtspParent::RecvDataThread (pUserData=pUserData@entry=0x5555598fc0)

at /home/pes/mxj/data/nfs/mahxn0/workspace/multimedia_api/librtspclient/src/CRtspParent.h:68

4 0x0000007fb7f28db8 in rtsp_StartTask (p=) at /home/pes/mxj/data/nfs/mahxn0/workspace/multimedia_api/librtspclient/src/CRtspParent.cpp:96

5 0x0000007fb79df088 in start_thread (arg=0x7fffffecbf) at pthread_create.c:463

6 0x0000007fb7cd94ec in thread_start () at ../sysdeps/unix/sysv/linux/aarch64/clone.S:78

(gdb)

My original calling code is as follows:

FILE *fp4 = NULL;

int CallBackStreamFun(Rtsp_Client_Handle handle, int _channelPtr, int _mediaType, int bufflen, char *pBuf, RTSP_FRAME_INFO *_frameInfo, void *userPtr)
{
    int channel_id = _channelPtr;
    printf("写入数据长度=%ld\n", (unsigned long)bufflen);
    printf("fps=%d\n", _frameInfo->video_fps);
    printf("video_type=%d\n", _frameInfo->video_type);
    printf("height=%d\n", _frameInfo->video_height);
    printf("width=%d\n", _frameInfo->video_width);
    printf("frame_timestamp_usec=%d\n", _frameInfo->frame_timestamp_usec);
    printf("channelid=%d\n", channel_id);

    nvPacket *packet;
    packet->flags = 0;
    packet->payload_size = (unsigned long)bufflen;
    packet->payload = (unsigned char*)pBuf;
    packet->pts = (unsigned long)_frameInfo->frame_timestamp_usec;
    printf("payload_size=%ld\n", packet->payload_size);
    printf("pts=%ld\n", packet->pts);
    printf("ctx[%d]=%p\n", channel_id, ctx[channel_id]);
    nvjmi_decoder_put_packet(ctx[channel_id], packet);
    printf("put packet success\n");

    // nvFrameMeta *frame_data;
    // nvjmi_decoder_get_frame_meta(ctx[channel_id], frame_data);

    return 0;
}

int CallBackStreamFun1(Rtsp_Client_Handle handle, int _channelPtr, int _mediaType, int bufflen, char *pBuf, RTSP_FRAME_INFO *_frameInfo, void *userPtr)
{
    if (NULL == fp4)
    {
        fp4 = fopen("../data/out.264", "wb");
        int ret = fwrite(pBuf, 1, bufflen, fp4);
    }
    else
    {
        int ret = fwrite(pBuf, 1, bufflen, fp4);
    }
    return 0;
}

// Status callback
int StatueCallBack(int iRecvSum, int iRecvLostSum, int iNowStreamFlag, void *pUserData);
int StatueCallBack(int iRecvSum, int iRecvLostSum, int iNowStreamFlag, void *pUserData)
{
    printf(">>>>>> pUserData[%s] iRecvSum[%d] iRecvLostSum[%d] iNowStreamFlag[%d] DisConnectStatue: ", pUserData, iRecvSum, iRecvLostSum, iNowStreamFlag);
    printf("\n");
}

int main()
{
    int Ret;
    nvJmiCtxParam ctx_param;
    ctx_param.coding_type = NV_VIDEO_CodingH264;
    ctx_param.resize_height = 1080;
    ctx_param.resize_width = 1920;

    // Start parsing the stream
    // Step 1: initialization
    Ret = RtspClient_Init_V3(16000, NULL, 0);
    if (Ret < 0)
    {
        printf("initial rtspclient_sdk failed!\n");
    }

    // Step 2: open the streams and create the decoders
    for (int i = 0; i < multimedia_num; i++)
    {
        // Create one handle per stream
        handle[i] = RtspClient_Create();
        // Create one decoder per stream
        ctx[i] = nvjmi_create_decoder("dev0", &ctx_param);
        Ret = RtspClient_OpenStream(handle[i], URL[i], RTP_OVER_UDP, VIDEO_CODEC_H264, CallBackStreamFun, StatueCallBack, streamID[i]);
        if (0 > Ret)
        {
            printf("RtspClient_OpenStream  Failed!!!! mahxn0 \n");
            nvjmi_decoder_close(ctx[i]);
            nvjmi_decoder_free_context(&ctx[i]);
            RtspClient_CloseStream(handle[i]);
            RtspClient_Destory(handle[i]);
            handle[i] = NULL;
            sleep(20);
            continue;
        }
    }

    while (1)
    {
        sleep(10000000);
    }

    RtspClient_UnInit();
    return 0;
}

fan-chao commented 3 years ago

On my side, decoding is done as follows:

if (jmi_ctx_ == nullptr) {
    jmi::nvJmiCtxParam jmi_ctx_param{};

    if(rsz_w > 0 && rsz_h > 0){
        jmi_ctx_param.resize_width = rsz_w;
        jmi_ctx_param.resize_height = rsz_h;
    }

    if ("H264" == m_pRtspClient->GetCodeName()) {
        jmi_ctx_param.coding_type =jmi::NV_VIDEO_CodingH264;
    }
    else if ("H265" == m_pRtspClient->GetCodeName()) {
        jmi_ctx_param.coding_type = jmi::NV_VIDEO_CodingHEVC;
    }
    string dec_name = "dec-" + session_id();
    jmi_ctx_ = jmi::nvjmi_create_decoder(dec_name.data(), &jmi_ctx_param);
}

// Decode using Jetson NVDEC
jmi::nvPacket nvpacket;

nvpacket.payload_size = dataLen;
nvpacket.payload = data;

int ret{};
ret = jmi::nvjmi_decoder_put_packet(jmi_ctx_, &nvpacket);
if(ret == jmi::NVJMI_ERROR_STOP) {
    LOG_INFO(VDO_RTSP_LOG, "[{}] frameCallback: nvjmi decode error, frame callback EOF!", m_ip);
}

while (ret >= 0) {
    jmi::nvFrameMeta nvframe_meta;
    ret = jmi::nvjmi_decoder_get_frame_meta(jmi_ctx_, &nvframe_meta);
    if (ret < 0) break;

    Buffer buf;
    buf.allocate(nvframe_meta.width, nvframe_meta.height, 3, nvframe_meta.payload_size / nvframe_meta.height);
    jmi::nvjmi_decoder_retrieve_frame_data(jmi_ctx_, &nvframe_meta, (void*)buf.getData());     
}
mahxn0 commented 3 years ago

Got it, thanks. I'd also like to ask which JetPack version you are using.

fan-chao commented 3 years ago

JetPack 4.4, the release that supports the NX.

mahxn0 commented 3 years ago

Buffer buf; buf.allocate(nvframe_meta.width, nvframe_meta.height, 3, nvframe_meta.payload_size / nvframe_meta.height);

I'd like to ask: if I don't call the jmi::nvjmi_decoder_retrieve_frame_data interface here, and the callback keeps doing only ret = jmi::nvjmi_decoder_put_packet(jmi_ctx_, &nvpacket);, then after running for about ten seconds it gets stuck at dqBuffer. Is that because the buffer fills up when the data is never consumed?

if (ctx->index < (int)ctx->dec->output_plane.getNumBuffers())
{
    nvBuffer = ctx->dec->output_plane.getNthBuffer(ctx->index);
}
else
{
    printf("1\n");
    ret = ctx->dec->output_plane.dqBuffer(v4l2_buf, &nvBuffer, NULL, -1);
    if (ret < 0)
    {
        TEST_ERROR(ret < 0, "Error DQing buffer at output plane", ret);
        return NVJMI_ERROR_OUTPUT_PLANE_DQBUF;
    }
}

fan-chao commented 3 years ago

Yes. If the buffer of decoded frames is already full and the frames are not retrieved in time, subsequently decoded data cannot be put in, and the call will block there.
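
In the snippet quoted above, that corresponds to the output_plane.dqBuffer(...) call, which appears to wait indefinitely for a buffer to come free; that would explain why the program hangs after roughly ten seconds once decoded frames stop being consumed.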

mahxn0 commented 3 years ago

Yes. If the buffer of decoded frames is already full and the frames are not retrieved in time, subsequently decoded data cannot be put in, and the call will block there.

OK. About Buffer buf; buf.allocate(nvframe_meta.width, nvframe_meta.height, 3, nvframe_meta.payload_size / nvframe_meta.height); is this Buffer an NvBuffer, or can I just new an ordinary block of memory?

fan-chao commented 3 years ago

Here you can just new an ordinary block of memory.
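
For example, something like the following is enough (a minimal sketch based on the calls shown earlier in this thread; it assumes payload_size is the total byte size of the decoded frame, consistent with the allocate() call quoted above):

jmi::nvFrameMeta nvframe_meta;
if (jmi::nvjmi_decoder_get_frame_meta(jmi_ctx_, &nvframe_meta) >= 0) {
    // Plain CPU memory is sufficient; no NvBuffer is needed on the caller's side.
    unsigned char* frame = new unsigned char[nvframe_meta.payload_size];
    jmi::nvjmi_decoder_retrieve_frame_data(jmi_ctx_, &nvframe_meta, (void*)frame);
    // frame now holds the decoded image (3 channels, row pitch = payload_size / height,
    // matching the allocate() call quoted above).
    delete[] frame;
}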

mahxn0 commented 3 years ago

Here you can just new an ordinary block of memory.

Got it. So if the buffer is full, it waits for the frames to be consumed; if they are never consumed, does that mean the later data gets dropped? This design doesn't flush out the earlier data first-in-first-out like a queue would?

fan-chao commented 3 years ago

It does not flush anything out; you have to retrieve the data from the queue yourself. In general, after each put packet you should try to retrieve the decoded data.