
android - Segfault after 15 seconds when reading an RTSP stream with ffmpeg in Android


I'm trying to read an RTSP stream using ffmpeg via javacv. I can view the first 15 seconds or so before the call to sws_scale segfaults. Does this mean I'm running out of memory? If so, any idea what I'm doing wrong?

    av_register_all();
    avcodec.avcodec_register_all();
    avformat_network_init();
    AVFormatContext avFormatContext = avformat.avformat_alloc_context();

    if(0 != avformat_open_input(avFormatContext, "rtsp://192.168.0.107:7654/lov2.ffm", null, null)){
        throw new RuntimeException("avformat_open_input failed");
    }

    if(avformat_find_stream_info(avFormatContext, null) < 0){
        throw new RuntimeException("avformat_find_stream_info failed");
    }

    int video_stream_index = 0;

    //search video stream
    for(int i = 0; i < avFormatContext.nb_streams(); i++){
        if(avFormatContext.streams(i).codec().codec_type() == AVMEDIA_TYPE_VIDEO){
            video_stream_index = i;
        }
    }
    Log.d(TAG, "checkpoint 3");
    AVPacket packet = new AVPacket();
    av_init_packet(packet);

    AVStream stream = new AVStream();
    int cnt = 0;

    //start reading packets from stream and write them to file
    av_read_play(avFormatContext);//play RTSP
    AVCodecContext avCodecContext = avFormatContext.streams(video_stream_index).codec();
    AVCodec codec = avcodec_find_decoder(avCodecContext.codec_id());

    if (codec == null){
        throw new RuntimeException("codec not found");
    }

    if (avcodec_open2(avCodecContext, codec, null) < 0){
        throw new RuntimeException("could not open codec");
    }

    SwsContext img_convert_ctx = new SwsContext();
    img_convert_ctx = swscale.sws_getCachedContext(
            img_convert_ctx,
            avCodecContext.width(), avCodecContext.height(),
            avCodecContext.pix_fmt(), avCodecContext.width(), avCodecContext.height(),
            PIX_FMT_RGB24, SWS_BICUBIC, null, null, null
    );

    int size = avpicture_get_size(PIX_FMT_YUV420P, avCodecContext.width(), avCodecContext.height());
    BytePointer picture_buf = new BytePointer(av_malloc(size));
    AVFrame frame = avcodec_alloc_frame();
    AVFrame picrgb = avcodec_alloc_frame();

    int size2 = avpicture_get_size(avutil.PIX_FMT_RGB24, avCodecContext.width(), avCodecContext.height());
    BytePointer picture_buf2 = new BytePointer(av_malloc(size2));

    avpicture_fill(frame, picture_buf, PIX_FMT_YUV420P, avCodecContext.width(), avCodecContext.height());
    avpicture_fill(picrgb, picture_buf2, avutil.PIX_FMT_RGB24, avCodecContext.width(), avCodecContext.height());
    Log.d(TAG, String.format("h: %d, w: %d; h: %d w: %d", avCodecContext.width(), avCodecContext.height(), avCodecContext.height(), frame.width()));
    Log.d(TAG, "checkpoint 6");
    Bitmap bmp = Bitmap.createBitmap(avCodecContext.width(), avCodecContext.height(), Bitmap.Config.RGB_565);
    while(av_read_frame(avFormatContext, packet) >= 0){

        if(packet.size() > 0 && packet.stream_index() == video_stream_index){//packet is video
            int[] gotFrame = new int[]{0};
            int len = avcodec_decode_video2(avCodecContext, frame, gotFrame, packet);
            if(gotFrame[0] > 0){
                swscale.sws_scale(
                        img_convert_ctx,
                        frame.data(0), frame.linesize(),
                        0, avCodecContext.height(),
                        picrgb.data(0), picrgb.linesize());
                bmp.copyPixelsFromBuffer(picture_buf2.asByteBuffer());
                videoView.drawFrame(bmp);
            }
        }
        av_free_packet(packet);
        av_init_packet(packet);
    }
    av_free(frame);
    av_free(picrgb);
    av_free(picture_buf);
    av_free(picture_buf2);

    av_read_pause(avFormatContext);

Stack trace:
 F/libc    ( 7460): Fatal signal 11 (SIGSEGV) at 0x76100005 (code=1), thread 7474 (Thread-13601)
E/Sensors ( 720): accelHandler 0.201182 0.180824 10.950029
I/DEBUG ( 6409): *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
I/DEBUG ( 6409): Build fingerprint: 'Verizon/d2vzw/d2vzw:4.1.2/JZO54K/I535VRBMB1:user/release-keys'
I/DEBUG ( 6409): pid: 7460, tid: 7474, name: Thread-13601 >>> com.example.javacv.stream.test2 <<<
I/DEBUG ( 6409): signal 11 (SIGSEGV), code 1 (SEGV_MAPERR), fault addr 76100005
I/DEBUG ( 6409): r0 57995360 r1 594f3c30 r2 59349200 r3 59349020
I/DEBUG ( 6409): r4 59349020 r5 76100005 r6 00000060 r7 594f3cf0
I/DEBUG ( 6409): r8 59349200 r9 59349020 sl 000001e0 fp 76100005
I/DEBUG ( 6409): ip 594f3cf0 sp 5e08bb08 lr 59308bec pc 593103b8 cpsr 00000010

FAQ:
  • Why not use MediaPlayer?

  • MediaPlayer forces a delay of about 2 seconds on the stream, and I really need <100 ms.

Best Answer

I was passing the wrong arguments to sws_scale. It should have been:

    swscale.sws_scale(
            img_convert_ctx,
            new PointerPointer(frame), frame.linesize(),
            0, avCodecContext.height(),
            new PointerPointer(picrgb), picrgb.linesize());
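For context, here is roughly how the decode loop from the question looks with the corrected call in place. This is only a sketch that reuses the variables and javacv bindings declared above; the key point is that sws_scale's source and destination arguments must be the frames' plane-pointer arrays, which new PointerPointer(frame) effectively provides (AVFrame begins with its data[] array), rather than a single pointer to plane 0:

    while(av_read_frame(avFormatContext, packet) >= 0){
        if(packet.size() > 0 && packet.stream_index() == video_stream_index){
            int[] gotFrame = new int[]{0};
            avcodec_decode_video2(avCodecContext, frame, gotFrame, packet);
            if(gotFrame[0] > 0){
                // Pass the whole plane-pointer arrays, not frame.data(0)/picrgb.data(0)
                swscale.sws_scale(
                        img_convert_ctx,
                        new PointerPointer(frame), frame.linesize(),
                        0, avCodecContext.height(),
                        new PointerPointer(picrgb), picrgb.linesize());
                bmp.copyPixelsFromBuffer(picture_buf2.asByteBuffer());
                videoView.drawFrame(bmp);
            }
        }
        av_free_packet(packet);
        av_init_packet(packet);
    }

With the original frame.data(0) argument, swscale treated the raw pixel bytes of plane 0 as if they were the array of plane pointers, so it dereferenced garbage addresses when looking up the U and V planes. That only crashes once one of those bogus addresses falls outside mapped memory, which is why the stream happened to play for about 15 seconds before the SIGSEGV.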

Regarding "android - Segfault after 15 seconds when reading an RTSP stream with ffmpeg in Android", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/17892342/
