
c - Programmatically streaming from a Linux video device using the ffmpeg libraries


In my code I am able to stream a static mp4 file that is encoded with h264.

But when I use the Linux video device /dev/video0 as the input in my program, it gives this error:

failed to open file `udp://xxx.xxx.xxx.xxx:28000` or configure filtergraph.

I am not using an encoder in my program yet. I don't understand why I can't stream directly from the camera.

The static mp4 file is encoded with h264 and my code streams it fine, but when I try to stream the camera with the video4linux2 input format, it cannot be played.

Do I need to encode the camera's output in some format?

#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libavdevice/avdevice.h>   /* needed for avdevice_register_all() */

static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt, const char *tag)
{
    AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;

    printf("%s: pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
           tag,
           av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, time_base),
           av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, time_base),
           av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, time_base),
           pkt->stream_index);
}

int main(int argc, char **argv)
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename, *out_filename;
    AVInputFormat *inputFormat = av_find_input_format("video4linux2");
    int ret, i;

    /*if (argc < 3) {
        printf("usage: %s input output\n"
               "API example program to remux a media file with libavformat and libavcodec.\n"
               "The output format is guessed according to the file extension.\n"
               "\n", argv[0]);
        return 1;
    }
    in_filename  = argv[1];
    out_filename = argv[2];*/

    in_filename  = "/dev/video0";
    out_filename = "udp://xxx.xxx.xxx.xxx:28000";

    avdevice_register_all();
    avcodec_register_all();
    av_register_all();

    /* Network */
    avformat_network_init();

    /* Open the v4l2 device as the input. */
    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, inputFormat, 0)) < 0) {
        fprintf(stderr, "Could not open input file '%s'", in_filename);
        goto end;
    }

    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        fprintf(stderr, "Failed to retrieve input stream information");
        goto end;
    }

    av_dump_format(ifmt_ctx, 0, in_filename, 0);

    /* Create an MPEG-TS muxer that writes to the UDP URL. */
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);
    if (!ofmt_ctx) {
        fprintf(stderr, "Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }

    ofmt = ofmt_ctx->oformat;

    /* Copy every input stream's codec parameters to a new output stream (no re-encoding). */
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        AVStream *in_stream  = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            fprintf(stderr, "Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }

        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0) {
            fprintf(stderr, "Failed to copy context from input to output stream codec context\n");
            goto end;
        }

        out_stream->codec->codec_tag = 0;
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    av_dump_format(ofmt_ctx, 0, out_filename, 1);

    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            fprintf(stderr, "Could not open output file '%s'", out_filename);
            goto end;
        }
    }

    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        fprintf(stderr, "Error occurred when opening output file\n");
        goto end;
    }

    /* Read packets from the device, rescale their timestamps and remux them as-is. */
    while (1) {
        AVStream *in_stream, *out_stream;

        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;

        in_stream  = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];

        log_packet(ifmt_ctx, &pkt, "in");

        /* copy packet */
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        log_packet(ofmt_ctx, &pkt, "out");

        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0) {
            fprintf(stderr, "Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);
    }

    av_write_trailer(ofmt_ctx);

end:
    avformat_close_input(&ifmt_ctx);

    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_closep(&ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);

    if (ret < 0 && ret != AVERROR_EOF) {
        fprintf(stderr, "Error occurred: %s\n", av_err2str(ret));
        return 1;
    }

    return 0;
}

The output is:

pi@raspberrypi:~/source_code $ ./deneme_4
Input #0, video4linux2,v4l2, from '/dev/video0':
Duration: N/A, start: 74626.180310, bitrate: 235929 kb/s
Stream #0:0: Video: rawvideo (I420 / 0x30323449), yuv420p, 1024x768, 235929 kb/s, 25 fps, 25 tbr, 1000k tbn, 1000k tbc
Output #0, mpegts, to 'udp://xxx.xxx.xxx.xxx:28000':
Stream #0:0: Unknown: none
[mpegts @ 0x1d6a5c0] Using AVStream.codec to pass codec parameters to muxers is deprecated, use AVStream.codecpar instead.
in: pts:74626180310 pts_time:74626.2 dts:74626180310 dts_time:74626.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716356228 pts_time:74626.2 dts:6716356228 dts_time:74626.2 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626408490 pts_time:74626.4 dts:74626408490 dts_time:74626.4 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716376764 pts_time:74626.4 dts:6716376764 dts_time:74626.4 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626665690 pts_time:74626.7 dts:74626665690 dts_time:74626.7 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716399912 pts_time:74626.7 dts:6716399912 dts_time:74626.7 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626923514 pts_time:74626.9 dts:74626923514 dts_time:74626.9 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716423116 pts_time:74626.9 dts:6716423116 dts_time:74626.9 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627180935 pts_time:74627.2 dts:74627180935 dts_time:74627.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716446284 pts_time:74627.2 dts:6716446284 dts_time:74627.2 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627439174 pts_time:74627.4 dts:74627439174 dts_time:74627.4 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716469526 pts_time:74627.4 dts:6716469526 dts_time:74627.4 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627695789 pts_time:74627.7 dts:74627695789 dts_time:74627.7 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716492621 pts_time:74627.7 dts:6716492621 dts_time:74627.7 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627951834 pts_time:74628 dts:74627951834 dts_time:74628 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716515665 pts_time:74628 dts:6716515665 dts_time:74628 duration:3600 duration_time:0.04 stream_index:0
in: pts:74628208064 pts_time:74628.2 dts:74628208064 dts_time:74628.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716538726 pts_time:74628.2 dts:6716538726 dts_time:74628.2 duration:3600 duration_time:0.04 stream_index:0
^C

Best Answer

/dev/video0 delivers raw pixel data, and as far as I know no streaming format supports that directly. You have to encode/compress each frame before writing it out. The mp4 files work because the data in the file is already encoded.
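A minimal sketch of that extra encode step, assuming the v4l2 input really is rawvideo/yuv420p as in the log above. Only the libav* calls are real API; the helper names open_h264_encoder and encode_and_write are hypothetical and the settings are just examples, not a drop-in replacement for the code in the question:

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

/* Hypothetical helper: open an H.264 encoder (libx264) for the raw camera frames. */
static AVCodecContext *open_h264_encoder(int width, int height, AVRational time_base)
{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264); /* requires libx264 */
    AVCodecContext *enc;

    if (!codec)
        return NULL;

    enc = avcodec_alloc_context3(codec);
    enc->width     = width;
    enc->height    = height;
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;
    enc->time_base = time_base;              /* e.g. (AVRational){1, 25} */
    enc->gop_size  = 25;
    av_opt_set(enc->priv_data, "preset", "ultrafast", 0);
    av_opt_set(enc->priv_data, "tune", "zerolatency", 0);

    if (avcodec_open2(enc, codec, NULL) < 0)
        return NULL;
    return enc;
}

/* Hypothetical helper: in the read loop, instead of forwarding pkt as-is,
 * first turn the rawvideo packet into an AVFrame (decode it with a rawvideo
 * decoder via avcodec_send_packet/avcodec_receive_frame, or fill the frame
 * from pkt.data yourself), then encode the frame and mux what comes out. */
static int encode_and_write(AVFormatContext *ofmt_ctx, AVCodecContext *enc,
                            AVStream *out_stream, AVFrame *frame)
{
    int ret = avcodec_send_frame(enc, frame);   /* frame == NULL flushes the encoder */

    while (ret >= 0) {
        AVPacket out_pkt;
        av_init_packet(&out_pkt);
        out_pkt.data = NULL;
        out_pkt.size = 0;

        ret = avcodec_receive_packet(enc, &out_pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;
        if (ret < 0)
            return ret;

        /* Rescale encoder timestamps to the output stream and mux the packet. */
        av_packet_rescale_ts(&out_pkt, enc->time_base, out_stream->time_base);
        out_pkt.stream_index = out_stream->index;
        ret = av_interleaved_write_frame(ofmt_ctx, &out_pkt);
        av_packet_unref(&out_pkt);
    }
    return ret;
}

With this approach the output stream would take its parameters from the encoder (for example with avcodec_parameters_from_context) rather than copying them from the raw input stream, and encode_and_write(ofmt_ctx, enc, out_stream, NULL) would be called once at the end to flush the encoder before av_write_trailer. Alternatively, if the camera itself can deliver MJPEG or H.264, requesting that via the v4l2 demuxer's input_format option when opening the device avoids software encoding altogether.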

Regarding "c - Programmatically streaming from a Linux video device using the ffmpeg libraries", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/58974770/
