gpt4 book ai didi

overlay - 如何使用 C 中的 Gstreamer 在视频流上叠加图片?

转载 作者:行者123 更新时间:2023-12-03 17:35:39 26 4
gpt4 key购买 nike

我想使用 Gstreamer 在来自 IP 摄像机的流上覆盖“.png”图片。
我的硬件的工作管道是:

gst-launch-1.0 
rtspsrc location=rtsp://user:pass@IP:port/channel latency=400 ! rtph264depay !
vpudec use-vpu-memory=false ! imxvideoconvert_ipu
! video/x-raw,format=I420 ! gdkpixbufoverlay
location=/home/user/folder/image.png offset-x=100 offset-y=100 ! overlaysink

当我尝试用 C 翻译这个管道时,问题就出现了。
我为此管道编写的代码运行,但显示屏上没有视频播放。播放器在将管道设置为“播放”状态之前卡住了自己。
这里有一个我的 C 实现的简单版本:

#include <gst/gst.h>
#include <glib.h>
#include <iostream>

/* Bundles every GStreamer object the program creates, so that main() and
 * the callbacks can share them through a single pointer. */
typedef struct _CustomData {
GstElement *source;    /* rtspsrc: pulls the H.264 RTP stream from the IP camera */
GstElement *rtp;       /* rtph264depay: extracts H.264 from RTP packets */
GstElement *sink;      /* overlaysink: i.MX display sink */
GstElement *vpudec;    /* vpudec: i.MX hardware H.264 decoder */
GstElement *converter, *gdkpixbufoverlay, *capsfilter ;  /* colorspace convert, PNG overlay, I420 caps filter */
GstBus *bus;           /* pipeline message bus (errors, EOS) */
GstElement *pipeline;  /* top-level container holding all elements above */
GMainLoop *loop;       /* GLib main loop driven until EOS or error */
} CustomData;


/* Bus watch callback: stops the main loop when the pipeline reports
 * end-of-stream or an error. 'data' is the GMainLoop passed at
 * gst_bus_add_watch() time. Returning TRUE keeps the watch installed. */
static gboolean bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *main_loop = (GMainLoop *) data;
GstMessageType msg_type = GST_MESSAGE_TYPE (msg);

if (msg_type == GST_MESSAGE_EOS) {
g_print ("End of stream\n");
g_main_loop_quit (main_loop);
} else if (msg_type == GST_MESSAGE_ERROR) {
GError *error = NULL;
gchar *debug = NULL;

gst_message_parse_error (msg, &error, &debug);
g_free (debug); /* debug detail is discarded; only the message is shown */
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (main_loop);
}
/* all other message types are ignored */
return TRUE;
}

/* "pad-added" handler for rtspsrc: when the source exposes a new dynamic
 * pad, link it to the depayloader's sink pad — but only if that pad is
 * not yet linked and the new pad carries RTP data. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *depay_sink = gst_element_get_static_pad (data->rtp, "sink");
GstCaps *pad_caps = NULL;

if (!gst_pad_is_linked (depay_sink)) {
const gchar *media_type;

pad_caps = gst_pad_query_caps (new_pad, NULL);
media_type = gst_structure_get_name (gst_caps_get_structure (pad_caps, 0));

if (g_str_has_prefix (media_type, "application/x-rtp")) {
if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, depay_sink)))
g_print(" Type is '%s' but link failed.\n", media_type);
else
g_print (" Link succeeded (type '%s').\n", media_type);
} else {
g_print (" It has type '%s' which is not x-rtp . Ignoring.\n",
media_type);
}
}

if (pad_caps != NULL)
gst_caps_unref (pad_caps);
gst_object_unref (depay_sink);
}



int main (int argc, char *argv[]){

CustomData data;

gst_init (NULL, NULL);

data.loop = g_main_loop_new (NULL, FALSE);

// Create gstreamer elements
data.pipeline = gst_pipeline_new ("player");

data.source = gst_element_factory_make ("rtspsrc", "source");
data.rtp = gst_element_factory_make ("rtph264depay","rtp");
data.vpudec = gst_element_factory_make ("vpudec","vpudec");
data.converter = gst_element_factory_make
("imxcompositor_ipu","converter");
data.capsfilter = gst_element_factory_make ("capsfilter", "video-
rate");
data.gdkpixbufoverlay = gst_element_factory_make
("gdkpixbufoverlay","overlaytool");
data.sink = gst_element_factory_make ("overlaysink",
"videoSink");


if (!data.pipeline || !data.source || !data.rtp || !data.vpudec ||
!data.converter || !data.capsfilter || !data.gdkpixbufoverlay || !data.sink)
{
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}

g_object_set (data.source, "location","rtsp://user:pass@IP:port/channel",
NULL);
g_object_set (data.source,"latency", 400 , NULL);
g_object_set (data.vpudec, "use-vpu-memory", false, NULL);

g_object_set (data.gdkpixbufoverlay,
"location","/home/user/folder/image.png", NULL);
g_object_set (data.gdkpixbufoverlay, "offset-x", 100 , NULL);
g_object_set (data.gdkpixbufoverlay, "offset-y", 100 , NULL);


GstCaps *capsFormat = gst_caps_from_string ("video/x-raw,format=I420");
g_object_set ( data.capsfilter, "caps", capsFormat, NULL);
gst_caps_unref(capsFormat);

//add all elements into the pipeline
gst_bin_add_many (GST_BIN (data.pipeline),
data.source,
data.rtp,
data.vpudec,
data.converter,
data.capsfilter,
data.gdkpixbufoverlay,
data.sink,
NULL);

// link all elements
gst_element_link_many ( data.rtp, data.vpudec , data.converter ,
data.capsfilter, data.gdkpixbufoverlay, data.sink, NULL);

g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler),
&data);

// Set the pipeline to "playing" state
GstStateChangeReturn ret;
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}


// Iterate
g_main_loop_run (data.loop);

// Out of the main loop, clean
g_print ("Returned, stopping playback\n");
gst_element_set_state (data.pipeline, GST_STATE_NULL);

g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (data.pipeline));

return 0;
}

有没有人看到问题?

谢谢

最佳答案

经过多次尝试,我确实发现在我发布的 C 代码中我选择了错误的元素,因此 data.converter 元素是:

data.converter = gst_element_factory_make("imxvideoconvert_ipu","converter");

而不是 imxcompositor_ipu .

关于overlay - 如何使用 C 中的 Gstreamer 在视频流上叠加图片?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/48266817/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com