
gstreamer appsrc network video streaming


I am trying to play a video stream over the network using GStreamer appsrc.

I found a good example here:

gstreamer appsrc test application

http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html

Using the example above, I can play video in an X window using Xlib. When the pipeline is set to the PLAYING state, the "need-data" signal is emitted at some point, and in the start_feed callback the data read from a video file is injected into appsrc as a GstBuffer; the sample video then plays.

I am trying to get the data from the network instead of a file, so I figured a simple echo server could read the video file in exactly the same way and send the data to a client whenever a connection is made; the client would then take that data and feed it into appsrc. A minimal sketch of such a server is shown below.
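For illustration only (this server is not part of the original post; the port number, chunk size, and the lack of error handling are my assumptions), the sending side could look like this:

/* Minimal TCP sender sketch: stream a file to the first client that
 * connects. Port 5000 and 4 KB chunks are arbitrary choices. */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <unistd.h>

#define CHUNK 4096

int main(int argc, char *argv[])
{
    struct sockaddr_in addr;
    char buf[CHUNK];
    size_t n;
    FILE *file;
    int server, client;

    if (argc < 2) {
        fprintf(stderr, "usage: %s <video_file>\n", argv[0]);
        return 1;
    }

    file = fopen(argv[1], "rb");
    server = socket(AF_INET, SOCK_STREAM, 0);

    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    addr.sin_port = htons(5000);

    bind(server, (struct sockaddr *)&addr, sizeof(addr));
    listen(server, 1);
    client = accept(server, NULL, NULL); /* block until a client connects */

    /* Stream the file in fixed-size chunks, exactly as read_data() below
     * reads it, until EOF. */
    while ((n = fread(buf, 1, CHUNK, file)) > 0)
        write(client, buf, n);

    close(client);
    close(server);
    fclose(file);
    return 0;
}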

My question is: how do I feed this streamed data into the appsrc pipeline? Does anyone have advice or a good reference?

Here is working example code based on the linked example above.

// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>
#include <unistd.h> // sleep()
#include <stdbool.h>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>
#include <gst/interfaces/xoverlay.h>

#define BUFF_SIZE (640*480*3)//(1024)
#define BORDER_WIDTH 2

#define DEBUG printf

typedef unsigned int uint32;
typedef unsigned char uint8;

typedef struct {
    GstPipeline *pipeline;
    GstAppSrc *src;
    GstElement *sink;
    GstElement *decoder;
    GstElement *ffmpeg;
    GstElement *videosink;
    GMainLoop *loop;
    guint sourceid;
    FILE *file;
} gst_app_t;

static gst_app_t gst_app;

static Window child_window = 0;
static Window window = 0;

/* Idle handler: read one chunk from the file and push it into appsrc. */
static gboolean read_data(gst_app_t *app)
{
    GstBuffer *buffer;
    guint8 *ptr;
    gint size;
    GstFlowReturn ret;

    ptr = g_malloc(BUFF_SIZE);
    g_assert(ptr);

    size = fread(ptr, 1, BUFF_SIZE, app->file);

    if (size == 0) {
        ret = gst_app_src_end_of_stream(app->src);
        DEBUG("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    buffer = gst_buffer_new();
    GST_BUFFER_MALLOCDATA(buffer) = ptr;
    GST_BUFFER_SIZE(buffer) = size;
    GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);

    ret = gst_app_src_push_buffer(app->src, buffer);

    if (ret != GST_FLOW_OK) {
        DEBUG("push buffer returned %d for %d bytes\n", ret, size);
        return FALSE;
    }

    /* A short read means we hit the end of the file. */
    if (size != BUFF_SIZE) {
        ret = gst_app_src_end_of_stream(app->src);
        DEBUG("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }

    return TRUE;
}

/* "need-data": attach the idle handler that feeds appsrc. */
static void start_feed(GstElement *pipeline, guint size, gst_app_t *app)
{
    if (app->sourceid == 0) {
        DEBUG("start feeding\n");
        app->sourceid = g_idle_add((GSourceFunc)read_data, app);
    }
}

/* "enough-data": detach the idle handler until appsrc asks again. */
static void stop_feed(GstElement *pipeline, gst_app_t *app)
{
    if (app->sourceid != 0) {
        DEBUG("stop feeding\n");
        g_source_remove(app->sourceid);
        app->sourceid = 0;
    }
}

/* decodebin2 exposes pads dynamically; link its video pad to ffmpegcolorspace. */
static void on_pad_added(GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    gchar *name;
    GstPad *ffmpegsink;
    GstPadLinkReturn ret;

    DEBUG("pad added\n");

    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);

    g_assert(str);

    name = (gchar *)gst_structure_get_name(str);

    DEBUG("pad name %s\n", name);

    if (g_strrstr(name, "video")) {
        ffmpegsink = gst_element_get_static_pad(gst_app.ffmpeg, "sink");
        g_assert(ffmpegsink);
        ret = gst_pad_link(pad, ffmpegsink);
        DEBUG("pad_link returned %d\n", ret);
        gst_object_unref(ffmpegsink);
    }
    gst_caps_unref(caps);
}

static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer data)
{
    gst_app_t *app = (gst_app_t *)data;

    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ELEMENT:
        /* The video sink asks for a window via the "prepare-xwindow-id"
         * element message; hand it our child X window. */
        if (gst_structure_has_name(message->structure, "prepare-xwindow-id")) {
            gst_x_overlay_set_window_handle(
                GST_X_OVERLAY(GST_MESSAGE_SRC(message)), child_window);
        }
        break;

    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *err;

        gst_message_parse_error(message, &err, &debug);
        DEBUG("Error %s\n", err->message);
        g_error_free(err);
        g_free(debug);
        g_main_loop_quit(app->loop);
        break;
    }

    case GST_MESSAGE_WARNING: {
        gchar *debug;
        GError *err;
        const gchar *name;

        gst_message_parse_warning(message, &err, &debug);
        DEBUG("Warning %s\nDebug %s\n", err->message, debug);

        name = GST_MESSAGE_SRC_NAME(message);

        DEBUG("Name of src %s\n", name ? name : "nil");
        g_error_free(err);
        g_free(debug);
        break;
    }

    case GST_MESSAGE_EOS:
        DEBUG("End of stream\n");
        g_main_loop_quit(app->loop);
        break;

    case GST_MESSAGE_STATE_CHANGED:
        break;

    default:
        DEBUG("got message %s\n",
              gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
        break;
    }

    return TRUE;
}

static gboolean terminate_playback(gpointer loop)
{
    DEBUG("Terminating playback\n");
    g_main_loop_quit((GMainLoop *)loop);
    return FALSE;
}

int gstreamer_init(int argc, char *argv[])
{
    gst_app_t *app = &gst_app;

    GstBus *bus;
    GstStateChangeReturn state_ret;

    app->file = fopen(argv[1], "rb");

    g_assert(app->file);

    /* initialization */
    gst_init(&argc, &argv);

    app->loop = g_main_loop_new(NULL, FALSE);

    /* create elements */
    app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline");

    app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc");
    app->decoder = gst_element_factory_make("decodebin2", "mydecoder");
    app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
    app->videosink = gst_element_factory_make("autovideosink", "myvideosink");

    if (!app->videosink) {
        DEBUG("output could not be found - check your install\n");
    }

    g_assert(app->src);
    g_assert(app->decoder);
    g_assert(app->ffmpeg);
    g_assert(app->videosink);

    bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
    gst_object_unref(bus);

    g_signal_connect(app->decoder, "pad-added",
                     G_CALLBACK(on_pad_added), app->ffmpeg);

    //gst_app_src_set_emit_signals(app->src, true);
    g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);

    gst_bin_add_many(GST_BIN(app->pipeline), (GstElement *)app->src,
                     app->decoder, app->ffmpeg, app->videosink, NULL);

    /* link everything together; decodebin2 is linked to ffmpegcolorspace
     * later, in on_pad_added, once its source pad appears */
    if (!gst_element_link((GstElement *)app->src, app->decoder)) {
        DEBUG("Failed to link one or more elements!\n");
        return -1;
    }

    if (!gst_element_link(app->ffmpeg, app->videosink)) {
        DEBUG("failed to link ffmpeg and videosink\n");
        return -1;
    }

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING);
    if (state_ret == GST_STATE_CHANGE_FAILURE) {
        DEBUG("Failed to start up pipeline!\n");
        return -1;
    }

    DEBUG("set state returned %d\n", state_ret);

    //g_timeout_add (15000, (GSourceFunc) terminate_playback, app->loop);

    g_main_loop_run(app->loop);

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL);
    DEBUG("set state null returned %d\n", state_ret);
    gst_object_unref(app->pipeline);

    return 0;
}

/*
* gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink
*
* 1. dependency library install
* $ sudo apt-get install gstreamer0.10-plugins-bad
* $ sudo apt-get install gstreamer0.10-ffmpeg
*
* 2. compile
* $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10
*
* 3. how to run program
* $ ./hello <video_file_name>
* $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts
*/

int main(int argc, char *argv[])
{
    Display *disp;
    Window root;
    long fgcolor, bgcolor;

    GC gc;
    XGCValues gc_val;
    int screen;

    disp = XOpenDisplay(NULL);
    if (disp == NULL) {
        fprintf(stderr, "Cannot open display\n");
        exit(1);
    }

    screen = DefaultScreen(disp);

    root = RootWindow(disp, screen);
    fgcolor = BlackPixel(disp, screen);
    bgcolor = WhitePixel(disp, screen);

    /* Outer window plus a child window that the video sink renders into. */
    window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1,
                                 fgcolor, bgcolor);

    child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1,
                                       fgcolor, bgcolor);

    gc_val.foreground = fgcolor;
    gc_val.background = bgcolor;
    gc = XCreateGC(disp, child_window, GCForeground | GCBackground, &gc_val);

    XSelectInput(disp, child_window, ExposureMask | KeyPressMask);

    g_warning("map xwindow");
    XMapWindow(disp, window);
    XMapWindow(disp, child_window);
    XSync(disp, FALSE);

    //XDrawLine (disp, window, gc, 0, 0, 1000, 800);
    //XDrawLine (disp, child_window, gc, 0, 0, 800, 600);

    gstreamer_init(argc, argv);

    /* Destroy the child before its parent. */
    XDestroyWindow(disp, child_window);
    XDestroyWindow(disp, window);

    XCloseDisplay(disp);

    return 0;
}

Best Answer

You will want at least one other thread (on each end) to handle communication over the socket (TCP, or UDP if you are on a local network). That thread typically sits in a blocking call waiting for packets. To send data, you can build a GStreamer tee and queue feeding an appsink that buffers the data and writes it to the socket (appsink, not appsrc, is the element that pulls data out of a pipeline). To receive, you pull the data from the socket into a buffer of your own. Keep in mind that the operating system's socket buffers are relatively small, and packets get dropped if you do not drain them fast enough, or if you push into a socket faster than it can send. Hence the buffer. A sketch of the receiving side follows.
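As a sketch only (none of this is in the original answer: the connected socket descriptor, the net_queue name, the net_chunk_t type, and the 4 KB chunk size are all assumptions), a reader thread could block on recv() and hand chunks to the GStreamer main loop through a GAsyncQueue:

/* Receiver-thread sketch. Assumes a TCP socket that is already connected
 * and a queue created once at startup: net_queue = g_async_queue_new(); */
#include <glib.h>
#include <sys/socket.h>

#define NET_CHUNK 4096

typedef struct {
    guint8 *data;
    gssize size; /* size <= 0 marks end of stream */
} net_chunk_t;

static GAsyncQueue *net_queue;

static gpointer socket_reader(gpointer user_data)
{
    int fd = GPOINTER_TO_INT(user_data); /* connected socket descriptor */

    for (;;) {
        net_chunk_t *chunk = g_new(net_chunk_t, 1);
        chunk->data = g_malloc(NET_CHUNK);
        chunk->size = recv(fd, chunk->data, NET_CHUNK, 0); /* blocks */

        if (chunk->size <= 0) { /* 0: peer closed, < 0: error */
            g_free(chunk->data);
            chunk->data = NULL; /* this chunk becomes the EOS sentinel */
            g_async_queue_push(net_queue, chunk);
            return NULL;
        }

        g_async_queue_push(net_queue, chunk);
    }
}

On 0.10-era GLib the thread would be started with something like g_thread_create(socket_reader, GINT_TO_POINTER(fd), FALSE, NULL). The queue is the application-level buffer the answer refers to; bound its depth (or drop data) according to your latency and memory budget.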

On the NEED_DATA signal you pull from that buffer into the pipeline using pushBuffer() (gst_app_src_push_buffer() in the C API, as sketched below). On the ENOUGH_DATA signal you can keep buffering, or handle the data however your application needs.
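Tying this to the question's code, the read_data() idle handler could pop from the hypothetical net_queue above instead of calling fread(). This is a sketch against the same GStreamer 0.10 buffer API the post already uses, relying on the net_queue/net_chunk_t assumed in the previous sketch:

/* Idle-handler sketch: pop one received chunk and push it into appsrc. */
static gboolean read_net_data(gst_app_t *app)
{
    GstBuffer *buffer;
    net_chunk_t *chunk;

    chunk = g_async_queue_try_pop(net_queue);
    if (chunk == NULL)
        return TRUE; /* nothing received yet; keep the idle handler alive */

    if (chunk->size <= 0) { /* EOS sentinel from the receiver thread */
        gst_app_src_end_of_stream(app->src);
        g_free(chunk);
        return FALSE;
    }

    /* Hand the malloc'd data to the buffer, as read_data() does above. */
    buffer = gst_buffer_new();
    GST_BUFFER_MALLOCDATA(buffer) = chunk->data;
    GST_BUFFER_SIZE(buffer) = chunk->size;
    GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
    g_free(chunk);

    if (gst_app_src_push_buffer(app->src, buffer) != GST_FLOW_OK)
        return FALSE;

    return TRUE;
}

start_feed() and stop_feed() stay exactly as in the question; only the g_idle_add() target changes to read_net_data. (A timed pop or a timeout source would avoid busy-polling an empty queue; g_async_queue_try_pop keeps the sketch simple.)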

Regarding gstreamer appsrc network video streaming, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/13074894/
