python - Playing RTSP with python-gstreamer


I'm using GStreamer to play an RTSP stream from an IP camera (an Axis, for example), with a command line like this:

gst-launch-0.10 rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp latency=0 ! decodebin ! autovideosink

It works fine.

I want to control it from a PyGTK GUI, so I'm using the GStreamer Python bindings. I wrote this code:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
decoder = gst.element_factory_make("decodebin", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")

self.player.add(source, decoder, sink)
gst.element_link_many(source, decoder, sink)

bus = self.player.get_bus()
bus.add_signal_watch()
bus.enable_sync_message_emission()
bus.connect("message", self.on_message)
bus.connect("sync-message::element", self.on_sync_message)
[...]

But it doesn't work; it exits with this message:

gst.element_link_many(source, decoder,sink)
gst.LinkError: failed to link source with decoder

I also tried to refine my command line, since I only use H.264:

gst-launch-0.10 -v rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp ! rtph264depay ! ffdec_h264 ! xvimagesink

Then I implemented it in my Python code like this:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("xvimagesink", "output")

self.player.add(source, depay, decoder, sink)
gst.element_link_many(source, depay, decoder, sink)
[...]

But I get the same kind of error :(

gst.LinkError: failed to link source with depay

The problem is somewhere around my source (rtspsrc), because the same code works when a filesrc feeds decodebin (and filesrc obviously can't be used with rtph264depay).

I don't understand why it fails in Python when the same pipeline works from the CLI. Is there a GStreamer expert who could help me?

Thanks in advance.

Regards,

Best Answer

I have a "C" implementation of the code you are looking for. Converting it to "Python" should be fairly straightforward (a rough Python sketch is included after the Makefile below).

//Display RTSP streaming of video
//(c) 2011 enthusiasticgeek
// This code is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

#include <string.h>
#include <math.h>
#include <gst/gst.h>
#include <glib.h>
#include <glib/gprintf.h>  /* g_printf() is declared here, not in glib.h */

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) {
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("Stream Ends\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data) {

  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the rtsp-decoder sink pad */
  g_print ("Dynamic pad created, linking source/demuxer\n");

  sinkpad = gst_element_get_static_pad (decoder, "sink");

  gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  GstElement *source;
  GstElement *decoder;
  GstElement *sink;
  GstElement *pipeline;
  GstElement *demux;
  GstElement *colorspace;

  /* Initializing GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Expect the RTSP URL as the only argument */
  if (argc != 2) {
    g_printerr ("Usage: %s rtsp://<ip>/<path>\n", argv[0]);
    return -1;
  }

  //gst-launch-0.10 rtspsrc location=rtsp://<ip> ! decodebin ! ffmpegcolorspace ! autovideosink
  //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! mpeg4videoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
  //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink

  /* Create Pipe's Elements */
  pipeline = gst_pipeline_new ("video player");
  g_assert (pipeline);
  source = gst_element_factory_make ("rtspsrc", "Source");
  g_assert (source);
  demux = gst_element_factory_make ("rtpmp4vdepay", "Depay");
  g_assert (demux);
  decoder = gst_element_factory_make ("ffdec_mpeg4", "Decoder");
  g_assert (decoder);
  colorspace = gst_element_factory_make ("ffmpegcolorspace", "Colorspace");
  g_assert (colorspace);
  sink = gst_element_factory_make ("autovideosink", "Output");
  g_assert (sink);

  /* Make sure every element was created ok */
  if (!pipeline || !source || !demux || !decoder || !colorspace || !sink) {
    g_printerr ("One of the elements wasn't created... Exiting\n");
    return -1;
  }

  g_printf (" \nPipeline is Part(A) ->(dynamic/runtime link) Part(B)[ Part(B-1) -> Part(B-2) -> Part(B-3) ]\n\n");
  g_printf (" [source](dynamic)->(dynamic)[demux]->[decoder]->[colorspace]->[videosink] \n\n");

  /* Set video Source */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
  //g_object_set (G_OBJECT (source), "do-rtcp", TRUE, NULL);
  g_object_set (G_OBJECT (source), "latency", 0, NULL);

  /* Putting a Message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Add Elements to the Bin */
  gst_bin_add_many (GST_BIN (pipeline), source, demux, decoder, colorspace, sink, NULL);

  /* Link confirmation */
  if (!gst_element_link_many (demux, decoder, colorspace, sink, NULL)) {
    g_warning ("Linking part (B) Fail...");
  }

  g_printf ("\nNote that the source will be linked to the demuxer(depayload) dynamically.\n\
The reason is that rtspsrc may contain various elements (for example\n\
audio and video). The source pad(s) will be created at run time,\n\
by the rtspsrc when it detects the amount and nature of elements.\n\
Therefore we connect a callback function which will be executed\n\
when the \"pad-added\" is emitted.\n");

  /* Dynamic Pad Creation */
  if (!g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), demux)) {
    g_warning ("Linking part (A) with part (B) Fail...");
  }

  /* Run the pipeline */
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* Ending Playback */
  g_print ("End of the Streaming... ending the playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* Eliminating Pipeline */
  g_print ("Eliminating Pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}

Makefile

test = test12
ext = c
CC = gcc
CPP = g++

gstreamer:
	$(CC) -g $(test).$(ext) -o $(test) `pkg-config gstreamer-0.10 --libs --cflags` `pkg-config gtk+-2.0 --libs --cflags`

clean:
	rm -rf $(test)
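
For completeness, here is a rough Python (pygst 0.10) sketch of the same dynamic-pad approach, reusing the H.264 elements and camera URL from the question. Treat it as a minimal, untested illustration of where the pad-added callback goes, not a drop-in solution:

import gobject
import gst

def on_pad_added(src, pad, depay):
    # rtspsrc pads only appear once the stream is negotiated,
    # so the source -> depayloader link must be made here, at runtime.
    sinkpad = depay.get_static_pad("sink")
    if not sinkpad.is_linked():
        pad.link(sinkpad)

gobject.threads_init()

player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("xvimagesink", "output")

source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
source.set_property("latency", 0)

player.add(source, depay, decoder, sink)
# Link only the static part up front; rtspsrc is linked from the callback.
gst.element_link_many(depay, decoder, sink)
source.connect("pad-added", on_pad_added, depay)

player.set_state(gst.STATE_PLAYING)
gobject.MainLoop().run()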

UPDATE

Equivalent Java code

// Display RTSP streaming of video
// (c) 2011 enthusiasticgeek
// This code is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
// Leave Credits intact

package video2; //replace this with your package

import java.awt.BorderLayout;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

import org.gstreamer.Bus;       // needed for the Bus.* listener interfaces below
//import org.gstreamer.Caps;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.GstObject; // needed by the bus callbacks
import org.gstreamer.Pad;
import org.gstreamer.PadDirection;
import org.gstreamer.Pipeline;
import org.gstreamer.State;     // needed by the STATE_CHANGED callback
import org.gstreamer.TagList;   // needed by the TAG callback
import org.gstreamer.swing.VideoComponent;

/**
 * A simple video test example.
 */
public class Main {

    public Main() {
    }

    private static Pipeline pipe;

    public static void main(String[] args) {
        // Quartz is abysmally slow at scaling video for some reason, so turn it off.
        System.setProperty("apple.awt.graphics.UseQuartz", "false");

        args = Gst.init("SwingVideoTest", args);

        pipe = new Pipeline("pipeline");
        /*
        final Element videosrc = ElementFactory.make("videotestsrc", "source");
        final Element videofilter = ElementFactory.make("capsfilter", "flt");
        videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576"
                + ", bpp=32, depth=32, framerate=25/1"));
        */

        pipe.getBus().connect(new Bus.ERROR() {
            public void errorMessage(GstObject source, int code, String message) {
                System.out.println("Error occurred: " + message);
                Gst.quit();
            }
        });
        pipe.getBus().connect(new Bus.STATE_CHANGED() {
            public void stateChanged(GstObject source, State old, State current, State pending) {
                if (source == pipe) {
                    System.out.println("Pipeline state changed from " + old + " to " + current);
                }
            }
        });
        pipe.getBus().connect(new Bus.EOS() {
            public void endOfStream(GstObject source) {
                System.out.println("Finished playing file");
                Gst.quit();
            }
        });

        pipe.getBus().connect(new Bus.TAG() {
            public void tagsFound(GstObject source, TagList tagList) {
                for (String tag : tagList.getTagNames()) {
                    System.out.println("Found tag " + tag + " = "
                            + tagList.getValue(tag, 0));
                }
            }
        });

        final Element source = ElementFactory.make("rtspsrc", "Source");
        final Element demux = ElementFactory.make("rtpmp4vdepay", "Depay");
        final Element decoder = ElementFactory.make("ffdec_mpeg4", "Decoder");
        final Element colorspace = ElementFactory.make("ffmpegcolorspace", "Colorspace");
        //final Element sink = ElementFactory.make("autovideosink", "Output");

        SwingUtilities.invokeLater(new Runnable() {

            public void run() {
                // Create the video component and link it in
                VideoComponent videoComponent = new VideoComponent();
                Element videosink = videoComponent.getElement();

                source.connect(new Element.PAD_ADDED() {
                    public void padAdded(Element element, Pad pad) {
                        pad.link(demux.getStaticPad("sink"));
                    }
                });

                Pad p = new Pad(null, PadDirection.SRC);
                source.addPad(p);

                source.set("location", "rtsp://<user>:<pass>@<ip>/mpeg4/1/media.amp"); //replace this with your source

                pipe.addMany(source, demux, decoder, colorspace, videosink);
                Element.linkMany(demux, decoder, colorspace, videosink);

                // Now create a JFrame to display the video output
                JFrame frame = new JFrame("Swing Video Test");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(videoComponent, BorderLayout.CENTER);
                videoComponent.setPreferredSize(new Dimension(720, 576));
                frame.pack();
                frame.setVisible(true);

                // Start the pipeline processing
                pipe.play();
            }
        });
    }
}
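
In all three versions the crucial point is the one the C program prints at startup: rtspsrc creates its source pads at runtime, once it knows how many streams (audio, video) the camera provides. That is why gst.element_link_many(source, ...) fails in the original Python code; at link time rtspsrc has no source pad yet. Only the downstream elements can be linked statically, and the link from rtspsrc to the depayloader must be made in a "pad-added" callback.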

Regarding python - Playing RTSP with python-gstreamer, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/4192871/
