gpt4 book ai didi

python - 如何使用 Gstreamer 和 Python RTSP 流式传输视频?

转载 作者:行者123 更新时间:2023-12-05 03:54:58 27 4
gpt4 key购买 nike

我有一段代码，目前使用 Python 的 gstreamer 绑定获取一个视频并在屏幕上显示。播放器打开后点击“Forward”按钮即可在视频中跳转，这个功能对我很重要，因此我不想把管道写成字符串再交给 gst-launch 去解析执行。

我现在想做的是不仅将此视频流式传输到一个新打开的窗口,而且(或者仅当我不能同时拥有这两个窗口时)通过 RTSP 在 VLC 或什至通过 LAN 的另一个客户端打开它。有什么办法吗?

很抱歉,代码很长,但这里是:

import sys, os, time
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject, Gtk
from gi.repository import GdkX11, GstVideo

class GTK_Main(object):
    """Minimal GTK video player driven by a GStreamer ``playbin``.

    Builds a window with a text entry for the file path and three buttons
    (Rewind / Start-Stop / Forward), creates a ``playbin`` element and
    wires its bus messages back into the UI.
    """

    def __init__(self):
        window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
        window.set_title("Vorbis-Player")
        window.set_default_size(500, -1)
        window.connect("destroy", Gtk.main_quit, "WM destroy")
        vbox = Gtk.VBox()
        window.add(vbox)
        # Entry holding the path of the file to play.
        self.entry = Gtk.Entry()
        vbox.pack_start(self.entry, False, False, 0)
        hbox = Gtk.HBox()
        vbox.add(hbox)
        buttonbox = Gtk.HButtonBox()
        hbox.pack_start(buttonbox, False, False, 0)
        rewind_button = Gtk.Button("Rewind")
        rewind_button.connect("clicked", self.rewind_callback)
        buttonbox.add(rewind_button)
        self.button = Gtk.Button("Start")
        self.button.connect("clicked", self.start_stop)
        buttonbox.add(self.button)
        forward_button = Gtk.Button("Forward")
        forward_button.connect("clicked", self.forward_callback)
        buttonbox.add(forward_button)
        self.time_label = Gtk.Label()
        self.time_label.set_text("00:00 / 00:00")
        hbox.add(self.time_label)
        window.show_all()

        self.player = Gst.ElementFactory.make("playbin", "player")
        bus = self.player.get_bus()
        bus.add_signal_watch()
        # Sync messages are needed so 'prepare-window-handle' can be
        # answered on the streaming thread (see on_sync_message).
        bus.enable_sync_message_emission()
        bus.connect("message", self.on_message)
        bus.connect("sync-message::element", self.on_sync_message)

    def start_stop(self, w):
        """Toggle playback.

        On "Start": read the path from the entry, set it as the playbin
        URI, start playing and seek to the 60 s mark. On "Stop": tear the
        pipeline down to NULL.
        """
        if self.button.get_label() == "Start":
            filepath = self.entry.get_text().strip()
            if os.path.isfile(filepath):
                filepath = os.path.realpath(filepath)
                # BUG FIX: in the original the identifier was broken across
                # two lines ("self.butto" / "n.set_label"), which is a
                # SyntaxError; rejoined here.
                self.button.set_label("Stop")
                self.player.set_property("uri", "file://" + filepath)
                self.player.set_state(Gst.State.PLAYING)
                # Give the pipeline a moment to preroll before seeking.
                time.sleep(1)
                self.forward_callback(60)
        else:
            self.player.set_state(Gst.State.NULL)
            self.button.set_label("Start")

    def on_message(self, bus, message):
        """Bus handler: stop the pipeline on end-of-stream or error."""
        t = message.type
        if t == Gst.MessageType.EOS:
            self.player.set_state(Gst.State.NULL)
            self.button.set_label("Start")
        elif t == Gst.MessageType.ERROR:
            self.player.set_state(Gst.State.NULL)
            err, debug = message.parse_error()
            print ("Error: %s" % err, debug)
            self.button.set_label("Start")

    def on_sync_message(self, bus, message):
        """Hand the video sink an X window to render into.

        NOTE(review): ``self.movie_window`` is never assigned anywhere in
        this class, so this handler would raise AttributeError when a
        video sink asks for a window handle; the original tutorial created
        a Gtk.DrawingArea for it -- TODO confirm and restore if needed.
        """
        if message.get_structure().get_name() == 'prepare-window-handle':
            imagesink = message.src
            imagesink.set_property("force-aspect-ratio", True)
            imagesink.set_window_handle(self.movie_window.get_property('window').get_xid())

    def rewind_callback(self, w):
        """Seek 10 seconds backwards, clamped at the start of the stream."""
        rc, pos_int = self.player.query_position(Gst.Format.TIME)
        seek_ns = pos_int - 10 * 1000000000
        if seek_ns < 0:
            seek_ns = 0
        print ("Backward: %d ns -> %d ns" % (pos_int, seek_ns))
        self.player.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_ns)

    def forward_callback(self, w):
        """Seek forwards.

        Dual-use: as a button callback ``w`` is the widget and we jump
        10 s ahead of the current position; called directly with an int
        (see start_stop) we seek to that absolute second.
        """
        rc, pos_int = self.player.query_position(Gst.Format.TIME)
        if type(w) == int:
            seek_ns = w * 1000000000
        else:
            seek_ns = pos_int + 10 * 1000000000
        print ("Forward: %d ns -> %d ns" % (pos_int, seek_ns))
        self.player.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, seek_ns)

    def convert_ns(self, t):
        """Format a nanosecond timestamp as MM:SS, or H:MM:SS past 1 hour.

        (This method was submitted by Sam Mason; shorter than the original.)
        """
        s, ns = divmod(t, 1000000000)
        m, s = divmod(s, 60)

        if m < 60:
            return "%02i:%02i" % (m, s)
        else:
            h, m = divmod(m, 60)
            return "%i:%02i:%02i" % (h, m, s)


if __name__ == "__main__":
    # threads_init() has been a deprecated no-op since PyGObject 3.11;
    # kept because the tutorial this came from targets older versions.
    GObject.threads_init()
    Gst.init(None)
    GTK_Main()
    Gtk.main()  # blocks until the window is destroyed

这段代码是我在 this 教程中找到的。

最佳答案

因此，我最终设法让题目链接中提供的方案跑通了。我的问题出在视频的宽度和高度上——它们必须与要播放的视频完全一致，这大概与 opencv 帧的传递方式有关……另外，链接方案因为用的是网络摄像头而把 "is-live" 属性设为了 true，这里必须改为 false（或者干脆不设置，因为 false 本来就是默认值），否则视频开始时会黑屏并有一段延迟。最终代码如下：

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


class SensorFactory(GstRtspServer.RTSPMediaFactory):
    """RTSP media factory that pushes OpenCV-decoded frames into an appsrc.

    Each connecting client gets the pipeline built in ``do_create_element``:
    appsrc -> videoconvert -> x264enc -> rtph264pay. Frames are pulled from
    ``self.cap`` on demand by the appsrc 'need-data' signal.

    NOTE(review): the caps hard-code format=BGR, width=1280, height=720;
    per the answer above these must match the opened video exactly or
    playback fails -- TODO: derive them from ``self.cap`` properties.
    """

    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        # Replace with a real video path before running.
        self.cap = cv2.VideoCapture("path/to/video")
        self.number_frames = 0
        self.fps = 8
        self.duration = 1 / self.fps * Gst.SECOND  # duration of a frame in nanoseconds
        self.launch_string = 'appsrc name=source block=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=1280,height=720,framerate={}/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency ! queue ' \
                             '! rtph264pay config-interval=1 name=pay0 pt=96 '.format(self.fps)
        # Play with:
        # gst-launch-1.0 rtspsrc location=rtsp://localhost:8554/test latency=50 ! decodebin ! autovideosink

    def on_need_data(self, src, length):
        """appsrc 'need-data' handler: push exactly one timestamped frame.

        ``length`` (misspelled 'lenght' in the original) is appsrc's byte
        hint and is intentionally ignored.
        """
        if self.cap.isOpened():
            ret, frame = self.cap.read()
            if ret:
                # BUG FIX: ndarray.tostring() is deprecated and removed in
                # NumPy 2.0; tobytes() is the exact replacement.
                data = frame.tobytes()
                buf = Gst.Buffer.new_allocate(None, len(data), None)
                buf.fill(0, data)
                buf.duration = self.duration
                timestamp = self.number_frames * self.duration
                buf.pts = buf.dts = int(timestamp)
                buf.offset = timestamp
                self.number_frames += 1
                retval = src.emit('push-buffer', buf)
                if retval != Gst.FlowReturn.OK:
                    print(retval)
            # NOTE(review): when the file runs out (ret is False) nothing is
            # pushed and the stream stalls; rewinding self.cap here would
            # loop the video -- left unchanged to preserve behavior.

    def do_create_element(self, url):
        """Build the per-client pipeline from the launch string."""
        return Gst.parse_launch(self.launch_string)

    def do_configure(self, rtsp_media):
        """Reset the frame counter and hook 'need-data' for a new session."""
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)


class GstServer(GstRtspServer.RTSPServer):
    """RTSP server that mounts one shared SensorFactory at /test."""

    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        # A single shared factory: every client sees the same pipeline.
        self.factory = SensorFactory()
        self.factory.set_shared(True)
        mount_points = self.get_mount_points()
        mount_points.add_factory("/test", self.factory)
        # Attach to the default GLib main context (default port 8554).
        self.attach(None)


if __name__ == "__main__":
    # threads_init() has been a deprecated no-op since PyGObject 3.11;
    # kept for compatibility with older installs.
    GObject.threads_init()
    Gst.init(None)

    server = GstServer()  # attaches itself to the default main context

    # NOTE(review): GObject.MainLoop is a deprecated alias for
    # GLib.MainLoop in modern PyGObject -- confirm the installed
    # version still provides it.
    loop = GObject.MainLoop()
    loop.run()

关于python - 如何使用 Gstreamer 和 Python RTSP 流式传输视频?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/60571501/

27 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com