gpt4 book ai didi

c++ - GStreamer:rtpjpegdepay/jpegdec 输出的帧颜色错误

转载 作者:行者123 更新时间:2023-11-28 04:35:18 25 4
gpt4 key购买 nike

MJPEG RTSP 流服务器已使用 VLC 运行。然后我又运行了一个 VLC 以确定来自流的帧具有正确的颜色。所有颜色都是正确的。

我的任务是在没有任何解压缩的情况下在 rtpjpegdepay 插件的输出上捕获 MJPEG 帧。我已经使用以下管道创建了 C++ 应用程序:

rtspsrc -> rtpjpegdepay -> appsink 

源 URL 是使用下面的行设置的:

g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);

每个 jpeg 帧都是用下面的方法捕获的:

g_signal_connect(m_sink, "new-sample", G_CALLBACK(captureGstBuffer), this);

但是收到的图片很奇怪(是有分隔带的路,只有颜色不对):

Jpeg with wrong colors

接下来我尝试使用另一个管道:

rtspsrc -> rtpjpegdepay -> jpegdec -> appsink

使用相同的“新样本”方法,我收到的 YUV420 图像具有与 rtpjpegdepay 相同的错误颜色。

我尝试了从 1.8.3 到 1.14.2 的不同版本的 Gstreamer

这种行为的原因可能是什么?

这是代码(YUV420):

// "pad-added" handler: rtspsrc creates its source pad only after the RTSP
// session is negotiated, so the link to the downstream element has to be
// made here rather than at pipeline-construction time.
//
// @param element the element that emitted the signal (unused)
// @param pad     the freshly created source pad
// @param data    downstream GstElement whose static "sink" pad we link to
static void OnPadAdded(GstElement *element, GstPad *pad, void *data)
{
    GstElement *downstream = GST_ELEMENT(data);

    GstPad *target = gst_element_get_static_pad(downstream, "sink");
    gst_pad_link(pad, target);
    gst_object_unref(target); // get_static_pad returned a new reference
}

// Monotonically increasing index used to generate unique dump-file names.
static int fileind = 0;

// Dumps a raw (depayloaded JPEG or decoded YUV420) buffer to an
// incrementally numbered file under D:\Temp for offline inspection.
//
// @param pBuffer   start of the data to write (read-only)
// @param dwBufSize number of bytes to write
void WriteToFile(BYTE *pBuffer, DWORD dwBufSize)
{
    fileind++;

    std::stringstream ssFileName;
    ssFileName << "D:\\Temp\\file" << fileind << ".yuv";

    FILE* fp = fopen(ssFileName.str().c_str(), "wb+");
    if (fp == nullptr) {
        // BUG FIX: fopen can fail (missing directory, permissions, disk full);
        // the original passed a null FILE* straight into fwrite/fclose.
        return;
    }
    fwrite(pBuffer, dwBufSize, 1, fp);
    fclose(fp);
}

// appsink "new-sample" handler: pulls the next sample and dumps its raw
// bytes to disk via WriteToFile.
//
// @param sink the appsink element that emitted the signal
// @param data user data (the RTSPClientDevice instance; unused here)
// @return GST_FLOW_OK always, so the pipeline keeps running even if a
//         single sample could not be processed
static GstFlowReturn CaptureGstBuffer(GstElement *sink, void *data) {
    GstSample *sample = NULL;

    // "pull-sample" is an action signal; on success we own the sample.
    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (sample) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);

        // BUG FIX: gst_sample_get_buffer may return NULL, and
        // gst_buffer_map can fail; the original dereferenced map.data
        // unconditionally.
        if (buffer) {
            GstMapInfo map;
            if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
                WriteToFile((BYTE *)map.data, map.size);
                gst_buffer_unmap(buffer, &map);
            }
        }
        gst_sample_unref(sample);
    }

    return GST_FLOW_OK;
}

long RTSPClientDevice::StartClient()
{
m_loop = g_main_loop_new(NULL, FALSE);

m_pipeline = gst_pipeline_new("mjpeg-catcher");
g_assert(m_pipeline);
m_source = gst_element_factory_make("rtspsrc", "Source");
g_assert(m_source);
m_depay = gst_element_factory_make("rtpjpegdepay", "Depay");
g_assert(m_depay);
m_decoder = gst_element_factory_make("jpegdec", "Decoder");
g_assert(m_decoder);
m_sink = gst_element_factory_make("appsink", "Output");
g_assert(m_sink);

if (!m_pipeline || !m_source || !m_depay || !m_decoder || !m_sink) {
return Z_ERR;
}

std::string url = "";
GetClientURL(url);

g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);
g_object_set(G_OBJECT (m_source), "do-rtcp", 1, NULL);
g_object_set(G_OBJECT(m_source), "latency", 0, NULL);
g_object_set(G_OBJECT(m_source), "probation", 1, NULL);

m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
gst_bus_add_watch(m_bus, bus_call, m_loop);
gst_object_unref(m_bus);

gst_bin_add_many(GST_BIN(m_pipeline), m_source, m_depay, m_decoder ,
m_sink, NULL);

if (!gst_element_link(m_source, m_depay)) {
return Z_ERR;
}

if (!gst_element_link(m_depay, m_decoder)) {
return Z_ERR;
}

if (!gst_element_link(m_decoder, m_sink)) {
return Z_ERR;
}

if (!g_signal_connect(m_source, "pad-added", G_CALLBACK(OnPadAdded),
m_capsfilter))
{
return Z_ERR;
}

g_object_set(G_OBJECT(m_sink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_object_set(G_OBJECT(m_sink), "max-buffers", (guint)1, NULL);
g_object_set(G_OBJECT(m_sink), "drop", (guint)1, NULL);
g_object_set(G_OBJECT(m_sink), "sync", (guint)0, NULL);
g_object_set(G_OBJECT(m_sink), "max_lateness", G_GINT64_CONSTANT(-1), NULL);
g_object_set(G_OBJECT(m_sink), "qos", (guint)1, NULL);


/*GstCaps *caps = gst_caps_from_string("video/x-raw,encoding-name=RGB,format=(fourcc)YUV444,width=1280,height=720");
g_object_set(m_videoconvert, "caps", caps, NULL);
gst_caps_unref(caps);*/

if (g_signal_connect(m_sink, "new-sample", G_CALLBACK(CaptureGstBuffer), this) <= 0)
{
return Z_ERR;
}

gst_element_set_state(m_pipeline, GST_STATE_PLAYING);

ControlThreadStart(); //Place for g_main_loop_run

m_isStarted = true;

return Z_OK;
}

// Stops the capture pipeline and releases GStreamer resources.
// The teardown order is significant: quit the main loop first, then drive
// the pipeline to NULL (synchronous teardown of all elements), then drop
// the references. Safe to call when the client was never started.
//
// @return Z_OK always (also when the client was not running)
long RTSPClientDevice::StopClient()
{
if(!m_isStarted)
{
return Z_OK;
}

if (g_main_loop_is_running(m_loop))
{
g_main_loop_quit(m_loop);
// Wake the loop's context in case it is blocked in a poll, so the
// quit request is seen promptly by the control thread.
g_main_context_wakeup(g_main_loop_get_context(m_loop));
}

// NULL state tears down the RTSP session and all elements before we
// release our reference to the pipeline (which owns the elements).
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(m_pipeline));
//TODO: unref plugins

g_main_loop_unref(m_loop);

m_isStarted = false;

return Z_OK;
}

已编辑:

我尝试使用以下内容:

gst-launch-1.0.exe -v rtspsrc location=rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! jpegparse ! multifilesink post-messages=true location="frame%d.jpg"

结果相同 - 颜色错误。

命令:

gst-launch-1.0.exe -v rtspsrc location = rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! multifilesink post-messages=true location="frame%d.jpg"

也产生相同的帧

我还捕获了来自 gst-launch-1.0 的日志(看不到错误 - 只有 INFO 和 DEBUG)和 wireshark 日志(也看不到任何问题)。我将尝试更深入地分析它。该 avi 是使用海康威视相机的 VLC 捕获的。

最佳答案

我曾尝试使用 VLC MJPEG 服务器进行调试。这是问题所在。我的基于 gstreamer rtpjpegdepay 插件的客户端在直接连接到 Hikvision MJPEG 流时没有任何问题。所以大概是 VLC 错误或违反任何一方的标准。

关于c++ - GStreamer:rtpjpegdepay/jpegdec 输出的帧颜色错误,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/51615670/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com