- android - RelativeLayout 背景可绘制重叠内容
- android - 如何链接 cpufeatures lib 以获取 native android 库?
- java - OnItemClickListener 不起作用,但 OnLongItemClickListener 在自定义 ListView 中起作用
- java - Android 文件转字符串
我有一个解决方案,我使用 Media Foundation 的 h264 编码器对来自网络摄像头的视频 (YUY2) 样本进行编码。然后我通过 TCP 连接将它发送到另一个应用程序,该应用程序使用 Media Foundation 的 h264 解码器将流解码回 YUY2 格式。解码后,视频样本/图像使用 DirectX 呈现在屏幕上。
问题在于,在关键帧之间,视频图像会出现越来越多的伪像。收到关键帧时,伪影消失。
我将 TCP 连接排除在范围之外,并在编码后立即进行解码,但仍然有工件困扰着我。
这是从网络摄像头接收样本的回调方法:
//-------------------------------------------------------------------
// OnReadSample
//
// Called when the IMFMediaSource::ReadSample method completes.
//-------------------------------------------------------------------
HRESULT CPreview::OnReadSample(
HRESULT hrStatus,
DWORD /* dwStreamIndex */,
DWORD dwStreamFlags,
LONGLONG llTimestamp,
IMFSample *pSample // Can be NULL
)
{
HRESULT hr = S_OK;
IMFMediaBuffer *pBuffer = NULL;
EnterCriticalSection(&m_critsec);
if (FAILED(hrStatus))
{
hr = hrStatus;
}
if (SUCCEEDED(hr))
{
if (pSample)
{
IMFSample *pEncodedSample = NULL;
hr = m_pCodec->EncodeSample(pSample, &pEncodedSample);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || pEncodedSample == NULL)
{
hr = m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
LeaveCriticalSection(&m_critsec);
return S_OK;
}
LONGLONG llEncodedSampleTimeStamp = 0;
LONGLONG llEncodedSampleDuration = 0;
pEncodedSample->GetSampleTime(&llEncodedSampleTimeStamp);
pEncodedSample->GetSampleDuration(&llEncodedSampleDuration);
pBuffer = NULL;
hr = pEncodedSample->GetBufferByIndex(0, &pBuffer);
if (hr != S_OK)
{
hr = m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
LeaveCriticalSection(&m_critsec);
return hr;
}
BYTE *pOutBuffer = NULL;
DWORD dwMaxLength, dwCurrentLength;
hr = pBuffer->Lock(&pOutBuffer, &dwMaxLength, &dwCurrentLength);
if (hr != S_OK)
{
hr = m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
LeaveCriticalSection(&m_critsec);
return hr;
}
// Send encoded webcam data to connected clients
//SendData(pOutBuffer, dwCurrentLength, llEncodedSampleTimeStamp, llEncodedSampleDuration);
pBuffer->Unlock();
SafeRelease(&pBuffer);
IMFSample *pDecodedSample = NULL;
m_pCodec->DecodeSample(pEncodedSample, &pDecodedSample);
if (pDecodedSample != NULL)
{
pDecodedSample->SetSampleTime(llTimestamp);
pDecodedSample->SetSampleTime(llTimestamp - llLastSampleTimeStamp);
llLastSampleTimeStamp = llTimestamp;
hr = pDecodedSample->GetBufferByIndex(0, &pBuffer);
//hr = pSample->GetBufferByIndex(0, &pBuffer);
// Draw the frame.
if (SUCCEEDED(hr))
{
hr = m_draw.DrawFrame(pBuffer);
}
SafeRelease(&pDecodedSample);
}
SafeRelease(&pBuffer);
SafeRelease(&pEncodedSample);
}
}
// Request the next frame.
if (SUCCEEDED(hr))
{
hr = m_pReader->ReadSample(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0,
NULL, // actual
NULL, // flags
NULL, // timestamp
NULL // sample
);
}
if (FAILED(hr))
{
NotifyError(hr);
}
SafeRelease(&pBuffer);
LeaveCriticalSection(&m_critsec);
return hr;
}
这是编码器/解码器的初始化代码:
HRESULT Codec::InitializeEncoder()
{
IMFMediaType *pMFTInputMediaType = NULL, *pMFTOutputMediaType = NULL;
IUnknown *spTransformUnk = NULL;
DWORD mftStatus = 0;
UINT8 blob[] = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0xc0, 0x1e, 0x96, 0x54, 0x05, 0x01,
0xe9, 0x80, 0x80, 0x40, 0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x3c, 0x80 };
CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
MFStartup(MF_VERSION);
// Create H.264 encoder.
CHECK_HR(CoCreateInstance(CLSID_CMSH264EncoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 encoder MFT.\n");
CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&pEncoderTransform)), "Failed to get IMFTransform interface from H264 encoder MFT object.\n");
// Transform output type
MFCreateMediaType(&pMFTOutputMediaType);
pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, 500000);
CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, 640, 480), "Failed to set frame size on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, 30, 1), "Failed to set frame rate on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
pMFTOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
pMFTOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
// Special attributes for H264 transform, if needed
/*CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base), "Failed to set profile on H264 MFT out type.\n");
CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MPEG2_LEVEL, eAVEncH264VLevel4), "Failed to set level on H264 MFT out type.\n");
CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MAX_KEYFRAME_SPACING, 10), "Failed to set key frame interval on H264 MFT out type.\n");
CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncCommonQuality, 100), "Failed to set H264 codec qulaity.\n");
CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncMPVGOPSize, 1), "Failed to set CODECAPI_AVEncMPVGOPSize = 1\n");*/
CHECK_HR(pEncoderTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 encoder MFT.\n");
// Transform input type
MFCreateMediaType(&pMFTInputMediaType);
pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2);
CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, 640, 480), "Failed to set frame size on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, 30, 1), "Failed to set frame rate on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
CHECK_HR(pEncoderTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n");
CHECK_HR(pEncoderTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n");
if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus)
{
printf("E: pEncoderTransform->GetInputStatus() not accept data.\n");
goto done;
}
CHECK_HR(pEncoderTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n");
CHECK_HR(pEncoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n");
CHECK_HR(pEncoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n");
return S_OK;
done:
SafeRelease(&pMFTInputMediaType);
SafeRelease(&pMFTOutputMediaType);
return S_FALSE;
}
HRESULT Codec::InitializeDecoder()
{
IUnknown *spTransformUnk = NULL;
IMFMediaType *pMFTOutputMediaType = NULL;
IMFMediaType *pMFTInputMediaType = NULL;
DWORD mftStatus = 0;
// Create H.264 decoder.
CHECK_HR(CoCreateInstance(CLSID_CMSH264DecoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 decoder MFT.\n");
// Query for the IMFTransform interface
CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&pDecoderTransform)), "Failed to get IMFTransform interface from H264 decoder MFT object.\n");
// Create input mediatype for the decoder
MFCreateMediaType(&pMFTInputMediaType);
pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, 640, 480), "Failed to set frame size on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, 30, 1), "Failed to set frame rate on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
pMFTInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
pMFTInputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
CHECK_HR(pDecoderTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n");
CHECK_HR(pDecoderTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n");
if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus)
{
printf("E: pDecoderTransform->GetInputStatus() not accept data.\n");
goto done;
}
// Create outmedia type for the decoder
MFCreateMediaType(&pMFTOutputMediaType);
pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2);
CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, 640, 480), "Failed to set frame size on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, 30, 1), "Failed to set frame rate on H264 MFT out type.\n");
CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
CHECK_HR(pDecoderTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 decoder MFT.\n");
CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n");
CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n");
CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n");
return S_OK;
done:
SafeRelease(&pMFTInputMediaType);
SafeRelease(&pMFTOutputMediaType);
return S_FALSE;
}
这是实际的解码/编码器部分:
HRESULT Codec::EncodeSample(IMFSample *pSample, IMFSample **ppEncodedSample)
{
return TransformSample(pEncoderTransform, pSample, ppEncodedSample);
}
HRESULT Codec::DecodeSample(IMFSample *pSample, IMFSample **ppEncodedSample)
{
return TransformSample(pDecoderTransform, pSample, ppEncodedSample);
}
HRESULT Codec::TransformSample(IMFTransform *pTransform, IMFSample *pSample, IMFSample **ppSampleOut)
{
IMFSample *pOutSample = NULL;
IMFMediaBuffer *pBuffer = NULL;
DWORD mftOutFlags;
pTransform->ProcessInput(0, pSample, 0);
CHECK_HR(pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");
// Note: Decoder does not return MFT flag MFT_OUTPUT_STATUS_SAMPLE_READY, so we just need to rely on S_OK return
if (pTransform == pEncoderTransform && mftOutFlags == S_OK)
{
return S_OK;
}
else if (pTransform == pEncoderTransform && mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY ||
pTransform == pDecoderTransform && mftOutFlags == S_OK)
{
DWORD processOutputStatus = 0;
MFT_OUTPUT_DATA_BUFFER outputDataBuffer;
MFT_OUTPUT_STREAM_INFO StreamInfo;
pTransform->GetOutputStreamInfo(0, &StreamInfo);
CHECK_HR(MFCreateSample(&pOutSample), "Failed to create MF sample.\n");
CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
if (pTransform == pEncoderTransform)
CHECK_HR(pBuffer->SetCurrentLength(StreamInfo.cbSize), "Failed SetCurrentLength.\n");
CHECK_HR(pOutSample->AddBuffer(pBuffer), "Failed to add sample to buffer.\n");
outputDataBuffer.dwStreamID = 0;
outputDataBuffer.dwStatus = 0;
outputDataBuffer.pEvents = NULL;
outputDataBuffer.pSample = pOutSample;
HRESULT hr = pTransform->ProcessOutput(0, 1, &outputDataBuffer, &processOutputStatus);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
{
SafeRelease(&pBuffer);
SafeRelease(&pOutSample);
return hr;
}
LONGLONG llVideoTimeStamp, llSampleDuration;
pSample->GetSampleTime(&llVideoTimeStamp);
pSample->GetSampleDuration(&llSampleDuration);
CHECK_HR(outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
CHECK_HR(outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");
if (pTransform == pEncoderTransform)
{
IMFMediaBuffer *pMediaBuffer = NULL;
DWORD dwBufLength;
CHECK_HR(pOutSample->ConvertToContiguousBuffer(&pMediaBuffer), "ConvertToContiguousBuffer failed.\n");
CHECK_HR(pMediaBuffer->GetCurrentLength(&dwBufLength), "Get buffer length failed.\n");
WCHAR *strDebug = new WCHAR[256];
wsprintf(strDebug, L"Encoded sample ready: time %I64d, sample duration %I64d, sample size %i.\n", llVideoTimeStamp, llSampleDuration, dwBufLength);
OutputDebugString(strDebug);
SafeRelease(&pMediaBuffer);
}
else if (pTransform == pDecoderTransform)
{
IMFMediaBuffer *pMediaBuffer = NULL;
DWORD dwBufLength;
CHECK_HR(pOutSample->ConvertToContiguousBuffer(&pMediaBuffer), "ConvertToContiguousBuffer failed.\n");
CHECK_HR(pMediaBuffer->GetCurrentLength(&dwBufLength), "Get buffer length failed.\n");
WCHAR *strDebug = new WCHAR[256];
wsprintf(strDebug, L"Decoded sample ready: time %I64d, sample duration %I64d, sample size %i.\n", llVideoTimeStamp, llSampleDuration, dwBufLength);
OutputDebugString(strDebug);
SafeRelease(&pMediaBuffer);
}
// Decoded sample out
*ppSampleOut = pOutSample;
//SafeRelease(&pMediaBuffer);
SafeRelease(&pBuffer);
return S_OK;
}
done:
SafeRelease(&pBuffer);
SafeRelease(&pOutSample);
return S_FALSE;
}
我已经为此搜索了很长时间的解决方案,发现了一个与我的问题定义非常相似的问题,但由于它针对的是不同的 API,因此对我没有帮助。 FFMPEG decoding artifacts between keyframes
最好的问候,托尼·里科宁
最佳答案
我来晚了一点,但我可以确认主页上的答案是正确的解决方案。我也遇到了同样的问题,但我只使用了这个示例代码的解码器部分。我在阅读 MP4 文件时发现关键帧之间的伪像越来越多。我一收到关键帧,图像看起来不错,然后逐渐变差。这是我在 Codec::InitializeDecoder() 中添加的代码:
// Set CODECAPI_AVLowLatencyMode
ICodecAPI *mpCodecAPI = NULL;
hr = pDecoderTransform->QueryInterface(IID_PPV_ARGS(&mpCodecAPI));
CHECK_HR(hr, "Failed to get ICodecAPI.\n");
VARIANT var;
var.vt = VT_BOOL;
var.boolVal = VARIANT_TRUE;
hr = mpCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
CHECK_HR(hr, "Failed to enable low latency mode.\n");
添加这些更改后,程序运行得更好了!感谢 GitHub 上的这个软件为我提供了必要的代码: https://github.com/GameTechDev/ChatHeads/blob/master/VideoStreaming/EncodeTransform.cpp
关于c++ - Media Foundation 网络摄像头视频 H264 编码/解码在播放时产生伪像,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/41546721/
我对自定义 CSS 或在将图像作为 Logo 上传到页面时使用编码 block 有疑问。我正在为我的网站使用 squarespace,我需要帮助编码我的 Logo 以使其适合每个页面。一个选项是使用自
如 encoding/json 包文档中所述, Marshal traverses the value v recursively. If an encountered value implement
我必须做一些相当于Java中的iconv -f utf8 -t sjisMS $INPUT_FILE的事情。该命令在 Unix 中 我在java中没有找到任何带有sjisMS的编码。 Java中有Sh
从 PHP 5.3 迁移到 PHP 5.6 后,我遇到了编码问题。我的 MySQL 数据库是 latin1,我的 PHP 文件是 windows-1251。现在一切都显示为“ñëåäíèòå àäðå
我有一个 RScript文件(我们称之为 main.r ),它引用了另一个文件,使用以下代码: source("functions.R") 但是,当我运行 RScript 文件时,它提示以下错误:
我无法设法从 WSDL 创建 RPC/编码风格的代码 - 有谁知道哪个框架可以做到这一点? 带有 adb 和 xmlbeans 映射的 Axis2 无法正常工作(无法处理响应中的肥皂编码)直接使用 X
安装了最新版本的Node.Js()和npm包**(1.2.10)**当我运行 Express 命令来生成项目时,它向我抛出以下错误 buffer.js:240 switch (encoding &
JavaScript中有JSON编码/解码base64编码/解码函数吗? 最佳答案 是的,btoa() 和 atob() 在某些浏览器中可以工作: var enc = btoa("this is so
>>> unicode('восстановление информации', 'utf-16') Traceback (most recent call last): File "", line
我当然熟悉 java.net.URLEncoder 和 java.net.URLDecoder 类。但是,我只需要 HTML 样式的编码。 (我不想将 ' ' 替换为 '+' 等)。我不知道任何只做
有一个非常简单的 SSIS 包: OLE DB Source 通过 View 获取数据(数据库表 nvarchar 或 nchar 中的所有字符串列)。 派生列,用于格式化现有日期并将其添加到数据集(
我正在使用一个在 Node 中进行base64编码的软件,如下所示: const enc = new Buffer('test', 'base64') console.log(enc) 显示: 我正
我试图将带有日语字符的数据插入到 oracle 数据库中。事情是保存在数据库中的是一堆倒置的问号。我该如何解决这个问题 最佳答案 见 http://www.errcode.net/blogs/?p=6
当我在 java 中解压 zip 文件时,我发现文件名中出现了带有重音字符的奇怪行为。 西索: Add File user : L'equipe Technique -- Folder : spec
在网上冲浪我找到了 ExtJS 的 Ext.Gantt 插件,该扩展有一个特殊的编码。任何人都知道如何编码那样或其他复杂的形式。 Encoded Gantt Chart 最佳答案 它似乎被 Dean
我正在用C语言做一个编码任务,我进展顺利,直到读取符号并根据表格分配相应的代码的部分。我必须连接几个代码,直到它们的长度达到 32 位,为此我必须将它们写入一个文件中。这种写入文件的方法给我带来了很多
我有一个外部链接的 javascript 文件。在那个 javascript 里面,我有这个功能: function getMonthNumber(monthName){ monthName = mo
使用mechanize,我检索到一个网页的源页面,其中包含一些非ASCII字符,比如汉字。 代码如下: #using python2.6 from mechanize import Browser b
我有一个包含字母 ø 的文件。当我用这段代码 File.ReadLines(filePath) 读取它时,我得到了一个问号而不是它。 当我像这样添加编码时 File.ReadLines(filePat
如何翻译下面的字符串 H.P. Dembinski, B. K\'{e}gl, I.C. Mari\c{s}, M. Roth, D. Veberi\v{c} 进入 H. P. Dembinski,
我是一名优秀的程序员,十分优秀!