I want to use the Frame Rate Converter DSP in my Media Foundation application. I am using the SourceReader to read a video file. Can anyone tell me where and how to integrate the DMO with MF to get frame rate conversion? I cannot work out what kind of samples (compressed or uncompressed) to feed the DMO to obtain the new frame rate. How does the DMO change the frame rate? Does it give a new time-stamp to the new samples? There is no code example demonstrating its usage. Please help, I am stuck.
Thanks,
Mots
Best Answer
This is an old question.
To do frame rate conversion with the SourceReader, you have to integrate the DMO manually.
The idea is to get samples from the SourceReader that are compatible with the DMO, that is, decoded samples in one of the uncompressed video subtypes the converter accepts.
How does the DMO change the frame rate?
This DSP changes the frame rate by repeating or dropping frames.
Does it give a new time-stamp to the new samples?
Yes, the samples it produces get time-stamps and durations that match the output frame rate; you can verify this with GetSampleTime/GetSampleDuration on the DMO output samples (see the commented-out checks in ProcessDMO below).
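If you want to know in advance which time-stamps to expect, you can compute the output frame duration from the target frame rate with MFFrameRateToAverageTimePerFrame. A small standalone check (the 60000/1001 target matches the rate used in the full sample below; the printed value is approximate):
#pragma comment(lib, "mfplat")
#include <windows.h>
#include <mfapi.h>
#include <stdio.h>

int main(){
    // Average duration of one output frame, in 100-nanosecond units.
    // For 60000/1001 (59.94 fps) this is roughly 166833, i.e. about 16.7 ms;
    // the time-stamps on the DMO output samples should advance on this grid.
    UINT64 avgTimePerFrame = 0;
    if(SUCCEEDED(MFFrameRateToAverageTimePerFrame(60000, 1001, &avgTimePerFrame))){
        wprintf(L"%llu\n", avgTimePerFrame);
    }
    return 0;
}
The complete sample below shows the full integration with the SourceReader: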
#pragma once
#define WIN32_LEAN_AND_MEAN
#define STRICT
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mfreadwrite")
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "wmcodecdspuuid")
#include <WinSDKVer.h>
#include <new>
#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <mferror.h>
#include <Wmcodecdsp.h>
template <class T> inline void SAFE_RELEASE(T*& p){
    if(p){
        p->Release();
        p = NULL;
    }
}
HRESULT ProcessConverter();
HRESULT InitDMO(IMFTransform**, IMFMediaType*);
HRESULT ProcessSample(IMFSourceReader*, IMFTransform*);
HRESULT ProcessDMO(IMFTransform*, IMFSample*, DWORD&, const UINT32);
HRESULT InitOutputDataBuffer(IMFTransform*, MFT_OUTPUT_DATA_BUFFER*, const UINT32);
int main(){
    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    if(SUCCEEDED(hr)){
        hr = MFStartup(MF_VERSION, MFSTARTUP_LITE);
        if(SUCCEEDED(hr)){
            hr = ProcessConverter();
            hr = MFShutdown();
        }
        CoUninitialize();
    }
    return 0;
}
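// Opens the source file with a SourceReader, forces an uncompressed subtype the
// DMO accepts (YV12 here), initializes the Frame Rate Converter DMO with the
// reader's media type, and runs the conversion loop.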
HRESULT ProcessConverter(){
    HRESULT hr;
    IMFSourceReader* pReader = NULL;
    // Change the URL
    if(FAILED(hr = MFCreateSourceReaderFromURL(L"Wildlife.wmv", NULL, &pReader))){
        return hr;
    }
    DWORD dwMediaTypeIndex = 0;
    IMFMediaType* pType = NULL;
    hr = pReader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, dwMediaTypeIndex, &pType);
    if(SUCCEEDED(hr)){
        // We must ask for a subtype compatible with the DMO:
        // ARGB32 RGB24 RGB32 RGB555 RGB565 AYUV IYUV UYVY Y211 Y411 Y41P YUY2 YUYV YV12 YVYU
        hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
        if(SUCCEEDED(hr)){
            hr = pReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType);
        }
        // We need this because we use the MediaType to initialize the Transform
        if(SUCCEEDED(hr)){
            SAFE_RELEASE(pType);
            hr = pReader->GetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType);
        }
        if(SUCCEEDED(hr)){
            IMFTransform* pTransform = NULL;
            hr = InitDMO(&pTransform, pType);
            if(SUCCEEDED(hr)){
                hr = ProcessSample(pReader, pTransform);
                // Seems not really needed with the DMO
                /*hr = */ pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL);
                /*hr = */ pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL);
                /*hr = */ pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, NULL);
            }
            SAFE_RELEASE(pTransform);
        }
    }
    SAFE_RELEASE(pType);
    SAFE_RELEASE(pReader);
    return hr;
}
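// Creates the Frame Rate Converter DMO as an IMFTransform, uses the reader's
// current media type as the input type, sets the same type with the new
// MF_MT_FRAME_RATE as the output type, and signals the start of streaming.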
HRESULT InitDMO(IMFTransform** ppTransform, IMFMediaType* pType){
    HRESULT hr = CoCreateInstance(CLSID_CFrameRateConvertDmo, NULL, CLSCTX_INPROC_SERVER, IID_IMFTransform, reinterpret_cast<void**>(ppTransform));
    if(SUCCEEDED(hr)){
        hr = (*ppTransform)->SetInputType(0, pType, 0);
    }
    if(SUCCEEDED(hr)){
        // Change the frame rate as needed, here num = 60000 and den = 1001
        hr = MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, 60000, 1001);
    }
    if(SUCCEEDED(hr)){
        hr = (*ppTransform)->SetOutputType(0, pType, 0);
    }
    if(SUCCEEDED(hr)){
        hr = (*ppTransform)->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL);
    }
    if(SUCCEEDED(hr)){
        hr = (*ppTransform)->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL);
    }
    return hr;
}
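// Reads uncompressed video samples from the SourceReader one at a time and
// pushes each one through the converter, counting the samples delivered by the
// reader and the samples produced by the DMO.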
HRESULT ProcessSample(IMFSourceReader* pReader, IMFTransform* pTransform){
    HRESULT hr;
    IMFMediaType* pType = NULL;
    if(FAILED(hr = pTransform->GetOutputCurrentType(0, &pType))){
        return hr;
    }
    // We need the frame size to create the sample buffer.
    UINT32 uiFrameSize = 0;
    hr = pType->GetUINT32(MF_MT_SAMPLE_SIZE, &uiFrameSize);
    SAFE_RELEASE(pType);
    if(FAILED(hr) || uiFrameSize == 0){
        return hr;
    }
    BOOL bProcess = TRUE;
    DWORD streamIndex;
    DWORD flags;
    LONGLONG llTimeStamp;
    IMFSample* pSample = NULL;
    DWORD dwReaderCount = 0;
    DWORD dwDMOCount = 0;
    while(bProcess){
        hr = pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &flags, &llTimeStamp, &pSample);
        if(FAILED(hr) || flags != 0){
            bProcess = FALSE;
        }
        else{
            hr = ProcessDMO(pTransform, pSample, dwDMOCount, uiFrameSize);
            // You can check timestamp from the SourceReader
            //hr = pSample->GetSampleDuration(&llTimeStamp);
            //hr = pSample->GetSampleTime(&llTimeStamp);
            SAFE_RELEASE(pSample);
            dwReaderCount++;
        }
    }
    // Todo : check dwReaderCount and dwDMOCount here.
    // For example with native frame rate = 30000/1001 and dwReaderCount = 900
    // DMO frame rate = 30000/1001 -> dwDMOCount = 900
    // DMO frame rate = 60000/1001 -> dwDMOCount = 1800
    // DMO frame rate = 25/1 -> dwDMOCount = 750
    SAFE_RELEASE(pSample);
    return hr;
}
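// Standard MFT processing pattern: pull output with ProcessOutput until the
// transform reports MF_E_TRANSFORM_NEED_MORE_INPUT, then feed it the next
// input sample with ProcessInput. Each successful ProcessOutput is one
// converted frame.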
HRESULT ProcessDMO(IMFTransform* pTransform, IMFSample* pSample, DWORD& dwDMOCount, const UINT32 uiFrameSize){
    HRESULT hr = S_OK;
    MFT_OUTPUT_DATA_BUFFER outputDataBuffer;
    DWORD processOutputStatus = 0;
    // Todo : we should avoid recreating the buffer...
    hr = InitOutputDataBuffer(pTransform, &outputDataBuffer, uiFrameSize);
    while(hr == S_OK){
        hr = pTransform->ProcessOutput(0, 1, &outputDataBuffer, &processOutputStatus);
        if(hr == MF_E_TRANSFORM_NEED_MORE_INPUT){
            break;
        }
        // You can check new timestamp from the DMO
        /*if(outputDataBuffer.pSample != NULL){
            LONGLONG llTimeStamp = 0;
            hr = outputDataBuffer.pSample->GetSampleTime(&llTimeStamp);
            hr = outputDataBuffer.pSample->GetSampleDuration(&llTimeStamp);
        }*/
        dwDMOCount++;
    }
    if(hr == MF_E_TRANSFORM_NEED_MORE_INPUT){
        hr = pTransform->ProcessInput(0, pSample, 0);
    }
    if(outputDataBuffer.pSample != NULL){
        SAFE_RELEASE(outputDataBuffer.pSample);
    }
    return hr;
}
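// If the transform does not provide its own output samples (neither
// MFT_OUTPUT_STREAM_PROVIDES_SAMPLES nor MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES
// is set), the caller must supply one: an IMFSample backed by a memory buffer
// large enough for one uncompressed frame.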
HRESULT InitOutputDataBuffer(IMFTransform* pMFTransform, MFT_OUTPUT_DATA_BUFFER* pOutputBuffer, const UINT32 uiFrameSize){
    MFT_OUTPUT_STREAM_INFO outputStreamInfo;
    DWORD outputStreamId = 0;
    ZeroMemory(&outputStreamInfo, sizeof(outputStreamInfo));
    ZeroMemory(pOutputBuffer, sizeof(*pOutputBuffer));
    HRESULT hr = pMFTransform->GetOutputStreamInfo(outputStreamId, &outputStreamInfo);
    if(SUCCEEDED(hr)){
        if((outputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == 0 &&
            (outputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES) == 0){
            IMFSample* pOutputSample = NULL;
            IMFMediaBuffer* pMediaBuffer = NULL;
            hr = MFCreateSample(&pOutputSample);
            if(SUCCEEDED(hr)){
                hr = MFCreateMemoryBuffer(uiFrameSize, &pMediaBuffer);
            }
            if(SUCCEEDED(hr)){
                hr = pOutputSample->AddBuffer(pMediaBuffer);
            }
            if(SUCCEEDED(hr)){
                pOutputBuffer->pSample = pOutputSample;
                pOutputBuffer->pSample->AddRef();
            }
            SAFE_RELEASE(pMediaBuffer);
            SAFE_RELEASE(pOutputSample);
        }
    }
    return hr;
}
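One possible refinement, not strictly required here: if you want to be sure no converted frames are left inside the transform when the reader reaches end of stream, you can drain it before flushing. A minimal sketch, with DrainConverter as a hypothetical helper reusing pTransform, dwDMOCount, uiFrameSize and the functions above:
// Ask the MFT/DMO to produce any remaining output, then pull until it is empty.
HRESULT DrainConverter(IMFTransform* pTransform, DWORD& dwDMOCount, const UINT32 uiFrameSize){
    HRESULT hr = pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL);
    while(SUCCEEDED(hr)){
        MFT_OUTPUT_DATA_BUFFER outputDataBuffer;
        DWORD processOutputStatus = 0;
        hr = InitOutputDataBuffer(pTransform, &outputDataBuffer, uiFrameSize);
        if(SUCCEEDED(hr)){
            hr = pTransform->ProcessOutput(0, 1, &outputDataBuffer, &processOutputStatus);
            if(SUCCEEDED(hr)){
                dwDMOCount++;
            }
        }
        SAFE_RELEASE(outputDataBuffer.pSample);
        if(hr == MF_E_TRANSFORM_NEED_MORE_INPUT){
            // Nothing left to drain.
            hr = S_OK;
            break;
        }
    }
    return hr;
}
You would call it from ProcessConverter, right after ProcessSample returns and before the FLUSH message.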
A similar question about directshow - How to use frame rate converter DMO in MF application can be found on Stack Overflow: https://stackoverflow.com/questions/8412343/