gpt4 book ai didi

RESOURCE_MANIPULATION ERROR #281 when calling ID3D11DeviceContext::CopySubresourceRegion to render video from a webcam(调用ID3D11DeviceContext::CopySubresourceRegion以从网络摄像头渲染视频时出现RESOURCE_MANIPULATION错误#281)

转载 作者:bug小助手 更新时间:2023-10-25 13:42:51 33 4
gpt4 key购买 nike



I'm trying to re-write my software-only logic for displaying video feed from a web camera. This time using hardware and DirectX. (I need to preface this by saying that this is the first time that I'm writing something for DirectX.)

我正试图重写我的纯软件逻辑,以便显示来自网络摄像头的视频。这一次使用了硬件和DirectX。(在此之前,我需要说明的是,这是我第一次为DirectX编写代码。)


I have the following declared on the global scale:

我在全局作用域声明了以下内容:


// Media Foundation globals for the webcam capture pipeline.
IMFMediaSource* g_pMediaSource = NULL;
ReaderCallback* gpRdrCallback = NULL;
IMFSourceReader* g_pSrcReader = NULL;

// Direct3D 11 / DXGI globals used for hardware-accelerated presentation.
IDXGISwapChain1* g_pHW_SwapChain = NULL;
ID3D11Device1* g_pHW_D3DDevice = NULL;
ID3D11DeviceContext1* g_pHW_ImmContext = NULL;
IMFDXGIDeviceManager* g_pHW_DXGIDevMgr = NULL;
ID3D11RenderTargetView* g_pHW_RenderTargetView = NULL;
ID3D11Texture2D* g_pHW_BackBuffer = NULL;

I get the IMFMediaSource for the webcam, using this code when the app starts:

我获取网络摄像头的IMFMediaSource,在应用程序启动时使用以下代码:


//Error handling is omitted for readability

// Activate the webcam identified by its symbolic link as an IMFMediaSource.
CComPtr<IMFAttributes> com_attributes;
hr = MFCreateAttributes(&com_attributes, 2);

// Ask for a video-capture device...
hr = com_attributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);

// ...selected by its symbolic link (obtained earlier from device enumeration).
hr = com_attributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
strWebcamSymLink.c_str());

CComPtr<IMFActivate> com_Activ;
hr = MFCreateDeviceSourceActivate(com_attributes, &com_Activ);

hr = com_Activ->ActivateObject(IID_PPV_ARGS(&g_pMediaSource));

Then I do the following (once) during initialization to set up DirectX:

然后,我在初始化期间执行以下操作(一次)来设置DirectX:


(Error handling is omitted for readability.)

(为提高可读性,省略了错误处理。)


//For this test, the input parameters are:
//(As they are received from a webcam)
//
// szFrameW = 160;
// szFrameH = 90;
// nFrameRateNumer = 5;
// nFrameRateDenom = 1;
// nAspectRatioNumer = 1;
// nAspectRatioDenom = 1;
// dwIdxDev = 0; //Webcam device index
//

// Describe a windowed, single-buffered swap chain the size of one video frame.
DXGI_SWAP_CHAIN_DESC sd = {};

sd.BufferCount = 1;

sd.BufferDesc.Width = szFrameW;
sd.BufferDesc.Height = szFrameH;
// NOTE(review): this back-buffer format is the root cause of D3D11 error #281
// (COPYSUBRESOURCEREGION_INVALIDSOURCE) reported later: the texture Media
// Foundation returns for MFVideoFormat_RGB32 is DXGI_FORMAT_B8G8R8X8_UNORM,
// which is not copy-compatible with R8G8B8A8. The accepted fix is
// DXGI_FORMAT_B8G8R8A8_UNORM here, together with MFVideoFormat_ARGB32 output.
sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
sd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE;
sd.BufferDesc.Scaling = DXGI_MODE_SCALING_CENTERED;
sd.BufferDesc.RefreshRate.Numerator = nFrameRateNumer;
sd.BufferDesc.RefreshRate.Denominator = nFrameRateDenom;

sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;

sd.OutputWindow = ghWnd; //Main window handle
sd.Windowed = TRUE;

// No multisampling.
sd.SampleDesc.Count = 1;
sd.SampleDesc.Quality = 0;


// Request Direct3D feature level 11.1 only.
D3D_FEATURE_LEVEL d3dFeatureLvls[] = {
D3D_FEATURE_LEVEL_11_1,
};

UINT dwNumLvlsRequested = _countof(d3dFeatureLvls);

D3D_FEATURE_LEVEL FeatureLevelsSupported;

CComPtr<IDXGISwapChain> com_SwapChain;
CComPtr<ID3D11Device> com_Dev;
CComPtr<ID3D11DeviceContext> com_Ctx;

// BGRA support is requested for Media Foundation interop; the debug layer
// is enabled only in debug builds.
hr = D3D11CreateDeviceAndSwapChain(NULL,
D3D_DRIVER_TYPE_HARDWARE,
NULL,
D3D11_CREATE_DEVICE_BGRA_SUPPORT |
#ifdef _DEBUG
D3D11_CREATE_DEVICE_DEBUG
#else
0
#endif
,
d3dFeatureLvls,
dwNumLvlsRequested,
D3D11_SDK_VERSION,
&sd,
&com_SwapChain,
&com_Dev,
&FeatureLevelsSupported,
&com_Ctx);

// Query the extended "*1" interfaces (DXGI 1.2 / D3D 11.1) from the base ones.
hr = com_SwapChain.QueryInterface(&g_pHW_SwapChain);
hr = com_Dev.QueryInterface(&g_pHW_D3DDevice);
hr = com_Ctx.QueryInterface(&g_pHW_ImmContext);

// Cache the back buffer; captured frames are copied straight into it later.
hr = g_pHW_SwapChain->GetBuffer(0, IID_PPV_ARGS(&g_pHW_BackBuffer));

hr = g_pHW_D3DDevice->CreateRenderTargetView(g_pHW_BackBuffer, NULL, &g_pHW_RenderTargetView);

g_pHW_ImmContext->OMSetRenderTargets(1, &g_pHW_RenderTargetView, NULL);

// Viewport sized to the video frame.
D3D11_VIEWPORT vp;
vp.Width = (FLOAT)szFrameW;
vp.Height = (FLOAT)szFrameH;
vp.MinDepth = 0.0f;
vp.MaxDepth = 1.0f;
vp.TopLeftX = 0;
vp.TopLeftY = 0;

g_pHW_ImmContext->RSSetViewports( 1, &vp );

// NOTE(review): querying IMFTransform directly off the media source is
// unusual -- confirm this QueryInterface actually succeeds for this source.
CComPtr<IMFTransform> com_Transform;
hr = g_pMediaSource->QueryInterface(IID_PPV_ARGS(&com_Transform));

// Create the DXGI device manager and hand it our D3D device so Media
// Foundation can decode/convert into D3D11 textures.
UINT uiToken = 0;
hr = MFCreateDXGIDeviceManager(&uiToken, &g_pHW_DXGIDevMgr);

hr = g_pHW_DXGIDevMgr->ResetDevice(g_pHW_D3DDevice, uiToken);

hr = com_Transform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
(ULONG_PTR)g_pHW_DXGIDevMgr);

Then to initiate the Microsoft Media Foundation for asynchronous rendering from a webcam:

然后启动Microsoft Media Foundation以从网络摄像头进行异步渲染:


//Error handling is omitted for readability

// Attributes for the asynchronous source reader.
hr = MFCreateAttributes(&com_attributes, 3);

// Deliver samples asynchronously through ReaderCallback::OnReadSample.
gpRdrCallback = new ReaderCallback();
hr = com_attributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, gpRdrCallback);

// Allow the reader to insert video processing (format conversion), and tie
// it to our DXGI device manager so frames are produced as D3D11 textures.
hr = com_attributes->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE);
hr = com_attributes->SetUINT32(MF_SOURCE_READER_DISCONNECT_MEDIASOURCE_ON_SHUTDOWN, TRUE);
hr = com_attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, g_pHW_DXGIDevMgr);

hr = MFCreateSourceReaderFromMediaSource(g_pMediaSource, com_attributes, &g_pSrcReader);

// Output media type the reader should convert frames into.
CComPtr<IMFMediaType> com_vid_output;
hr = MFCreateMediaType(&com_vid_output);

hr = com_vid_output->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
// NOTE(review): MFVideoFormat_RGB32 yields DXGI_FORMAT_B8G8R8X8_UNORM
// textures, which cannot be copied into the R8G8B8A8 back buffer
// (error #281). The accepted answer switches this to MFVideoFormat_ARGB32
// (DXGI_FORMAT_B8G8R8A8_UNORM) together with a B8G8R8A8 swap chain.
hr = com_vid_output->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
hr = com_vid_output->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
hr = com_vid_output->SetUINT64(MF_MT_FRAME_SIZE,
PackSize(szFrameW, szFrameH));

hr = com_vid_output->SetUINT64(MF_MT_FRAME_RATE,
PackSize(nFrameRateNumer, nFrameRateDenom));

hr = MFSetAttributeRatio(com_vid_output,
MF_MT_PIXEL_ASPECT_RATIO,
nAspectRatioNumer,
nAspectRatioDenom);

hr = g_pSrcReader->SetCurrentMediaType(dwIdxDev, NULL, com_vid_output);

//Initiate the read from a webcam (async)
hr = g_pSrcReader->SetStreamSelection(dwIdxDev, TRUE);
hr = g_pSrcReader->ReadSample(dwIdxDev,
0,
NULL, NULL, NULL, NULL);

After that from my ReaderCallback when a frame is read:

之后,当读取帧时,从我的ReaderCallback:


//Error handling is omitted for readability

// Source-reader callback: invoked by Media Foundation on a worker thread
// each time a previously requested sample becomes available.
//
// hrStatus      - status of the read; the sample is only meaningful on success.
// dwStreamIndex - index of the stream the sample belongs to.
// dwStreamFlags - MF_SOURCE_READER_FLAG_* bits (format change, end of stream, ...).
// llTimestamp   - sample timestamp, in 100-ns units.
// pSample       - captured frame; can be NULL even on success (e.g. stream tick).
HRESULT ReaderCallback::OnReadSample(HRESULT hrStatus,
DWORD dwStreamIndex,
DWORD dwStreamFlags,
LONGLONG llTimestamp,
IMFSample* pSample)
{
HRESULT hr;

// pSample may legitimately be NULL even when hrStatus succeeds, so guard both.
if(SUCCEEDED(hrStatus) && pSample)
{
// Collapse the sample into a single contiguous media buffer.
CComPtr<IMFMediaBuffer> com_buffer;
hr = pSample->ConvertToContiguousBuffer(&com_buffer);

// Take an extra reference so the buffer outlives the CComPtr below;
// the consumer owns the matching Release().
com_buffer->AddRef(); // pseudo-call
remember_video_buffer_for_later_processing(com_buffer);

}

//Initiate reading of another frame (keeps the async sample pump running)
hr = g_pSrcReader->ReadSample(dwIdxDev,
0,
NULL, NULL, NULL, NULL);

return S_OK;
}

Finally, when I am ready to render the video frame onto the window:
(The frame is received from my ReaderCallback::OnReadSample above.)

最后,当我准备好将视频帧呈现到窗口上时:(该帧是从上面的ReaderCallback::OnReadSample接收的。)


//Error handling is omitted for readability

// Copies one captured frame (a D3D11 texture wrapped in an IMFMediaBuffer)
// into the swap-chain back buffer and presents it.
void renderFromRawVideoPixels(IMFMediaBuffer* pMediaBuffer)
{
// The sample buffer is backed by a DXGI/D3D11 texture.
CComPtr<IMFDXGIBuffer> com_DxgBuffer;
hr = pMediaBuffer->QueryInterface(IID_PPV_ARGS(&com_DxgBuffer));

CComPtr<ID3D11Texture2D> com_Texture;
hr = com_DxgBuffer->GetResource(IID_PPV_ARGS(&com_Texture));

// Frames may live in a texture array; copy from the correct slice.
UINT nSubindex;
hr = com_DxgBuffer->GetSubresourceIndex(&nSubindex);

// NOTE(review): this is the call that fails with RESOURCE_MANIPULATION
// error #281 -- com_Texture (DXGI_FORMAT_B8G8R8X8_UNORM) and the back
// buffer (DXGI_FORMAT_R8G8B8A8_UNORM) have copy-incompatible formats;
// both sides must agree (e.g. B8G8R8A8, per the accepted answer).
g_pHW_ImmContext->CopySubresourceRegion(g_pHW_BackBuffer,
0, 0 ,0, 0,
com_Texture,
nSubindex,
NULL);

// Present immediately, no vsync wait.
DXGI_PRESENT_PARAMETERS dpp = {};
hr = g_pHW_SwapChain->Present1(0, 0, &dpp);
}

The call to g_pHW_ImmContext->CopySubresourceRegion above leaves the following error in the debugging output window and I get a black screen:

上面对g_pHW_ImmContext->CopySubresourceRegion的调用在调试输出窗口中留下了以下错误，并且我得到一个黑屏:



D3D11 ERROR: ID3D11DeviceContext::CopySubresourceRegion: Cannot invoke
CopySubresourceRegion when the Formats of each Resource are not the
same or at least castable to each other, unless one format is
compressed (DXGI_FORMAT_R9G9B9E5_SHAREDEXP, or
DXGI_FORMAT_BC[1,2,3,4,5,6,7]_* ) and the source format is similar to
the dest according to: BC[1|4] ~= R16G16B16A16|R32G32, BC[2|3|5|6|7]
~= R32G32B32A32, R9G9B9E5_SHAREDEXP ~= R32. [ RESOURCE_MANIPULATION
ERROR #281: COPYSUBRESOURCEREGION_INVALIDSOURCE]



I have no idea what it means.

我不知道这是什么意思。


Any help on how to resolve this?

任何帮助如何解决这个问题?


更多回答

I would say if you call GetDesc of those resources behind g_pHW_BackBuffer and com_Texture before the CopySubresourceRegion call, you would be able to see how exactly they are incompatible for the copy operation.

我想说，如果您在调用CopySubresourceRegion之前对g_pHW_BackBuffer和com_Texture背后的那些资源调用GetDesc，您就能确切地看到它们对于复制操作是如何不兼容的。

@RomanR. and if they are incompatible, what is the way to convert them from one to another?

@RomanR。如果它们是不相容的,如何将它们从一个转换为另一个?

Here's the difference. The com_Texture: {format=DXGI_FORMAT_B8G8R8X8_UNORM; BindFlags=D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE}, while g_pHW_BackBuffer: {format=DXGI_FORMAT_R8G8B8A8_UNORM; BindFlags=D3D11_BIND_RENDER_TARGET;}

这就是区别所在。com_Texture: {format=DXGI_FORMAT_B8G8R8X8_UNORM; BindFlags=D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE}，而g_pHW_BackBuffer: {format=DXGI_FORMAT_R8G8B8A8_UNORM; BindFlags=D3D11_BIND_RENDER_TARGET;}

The two resources are both 32 bpp, but still the formats are different B8G8R8X8_UNORM vs R8G8B8A8_UNORM. You definitely don't want to copy BGR to RGB in any case. You want to have compatible format there, probably the easiest is to create swapchain with the format which is going to be compatible.

这两个资源都是32 bpp，但格式仍然不同：B8G8R8X8_UNORM与R8G8B8A8_UNORM。在任何情况下，您都绝对不想把BGR复制到RGB。您需要在那里使用兼容的格式，可能最简单的办法就是用将会兼容的格式创建交换链。

@RomanR. I just tried it. First off, when I specify DXGI_FORMAT_B8G8R8X8_UNORM to my call to D3D11CreateDeviceAndSwapChain it returns E_INVALIDARG. But even if that succeeded, where do I get the format to feed into D3D11CreateDeviceAndSwapChain? Isn't it dependent on a webcam.

@RomanR。我刚试过了。首先，当我在调用D3D11CreateDeviceAndSwapChain时指定DXGI_FORMAT_B8G8R8X8_UNORM，它返回E_INVALIDARG。但是，即使成功了，我从哪里获得要传给D3D11CreateDeviceAndSwapChain的格式呢？它不是取决于网络摄像头吗？

优秀答案推荐

As the system tells you, the formats between what Media Foundation gives you back and what the DirectX swapchain expects are not compatible.

正如系统告诉您的那样,Media Foundation返回给您的内容和DirectX交换链期望的内容之间的格式不兼容。


You can make them compatible with two changes:

您可以使它们与两个更改兼容:


A) change the swapchain format, change this:

A)更改交换链格式,更改如下内容:


sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;

into this:

进入这一阶段:


sd.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;

B) change MF output video format, change this:

B)更改mf输出视频格式,更改如下:


hr = com_vid_output->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
=> MF will give you back DXGI_FORMAT_B8G8R8X8_UNORM

into this:

进入这一阶段:


hr = com_vid_output->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
=> MF will give you back DXGI_FORMAT_B8G8R8A8_UNORM

更多回答

Thanks. But oh wow, seriously. That's what was wrong with it 🤦‍♂️ I did try to change the format but chose DXGI_FORMAT_B8G8R8X8_UNORM instead of DXGI_FORMAT_B8G8R8A8_UNORM.

谢谢。但是，哦，哇，说真的。这就是问题所在🤦‍♂️ 我确实尝试过更改格式，但选择了DXGI_FORMAT_B8G8R8X8_UNORM，而不是DXGI_FORMAT_B8G8R8A8_UNORM。

A follow-up if you don't mind. Why is it stretching it to the size of the entire window though? Didn't I specify the D3D11_VIEWPORT to be the size of the frame?

如果你不介意的话，我来跟进一下。但为什么它会把画面拉伸到整个窗口的大小呢？我不是已经把D3D11_VIEWPORT指定为帧的大小了吗？

It's not only the swapchain format, it's essentially the MF output format. Follow up: not sure if you can do that with a windowed swapchain created like that. You can instead create it using CreateSwapChainForHwnd (and create device with D3D11CreateDevice), like in here pastebin.com/raw/3a2Gg8v3 PS: your render target view is useless here (you just want a DXGI surface basically), CreateRenderTargetView OMSetRenderTargets and RSSetViewports can be removed.

它不仅是交换链格式的问题，本质上也是MF输出格式的问题。后续：不确定用那样创建的窗口化交换链能否做到这一点。您可以改用CreateSwapChainForHwnd来创建它（并用D3D11CreateDevice创建设备），就像这里的pastebin.com/raw/3a2Gg8v3。PS：您的渲染目标视图在这里是无用的（您基本上只需要一个DXGI表面），CreateRenderTargetView、OMSetRenderTargets和RSSetViewports都可以删除。

33 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com