Reputation: 103
I read a mp4 file(H264 encodec) asynchronously, and I have configured the relevant parameters before, but when I finally pass the sample through CopyResource to the target texture shared, it always fails as below.
D3D11 ERROR: ID3D11DeviceContext::CopyResource: Cannot invoke CopyResource when the Formats of each Resource are not the same or at least castable to each other, unless one format is compressed (DXGI_FORMAT_R9G9B9E5_SHAREDEXP, or DXGI_FORMAT_BC[1,2,3,4,5]_* ) and the source format is similar to the dest according to: BC[1|4] ~= R16G16B16A16|R32G32, BC[2|3|5] ~= R32G32B32A32, R9G9B9E5_SHAREDEXP ~= R32. [ RESOURCE_MANIPULATION ERROR #284: COPYRESOURCE_INVALIDSOURCE]
If I specify MF_MT_SUBTYPE/MF_MT_FRAME_SIZE/MF_MT_FRAME_RATE and so on, these can actually take effect? Or do I have to implement CResizerDMO, CColorConvertDMO, CFrameRateConvertDMO and so on step by step? I see that IMFSinkWriter seems to be able to achieve the goal as long as these parameters are configured.
HRESULT CAsyncFileReader::StartAsyncRead(PCWSTR pszURL)
{
CComPtr<IMFAttributes> pAttributes = NULL;
HRESULT hr = MFCreateAttributes(&pAttributes, 5);
RETURN_ON_FAIL(hr);
hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
RETURN_ON_FAIL(hr);
hr = pAttributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, _dxgi_device_manager);
RETURN_ON_FAIL(hr);
hr = pAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
RETURN_ON_FAIL(hr);
hr = pAttributes->SetUINT32(MF_LOW_LATENCY, 1);
RETURN_ON_FAIL(hr);
//MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING (version < win8)
hr = pAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE);
RETURN_ON_FAIL(hr);
hr = MFCreateSourceReaderFromURL(pszURL, pAttributes, &m_pReader);
RETURN_ON_FAIL(hr);
CComPtr<IMFMediaType> pType = NULL;
hr = m_pReader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &pType);
RETURN_ON_FAIL(hr);
hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, Capture_Width, Capture_Height);
hr = m_pReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType);
RETURN_ON_FAIL(hr);
...
}
//called by OnReadSample callback
HRESULT CAsyncFileReader::Copy2Texture(IMFSample* sample)
{
CComPtr<IMFMediaBuffer> buffer;
HRESULT hr = sample->GetBufferByIndex(0, &buffer);
if (SUCCEEDED(hr))
{
CComPtr<IMFDXGIBuffer> dxgiBuffer;
hr = buffer->QueryInterface(IID_PPV_ARGS(&dxgiBuffer));
if (SUCCEEDED(hr))
{
CComPtr<ID3D11Texture2D> texture;
hr = dxgiBuffer->GetResource(IID_PPV_ARGS(&texture));
if (SUCCEEDED(hr))
{
// unsigned int subresource = 0;
// hr = dxgiBuffer->GetSubresourceIndex(&subresource);
if (SUCCEEDED(hr))
{
CComPtr<ID3D11DeviceContext> immediate_context;
_d3d_device->GetImmediateContext(&immediate_context);
//tbd, AcquireSync need restore if failed under certain conditions
UINT64 key = AllocKey();
hr = _dxgi_keyed_mutex->AcquireSync(key, 10);
RETURN_ON_FAIL(hr);
immediate_context->CopyResource(_shared_texture2d, texture); <-------mftrace as above
//immediate_context->CopySubresourceRegion(_shared_texture2d, 0, 0, 0, 0, texture, subresource, nullptr);
hr = _dxgi_keyed_mutex->ReleaseSync(key + 1);
RETURN_ON_FAIL(hr);
}
}
}
}
return hr;
}
HRESULT CreateSharedD3D11Texture2D(ID3D11Device* d3d11_device, const UINT32& w, const UINT32& h, ID3D11Texture2D** d3d11_texture_2d)
{
D3D11_TEXTURE2D_DESC texDesc;
texDesc.ArraySize = 1;
texDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
texDesc.CPUAccessFlags = 0;
texDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
texDesc.Width = w;
texDesc.Height = h;
texDesc.MipLevels = 1;
texDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
texDesc.Usage = D3D11_USAGE_DEFAULT;
texDesc.SampleDesc.Count = 1;
texDesc.SampleDesc.Quality = 0;
texDesc.Usage = D3D11_USAGE_DEFAULT;
return d3d11_device->CreateTexture2D(&texDesc, nullptr, d3d11_texture_2d);
}
In addition, DXGI_FORMAT_B8G8R8A8_UNORM of texture is equivalent to MFVideoFormat_RGB32, right?
Upvotes: 0
Views: 306
Reputation: 69716
For the question put as wide as in the subject the answer is "no".
If you are okay to narrow it down to getting DXGI_FORMAT_B8G8R8A8_UNORM
as a result of format conversion and fitting using Media Foundation Source Reader API, then such solution exists.
Respective Media Foundation formats are MFVideoFormat_RGB32
and MFVideoFormat_ARGB32
.
In your code snippet you GetNativeMediaType
and then you SetCurrentMediaType
the same format exactly. Instead you need to build a new RGB32/ARGB32 media type between these calls and set this new format instead. You might need to update Source Reader attributes to enable conversions. Source Reader API will manage necessary primitives (transforms) internally in order to provide you with data in the requested format.
Alternatively you can pull data in the "native" format and managed decoders/convrerters yourselves, which will be pretty much the same except that it will take quite some time before you figure out how to implement data flow through transform chain.
Upvotes: 1