F
Fleuve
Guest
Hi.
I decided to integrate webcam capture (IMFSourceReaderCallback) into my engine, and I did some work based on the MFCaptureD3D sample from the Windows samples. But I am a little confused about two things.
1. In my engine, everything is synchronized through CRXMaterial::OnTick. For example, if you use IMFMediaEngine and ask it to play a video (mp4/avi), it gets the ID3D11Texture2D1 texture from the ID3D11ShaderResourceView1, passes it to IMFMediaEngine::TransferFrame, and the video frame is copied into your texture.
Now with IMFSourceReaderCallback you must draw the frame inside IMFSourceReaderCallback::OnReadSample. To respect the OnTick philosophy, I had an idea: buffer the IMFMediaBuffer inside IMFSourceReaderCallback::OnReadSample and retrieve it inside CRXMaterial::OnTick to transfer it into the texture resource.
2. The problem comes with examples that use IDirect3DSurface9 and the YUV color system. In short, how do I transfer an IMF2DBuffer to an ID3D11Texture2D?
OnReadSample
//***************************************************************************
//* Class name : CRXWebCam
//* Output : HRESULT
//* Function name : OnReadSample
//* Description : IMFSourceReaderCallback hook. Caches the sample's first
//*               media buffer in m_pBuffer so the render thread can consume
//*               it later (CRXMaterial::OnTick), then queues the next
//*               asynchronous read so capture keeps flowing.
//*               NOTE(review): m_pBuffer is touched from both the MF worker
//*               thread (here) and the render thread (OnTick) with no lock —
//*               consider guarding it; TODO confirm threading model.
//* Input : HRESULT hrStatus      - status of the completed async read
//* DWORD dwStreamIndex           - index of the stream the sample came from
//* DWORD dwStreamFlags           - MF_SOURCE_READERF_* flags
//* LONGLONG llTimestamp          - sample timestamp
//* IMFSample* pSample            - captured frame; may be NULL (e.g. gap)
//***************************************************************************
HRESULT CRXWebCam::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample)
{
    // Propagate the async status directly instead of copying it via a branch.
    HRESULT hr = hrStatus;
    IMFMediaBuffer* pBuffer = NULL;
    if (SUCCEEDED(hr) && pSample)
    {
        hr = pSample->GetBufferByIndex(0, &pBuffer);
        // Only publish the buffer when we actually got one; the original code
        // called m_pBuffer->AddRef() even on failure (NULL dereference).
        if (SUCCEEDED(hr) && pBuffer)
        {
            // Release any frame the render thread has not consumed yet,
            // otherwise the previous buffer leaks whenever frames arrive
            // faster than OnTick drains them.
            if (m_pBuffer)
                m_pBuffer->Release();
            m_pBuffer = pBuffer;
            m_pBuffer->AddRef();
        }
    }
    // Request the next frame so the capture loop continues.
    if (SUCCEEDED(hr))
        hr = m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
    // Drop our local reference; m_pBuffer still holds its own.
    if (pBuffer)
        pBuffer->Release();
    return hr;
}
OnTick
//***************************************************************************
// Class name : CRXMedia
// Function name : Render
// Description : Render Media Class
//***************************************************************************
void CRXMaterial::OnTick()
{
/*IDXGIFactory4* spFactory;
IDXGIAdapter1* spAdapter;
IDXGIOutput* spOutput;
ThrowIfFailed(CreateDXGIFactory2(0, __uuidof(IDXGIFactory4), (void**)&spFactory));
ThrowIfFailed(spFactory->EnumAdapters1(0, &spAdapter));
ThrowIfFailed(spAdapter->EnumOutputs(0, &spOutput));*/
if (m_pMediaEngine)
{
if (/*SUCCEEDED(spOutput->WaitForVBlank()) && this && */!m_pMediaEngine->m_spMediaEngine->IsPaused())
{
ID3D11Texture2D1* Texture;
m_pTextureView->GetResource((ID3D11Resource**)&Texture);
DWORD SizeX;
DWORD SizeY;
m_pMediaEngine->m_spMediaEngine->GetNativeVideoSize(&SizeX, &SizeY);
RECT Rect = { 0,0, (LONG)SizeX, (LONG)SizeY };
MFVideoNormalizedRect NormRect = { 0.0f, 0.0f, 1.0f, 1.0f };
MFARGB BackColor = { 0, 0, 0, 255 };
m_pMediaEngine->TransferFrame(Texture, NormRect, Rect, BackColor);
Texture->Release();
}
}
if (m_pWebCamEngine) //HERE
{
if (m_pWebCamEngine->m_pBuffer)
{
ID3D11Texture2D1* Texture;
m_pTextureView->GetResource((ID3D11Resource**)&Texture);
IMF2DBuffer* m_p2DBuffer = NULL;
//m_pWebCamEngine->m_pBuffer->QueryInterface(IID_PPV_ARGS(&m_p2DBuffer));
m_pWebCamEngine->m_pBuffer->QueryInterface(IID_IMF2DBuffer, (void**)&m_p2DBuffer);
BYTE* ppbScanLine0;
LONG plStride;
m_p2DBuffer->Lock2D(&ppbScanLine0, &plStride);
//YUV to RGB???
/*for (DWORD y = 0; y < m_Height; y++)
{
RGBQUAD* pDestPel = (RGBQUAD*)mapped.pData;
WORD* pSrcPel = (WORD*)ppbScanLine0;
for (DWORD x = 0; x < 640; x += 2)
{
// Byte order is U0 Y0 V0 Y1
int y0 = (int)LOBYTE(pSrcPel[x]);
int u0 = (int)HIBYTE(pSrcPel[x]);
int y1 = (int)LOBYTE(pSrcPel[x + 1]);
int v0 = (int)HIBYTE(pSrcPel[x + 1]);
pDestPel[x] = ConvertYCrCbToRGB(y0, v0, u0);
pDestPel[x + 1] = ConvertYCrCbToRGB(y1, v0, u0);
}
ppbScanLine0 += plStride;
//mapped.pData += mapped.RowPitch;
}*/
//m_pDirect3D->GetD3DDeviceContext()->Unmap(Texture, NULL);
m_p2DBuffer->Unlock2D();
m_p2DBuffer->Release();
Texture->Release();
if (m_pWebCamEngine->m_pBuffer) m_pWebCamEngine->m_pBuffer->Release();
m_pWebCamEngine->m_pBuffer = NULL;
}
}
/*spFactory->Release();
spAdapter->Release();
spOutput->Release();*/
}
Continue reading...
I decided to integrate webcam capture (IMFSourceReaderCallback) into my engine, and I did some work based on the MFCaptureD3D sample from the Windows samples. But I am a little confused about two things.
1. In my engine, everything is synchronized through CRXMaterial::OnTick. For example, if you use IMFMediaEngine and ask it to play a video (mp4/avi), it gets the ID3D11Texture2D1 texture from the ID3D11ShaderResourceView1, passes it to IMFMediaEngine::TransferFrame, and the video frame is copied into your texture.
Now with IMFSourceReaderCallback you must draw the frame inside IMFSourceReaderCallback::OnReadSample. To respect the OnTick philosophy, I had an idea: buffer the IMFMediaBuffer inside IMFSourceReaderCallback::OnReadSample and retrieve it inside CRXMaterial::OnTick to transfer it into the texture resource.
2. The problem comes with examples that use IDirect3DSurface9 and the YUV color system. In short, how do I transfer an IMF2DBuffer to an ID3D11Texture2D?
OnReadSample
//***************************************************************************
//* Class name : CRXWebCam
//* Output : HRESULT
//* Function name : OnReadSample
//* Description : IMFSourceReaderCallback hook. Caches the sample's first
//*               media buffer in m_pBuffer so the render thread can consume
//*               it later (CRXMaterial::OnTick), then queues the next
//*               asynchronous read so capture keeps flowing.
//*               NOTE(review): m_pBuffer is touched from both the MF worker
//*               thread (here) and the render thread (OnTick) with no lock —
//*               consider guarding it; TODO confirm threading model.
//* Input : HRESULT hrStatus      - status of the completed async read
//* DWORD dwStreamIndex           - index of the stream the sample came from
//* DWORD dwStreamFlags           - MF_SOURCE_READERF_* flags
//* LONGLONG llTimestamp          - sample timestamp
//* IMFSample* pSample            - captured frame; may be NULL (e.g. gap)
//***************************************************************************
HRESULT CRXWebCam::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample* pSample)
{
    // Propagate the async status directly instead of copying it via a branch.
    HRESULT hr = hrStatus;
    IMFMediaBuffer* pBuffer = NULL;
    if (SUCCEEDED(hr) && pSample)
    {
        hr = pSample->GetBufferByIndex(0, &pBuffer);
        // Only publish the buffer when we actually got one; the original code
        // called m_pBuffer->AddRef() even on failure (NULL dereference).
        if (SUCCEEDED(hr) && pBuffer)
        {
            // Release any frame the render thread has not consumed yet,
            // otherwise the previous buffer leaks whenever frames arrive
            // faster than OnTick drains them.
            if (m_pBuffer)
                m_pBuffer->Release();
            m_pBuffer = pBuffer;
            m_pBuffer->AddRef();
        }
    }
    // Request the next frame so the capture loop continues.
    if (SUCCEEDED(hr))
        hr = m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
    // Drop our local reference; m_pBuffer still holds its own.
    if (pBuffer)
        pBuffer->Release();
    return hr;
}
OnTick
//***************************************************************************
// Class name : CRXMedia
// Function name : Render
// Description : Render Media Class
//***************************************************************************
void CRXMaterial::OnTick()
{
/*IDXGIFactory4* spFactory;
IDXGIAdapter1* spAdapter;
IDXGIOutput* spOutput;
ThrowIfFailed(CreateDXGIFactory2(0, __uuidof(IDXGIFactory4), (void**)&spFactory));
ThrowIfFailed(spFactory->EnumAdapters1(0, &spAdapter));
ThrowIfFailed(spAdapter->EnumOutputs(0, &spOutput));*/
if (m_pMediaEngine)
{
if (/*SUCCEEDED(spOutput->WaitForVBlank()) && this && */!m_pMediaEngine->m_spMediaEngine->IsPaused())
{
ID3D11Texture2D1* Texture;
m_pTextureView->GetResource((ID3D11Resource**)&Texture);
DWORD SizeX;
DWORD SizeY;
m_pMediaEngine->m_spMediaEngine->GetNativeVideoSize(&SizeX, &SizeY);
RECT Rect = { 0,0, (LONG)SizeX, (LONG)SizeY };
MFVideoNormalizedRect NormRect = { 0.0f, 0.0f, 1.0f, 1.0f };
MFARGB BackColor = { 0, 0, 0, 255 };
m_pMediaEngine->TransferFrame(Texture, NormRect, Rect, BackColor);
Texture->Release();
}
}
if (m_pWebCamEngine) //HERE
{
if (m_pWebCamEngine->m_pBuffer)
{
ID3D11Texture2D1* Texture;
m_pTextureView->GetResource((ID3D11Resource**)&Texture);
IMF2DBuffer* m_p2DBuffer = NULL;
//m_pWebCamEngine->m_pBuffer->QueryInterface(IID_PPV_ARGS(&m_p2DBuffer));
m_pWebCamEngine->m_pBuffer->QueryInterface(IID_IMF2DBuffer, (void**)&m_p2DBuffer);
BYTE* ppbScanLine0;
LONG plStride;
m_p2DBuffer->Lock2D(&ppbScanLine0, &plStride);
//YUV to RGB???
/*for (DWORD y = 0; y < m_Height; y++)
{
RGBQUAD* pDestPel = (RGBQUAD*)mapped.pData;
WORD* pSrcPel = (WORD*)ppbScanLine0;
for (DWORD x = 0; x < 640; x += 2)
{
// Byte order is U0 Y0 V0 Y1
int y0 = (int)LOBYTE(pSrcPel[x]);
int u0 = (int)HIBYTE(pSrcPel[x]);
int y1 = (int)LOBYTE(pSrcPel[x + 1]);
int v0 = (int)HIBYTE(pSrcPel[x + 1]);
pDestPel[x] = ConvertYCrCbToRGB(y0, v0, u0);
pDestPel[x + 1] = ConvertYCrCbToRGB(y1, v0, u0);
}
ppbScanLine0 += plStride;
//mapped.pData += mapped.RowPitch;
}*/
//m_pDirect3D->GetD3DDeviceContext()->Unmap(Texture, NULL);
m_p2DBuffer->Unlock2D();
m_p2DBuffer->Release();
Texture->Release();
if (m_pWebCamEngine->m_pBuffer) m_pWebCamEngine->m_pBuffer->Release();
m_pWebCamEngine->m_pBuffer = NULL;
}
}
/*spFactory->Release();
spAdapter->Release();
spOutput->Release();*/
}
Continue reading...