FFmpeg DXVA2解码得到的数据是使用surface来承载的,surface限制很多。如果能用纹理来渲染的话,那我们就可以充分利用D3D,比如可以用坐标变换来实现电子放大的功能,还可以用坐标变换来实现视频图像任意角度的旋转等功能。而对于我来说,最重要的是纹理渲染可以使得解码后的数据能够用像素着色器来做简单的视频图像处理;如果用的是D3D11,对于更为复杂的视频图像处理算法也有望用Compute Shader实现,以便充分利用显卡来加速并释放CPU。
/*
 * Create the rendering resources needed to draw a decoded DXVA2 frame as a
 * textured quad:
 *   - a 4-vertex quad vertex buffer (QuadVB),
 *   - projection and view transforms on the device,
 *   - a render-target texture (g_SurfaceTexture) in the given format and its
 *     level-0 surface (g_OffScreenSurface), which later receives the decoded
 *     frame via StretchRect.
 *
 * Device : D3D9 device to create resources on (must be non-NULL).
 * Width  : texture width in pixels.
 * Height : texture height in pixels.
 * format : texture format; must match d3dpp.BackBufferFormat so that
 *          StretchRect from the decoder surface succeeds.
 *
 * Returns true on success, false if Device is NULL or any D3D call fails.
 * NOTE(review): QuadVB / g_SurfaceTexture are globals owned elsewhere; on a
 * mid-function failure they are left allocated for the caller to release.
 */
static bool setup_texture(IDirect3DDevice9* Device, int Width, int Height, D3DFORMAT format)
{
	if (!Device)
	{
		return false;
	}
	HRESULT hr = Device->CreateVertexBuffer(
		4 * sizeof(Dxva2TexVertex),
		D3DUSAGE_WRITEONLY,
		Dxva2TexVertex::FVF,
		D3DPOOL_MANAGED,
		&QuadVB,
		0);
	// BUGFIX: the original ignored this hr and dereferenced QuadVB
	// unconditionally, crashing if the allocation failed.
	if (FAILED(hr) || !QuadVB)
	{
		return false;
	}
	Dxva2TexVertex* v = 0;
	hr = QuadVB->Lock(0, 0, (void**)&v, 0);
	if (FAILED(hr) || !v)
	{
		return false;
	}
	// Quad spanning [-20, 20] in x/y at z = 0, with texture coordinates
	// mapping the full texture (0..1) onto it.
	v[0] = Dxva2TexVertex(-20.0f, 20.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f);
	v[1] = Dxva2TexVertex( 20.0f, 20.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f);
	v[2] = Dxva2TexVertex( 20.0f, -20.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f);
	v[3] = Dxva2TexVertex(-20.0f, -20.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f);
	QuadVB->Unlock();
	// Perspective projection: 90-degree vertical FOV, square aspect ratio.
	D3DXMATRIX P;
	D3DXMatrixPerspectiveFovLH(&P,
		D3DX_PI * 0.5f,
		1.0f,
		1.0f,    // distance from the origin to the near clip plane
		1000.0f  // distance from the origin to the far clip plane
	);
	Device->SetTransform(D3DTS_PROJECTION, &P);
	Device->SetRenderState(D3DRS_LIGHTING, false);
	// View transform: camera at z = -20 looking at the origin, +Y up.
	D3DXVECTOR3 position(0.0f, 0.0f, -20.0f);
	D3DXVECTOR3 target(0.0f, 0.0f, 0.0f);
	D3DXVECTOR3 up(0.0f, 1.0f, 0.0f);
	D3DXMATRIX V;
	D3DXMatrixLookAtLH(&V, &position, &target, &up); // build the look-at (view) matrix
	Device->SetTransform(D3DTS_VIEW, &V);
	// Render-target texture that will receive the decoded DXVA2 surface.
	hr = Device->CreateTexture(Width, Height, 1, D3DUSAGE_RENDERTARGET, format, D3DPOOL_DEFAULT, &g_SurfaceTexture, NULL);
	if (FAILED(hr))
	{
		return false;
	}
	// BUGFIX: check GetSurfaceLevel too; the original assumed it succeeded.
	hr = g_SurfaceTexture->GetSurfaceLevel(0, &g_OffScreenSurface);
	if (FAILED(hr) || !g_OffScreenSurface)
	{
		return false;
	}
	return true;
}
// Article excerpt (repeated from setup_texture above): create the
// render-target texture and fetch its mipmap level-0 surface.
hr = Device->CreateTexture ( Width, Height, 1, D3DUSAGE_RENDERTARGET, format, D3DPOOL_DEFAULT, &g_SurfaceTexture, NULL ) ;
if (FAILED(hr)) return false;
g_SurfaceTexture->GetSurfaceLevel(0, &g_OffScreenSurface);
CreateTexture的第四个参数注意设置为D3DUSAGE_RENDERTARGET,第五个参数format与设置D3D时的参数中的 d3dpp.BackBufferFormat = d3ddm.Format; 保持一致,详见工程源码。GetSurfaceLevel能够拿到具体某个level的mipmap的surface,我获取的是g_SurfaceTexture在level为0的surface,即g_OffScreenSurface。
static int dxva2_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
LPDIRECT3DSURFACE9 surface = (LPDIRECT3DSURFACE9)frame->data[3];
InputStream *ist = (InputStream *)s->opaque;
DXVA2Context *ctx = (DXVA2Context *)ist->hwaccel_ctx;
HRESULT hr ;
int ret = 0 ;
EnterCriticalSection(&cs);
if (ctx->d3d9device && g_OffScreenSurface)
{
ctx->d3d9device->SetRenderTarget(0, g_OffScreenSurface);
ctx->d3d9device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(200, 200, 200), 1.0f, 0);
ctx->d3d9device->BeginScene();
ctx->d3d9device->SetTexture(0, NULL);
GetClientRect(d3dpp.hDeviceWindow, &m_rtViewport);
ctx->d3d9device->StretchRect(surface, NULL, g_OffScreenSurface, NULL, D3DTEXF_LINEAR);
ctx->d3d9device->EndScene();