在 UWP 视频处理相关的开发任务中,有时候我们会通过 IBasicVideoEffect 接口和 Win2D 组件来实时地对视频应用一些效果。一般情况下,我们会将 IBasicVideoEffect.SupportedEncodingProperties 设置为 MediaEncodingSubtypes.Argb32 ,这样我们拿到视频帧的 surface 后,可以直接使用 CanvasBitmap.CreateFromDirect3D11Surface 方法来将它转换为 CanvasBitmap ,然后做其他处理(比如保存为图片文件)。然而在大部分情况下,视频流都是用 H.264 编码的,这时候如果不将 IBasicVideoEffect.SupportedEncodingProperties 设置为 MediaEncodingSubtypes.Nv12 的话,播放高码率视频的时候会卡得你怀疑人生;但是设置了 NV12 格式后,你又会发现 Win2D 不支持直接将 NV12 格式的 IDirect3DSurface 转为 CanvasBitmap (目前 Win2D 只支持 ARGB 类的格式)。本文提供将 NV12 格式的 IDirect3DSurface 转换为 CanvasVirtualBitmap 并保存为图片文件的方法。

不过为了实现这个转换,需要使用 Native 代码直接调用 DX 相关 API 来实现。话不多说,直接上代码。

首先引入必要的头文件和命名空间:

#include <Windows.Graphics.DirectX.Direct3D11.interop.h>
#include <Microsoft.Graphics.Canvas.native.h>
#include <d3d11.h>
#include <d2d1.h>
#include <d2d1_1.h>
#include <d2d1_3.h>
#include <dxgi.h>

using namespace Windows::Graphics::DirectX::Direct3D11;
using namespace Microsoft::Graphics::Canvas;
using namespace Microsoft::WRL;

然后是实现代码(省略了错误处理):

int GetNv12Bitmap(IDirect3DDevice^ d3dDevice, IDirect3DSurface^ d3dSurface, CanvasDevice^* outputDevice, CanvasVirtualBitmap^* outputBitmap)
{
    ComPtr<IDXGISurface> dxgiSurface = nullptr;
    HRESULT hr = GetDXGIInterface(d3dSurface, dxgiSurface.GetAddressOf());

    ComPtr<ID3D11Device> nativeD3dDevice;
    hr = GetDXGIInterface(d3dDevice, nativeD3dDevice.GetAddressOf());

    IDXGIDevice* dxgiDevice = nullptr;
    hr = nativeD3dDevice->QueryInterface<IDXGIDevice>(&dxgiDevice);

    ID2D1Device* d2dDevice = nullptr;
    D2D1_CREATION_PROPERTIES properties;
    properties.debugLevel = D2D1_DEBUG_LEVEL::D2D1_DEBUG_LEVEL_NONE;
    properties.options = D2D1_DEVICE_CONTEXT_OPTIONS::D2D1_DEVICE_CONTEXT_OPTIONS_NONE;
    properties.threadingMode = D2D1_THREADING_MODE::D2D1_THREADING_MODE_SINGLE_THREADED;
    hr = D2D1CreateDevice(dxgiDevice, properties, &d2dDevice);

    ID2D1DeviceContext* d2dDeviceContext = nullptr;
    hr = d2dDevice->CreateDeviceContext(properties.options, &d2dDeviceContext);

    ID2D1DeviceContext2* d2dDeviceContext2 = nullptr;
    hr = d2dDeviceContext->QueryInterface<ID2D1DeviceContext2>(&d2dDeviceContext2);

    ID2D1ImageSource* d2dImageSource = nullptr;
    hr = d2dDeviceContext2->CreateImageSourceFromDxgi(
        dxgiSurface.GetAddressOf(),
        1,
        DXGI_COLOR_SPACE_TYPE::DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
        D2D1_IMAGE_SOURCE_FROM_DXGI_OPTIONS::D2D1_IMAGE_SOURCE_FROM_DXGI_OPTIONS_NONE,
        &d2dImageSource);

    CanvasDevice^ canvasDevice = GetOrCreate<CanvasDevice>(d2dDevice);
    *outputDevice = canvasDevice;
    *outputBitmap = GetOrCreate<CanvasVirtualBitmap>(canvasDevice, d2dImageSource);

    return 0;
}

将 CanvasVirtualBitmap 保存为 PNG 文件:

// Convert the NV12 surface, then save it as a PNG; dispose both Win2D
// objects once the save completes.
GetNv12Bitmap(d3dDevice, surface, out CanvasDevice canvasDevice, out CanvasVirtualBitmap canvasVirtualBitmap);
using (canvasDevice)
using (canvasVirtualBitmap)
{
    var bounds = new Rect(0, 0, videoWidth, videoHeight);
    await CanvasImage.SaveAsync(canvasVirtualBitmap, bounds, 96, canvasDevice, stream, CanvasBitmapFileFormat.Png);
}

参考资料:
[1] is possible convert YUV surface to CanvasBitmap ?

» 转载请注明来源及链接:未来代码研究所

Related Posts:

Leave a Reply

World Line
Time Machine
Online Tools