DirectX屏幕捕获和输出为视频 [英] DirectX Screen Capture and Output as Video

查看:1339
本文介绍了DirectX屏幕捕获和输出为视频的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我正在做桌面屏幕捕获,并希望输出成为视频文件。目前,我有代码从此处输出png图片。我修改了一些代码将输出改为JPEG文件,然后使用openCV 3.0.0转换为avi视频输出。我需要JPEG文件作为输出的原因是:我运行在Windows 8.1上,而OpenCV VideoWriter::fourcc('M','J','P','G')是唯一对我有效的选项。

PNG图像的输出效果完美,但不是JPEG。图像上有垂直线,生成JPEG输出的时间比PNG长得多。



我有两个选项:



  1. 改进JPEG文件的输出,使其生成更快并得到清晰的图像。

  2. 摆脱OpenCV 3.0.0的问题,使其接受PNG文件输入并能够输出视频文件。(最好是AVI/MP4文件格式)


请帮忙。谢谢。



我的代码:

  #includestdafx。 h
#include< Wincodec.h> //我们使用WIC保存图像
#include< d3d9.h> // DirectX 9 header
#include< opencv\cv.h>
#include< opencv\cxcore.hpp>
#include< opencv2\highgui\highgui.hpp>
#pragma comment(lib,d3d9.lib)//链接到DirectX 9库

using namespace cv;

#define WIDEN2(x)L ## x
#define WIDEN(x)WIDEN2(x)
#define __WFILE__ WIDEN(__ FILE__)
#define HRCHECK (__expr){hr =(__ expr); if(FAILED(hr)){wprintf(LFAILURE 0x%08X(%i)\\\
\tline:%u file:'%s'\ texpr:'WIDEN(#__ expr)L'\\\
,hr,hr,__LINE __,__ WFILE __); goto cleanup;}}
#define RELEASE(__ p){if(__ p!= nullptr){ __p-> Release(); __ p = nullptr;}}

HRESULT Direct3D9TakeScreenshots(UINT adapter,UINT count);
HRESULT SavePixelsToFile32bppPBGRA(UINT width,UINT height,UINT stride,LPBYTE pixels,LPWSTR filePath,const GUID& format);


int _tmain(int argc,_TCHAR * argv [])
{
HRESULT hr = Direct3D9TakeScreenshots(D3DADAPTER_DEFAULT,10);
return 0;
}


HRESULT Direct3D9TakeScreenshots(UINT适配器,UINT计数)
{
HRESULT hr = S_OK;
IDirect3D9 * d3d = nullptr;
IDirect3DDevice9 * device = nullptr;
IDirect3DSurface9 * surface = nullptr;
D3DPRESENT_PARAMETERS parameters = {0};
D3DDISPLAYMODE mode;
D3DLOCKED_RECT rc;
UINT pitch;
SYSTEMTIME st;
LPBYTE * shots = nullptr;

//初始化D3D并获取屏幕大小
d3d = Direct3DCreate9(D3D_SDK_VERSION);
HRCHECK(d3d-> GetAdapterDisplayMode(adapter,& mode));

parameters.Windowed = TRUE;
parameters.BackBufferCount = 1;
parameters.BackBufferHeight = mode.Height;
parameters.BackBufferWidth = mode.Width;
parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
parameters.hDeviceWindow = NULL;

//创建设备&捕获表面
HRCHECK(d3d-> CreateDevice(adapter,D3DDEVTYPE_HAL,NULL,D3DCREATE_SOFTWARE_VERTEXPROCESSING,& parameter,& device));
HRCHECK(device-> CreateOffscreenPlainSurface(mode.Width,mode.Height,D3DFMT_A8R8G8B8,D3DPOOL_SYSTEMMEM,& surface,nullptr));

//计算所需的缓冲区大小
HRCHECK(surface-> LockRect(& rc,NULL,0));
pitch = rc.Pitch;
HRCHECK(surface-> UnlockRect());

//分配屏幕截图缓冲区
shots = new LPBYTE [count];
for(UINT i = 0; i {
shots [i] = new BYTE [pitch * mode.Height];
}

GetSystemTime(& st); // measure the time we spend doing< count>捕获
wprintf(LSTART Capture - >%i:%i:%i。%i\\\
,st.wHour,st.wMinute,st.wSecond,st.wMilliseconds);
for(UINT i = 0; i {
//获取数据
HRCHECK(device-> GetFrontBufferData(0,surface));

//将它复制到缓冲区
HRCHECK(surface-> LockRect(& rc,NULL,0));
CopyMemory(shots [i],rc.pBits,rc.Pitch * mode.Height);
HRCHECK(surface-> UnlockRect());
}
GetSystemTime(& st);
wprintf(LEND Capture - >%i:%i:%i。%i\\\
,st.wHour,st.wMinute,st.wSecond,st.wMilliseconds);

//保存所有截图
for(UINT i = 0; i {
WCHAR file [100];
wsprintf(file,Lcap%i.jpg,i);
HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width,mode.Height,pitch,shots [i],file,GUID_ContainerFormatJpeg));
}

cleanup:
if(shots!= nullptr)
{
for(UINT i = 0; i {
delete shots [i];
}
delete [] shots;
}

RELEASE(surface);
RELEASE(device);
RELEASE(d3d);
return hr;
}

HRESULT SavePixelsToFile32bppPBGRA(UINT width,UINT height,UINT stride,LPBYTE pixels,LPWSTR filePath,const GUID& format)
{
if ||!pixels)
return E_INVALIDARG;

HRESULT hr = S_OK;
IWICImagingFactory * factory = nullptr;
IWICBitmapEncoder * encoder = nullptr;
IWICBitmapFrameEncode * frame = nullptr;
IWICStream * stream = nullptr;
GUID pf = GUID_WICPixelFormat32bppPBGRA;
BOOL coInit = CoInitialize(nullptr);

HRCHECK(CoCreateInstance(CLSID_WICImagingFactory,nullptr,CLSCTX_INPROC_SERVER,IID_PPV_ARGS(& factory))));
HRCHECK(factory-> CreateStream(& stream));
HRCHECK(stream-> InitializeFromFilename(filePath,GENERIC_WRITE));
HRCHECK(factory-> CreateEncoder(format,nullptr,& encoder));
HRCHECK(encoder-> Initialize(stream,WICBitmapEncoderNoCache));
HRCHECK(encoder-> CreateNewFrame(& frame,nullptr)); //我们不使用这里的选项
HRCHECK(frame-> Initialize(nullptr)); //我们不使用任何选项
HRCHECK(frame-> SetSize(width,height));
HRCHECK(frame-> SetPixelFormat(& pf));
HRCHECK(frame-> WritePixels(height,stride,stride * height,pixels));
HRCHECK(frame-> Commit());
HRCHECK(encoder-> Commit());

cleanup:
RELEASE(stream);
RELEASE(frame);
RELEASE(encoder);
RELEASE(factory);
if(coInit)CoUninitialize();

//这部分编码JPEG文件到视频文件
VideoCapture in_capture(cap%d.jpg);

Mat img;

VideoWriter out_capture(video.avi,CV_FOURCC('M','J','P','G'),1,Size(1920,1080)

while(true)
{
in_capture>> img;
if(img.empty())
break;

out_capture.write(img);
}

return hr;
}


解决方案

您的图像不正确,是因为JPEG编码器(JPEG本机编解码器)只编码以下3种像素格式:




  • GUID_WICPixelFormat8bppGray

  • GUID_WICPixelFormat24bppBGR

  • GUID_WICPixelFormat32bppCMYK



您尝试 D3DXSaveSurfaceToFile ,是否太慢了?



EDIT



对于第二个选项,我使一个程序将Media Foundation的屏幕截图编码为h264:




  • 这个程序在Windows 7的1280*1024分辨率下工作,并且性能合理,但我不知道它是否适合你的需要。

  • 程序不是实时捕获,也许你将必须更正传递给sink写入器的时间戳。考虑使用媒体基础时钟。

  • 您可以尝试更改编码格式: MF_TRANSCODE_CONTAINERTYPE

  • 小心编码器的分辨率有限。

  • 更改MF_MT_AVG_BITRATE可以提高性能的质量。

  • 在我的系统上,捕获的图像是上下颠倒的,因此我需要以正确的方式复制图像(请参阅#define REVERSE_IMAGE)。这也许比MFCopyImage慢,但在测试了这两种实现之后我并不能完全确定。

  • 我们不需要WIC,因为Media Foundation编码器能处理d3d9捕获的格式。
  • 在Windows 7上,考虑使用IDirect3DDevice9Ex而不是idirect3ddevice9。

  • 我已经将您的d3d9捕获代码与本教程混合:使用Sink Writer编码视频。该方案的设计也可以改进。



这里是代码:

  #include< Windows.h> 
#include< mfapi.h>
#include< mfidl.h>
#include< Mfreadwrite.h>
#include< mferror.h>
#include< d3d9.h>

#pragma注释(lib,mfreadwrite)
#pragma注释(lib,mfplat)
#pragma注释(lib,mfuuid)
#pragma comment(lib,d3d9.lib)

template< class T> void SafeRelease(T ** ppT){

if(* ppT){
(* ppT) - > Release
* ppT = NULL;
}
}

#define REVERSE_IMAGE

//格式常量
const UINT32 VIDEO_FPS = 30;
const UINT64 VIDEO_FRAME_DURATION = 10 * 1000 * 1000 / VIDEO_FPS;
const UINT32 VIDEO_BIT_RATE = 2000000;
const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_H264;
const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32;
const UINT32 VIDEO_FRAME_COUNT = 5 * VIDEO_FPS;

HRESULT InitializeDirect3D9(IDirect3DDevice9 ** ppDevice,IDirect3DSurface9 ** ppSurface,UINT32& uiWidth,UINT32& uiHeight){

IDirect3D9 * d3d = NULL;

d3d = Direct3DCreate9(D3D_SDK_VERSION);

if(d3d == NULL)
return E_POINTER;

D3DDISPLAYMODE模式;
HRESULT hr = d3d-> GetAdapterDisplayMode(D3DADAPTER_DEFAULT,& mode);

if(FAILED(hr)){
SafeRelease(& d3d);
return hr;
}

D3DPRESENT_PARAMETERS parameters = {0};

parameters.Windowed = TRUE;
parameters.BackBufferCount = 1;
uiHeight = parameters.BackBufferHeight = mode.Height;
uiWidth = parameters.BackBufferWidth = mode.Width;
parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
parameters.hDeviceWindow = NULL;

hr = d3d-> CreateDevice(D3DADAPTER_DEFAULT,D3DDEVTYPE_HAL,NULL,D3DCREATE_SOFTWARE_VERTEXPROCESSING,& parameters,ppDevice);

if(FAILED(hr)){
SafeRelease(& d3d);
return hr;
}

hr =(* ppDevice) - > CreateOffscreenPlainSurface(mode.Width,mode.Height,D3DFMT_A8R8G8B8,D3DPOOL_SYSTEMMEM,ppSurface,nullptr);

SafeRelease(& d3d);

return hr;
}

HRESULT InitializeSinkWriter(IMFSinkWriter ** ppWriter,DWORD * pStreamIndex,const UINT32 uiWidth,const UINT32 uiHeight){

* ppWriter = NULL;
* pStreamIndex = NULL;

IMFSinkWriter * pSinkWriter = NULL;
IMFMediaType * pMediaTypeOut = NULL;
IMFMediaType * pMediaTypeIn = NULL;
DWORD streamIndex;

HRESULT hr = MFCreateSinkWriterFromURL(Loutput.mp4,NULL,NULL,& pSinkWriter);

//设置输出媒体类型。
if(SUCCEEDED(hr)){
hr = MFCreateMediaType(& pMediaTypeOut);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeOut-> SetGUID(MF_MT_MAJOR_TYPE,MFMediaType_Video);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeOut-> SetGUID(MF_MT_SUBTYPE,VIDEO_ENCODING_FORMAT);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeOut-> SetUINT32(MF_MT_AVG_BITRATE,VIDEO_BIT_RATE);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeOut-> SetUINT32(MF_MT_INTERLACE_MODE,MFVideoInterlace_Progressive);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeSize(pMediaTypeOut,MF_MT_FRAME_SIZE,uiWidth,uiHeight);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeRatio(pMediaTypeOut,MF_MT_FRAME_RATE,VIDEO_FPS,1);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeRatio(pMediaTypeOut,MF_MT_PIXEL_ASPECT_RATIO,1,1);
}
if(SUCCEEDED(hr)){
hr = pSinkWriter-> AddStream(pMediaTypeOut,& streamIndex);
}

//设置输入介质类型。
if(SUCCEEDED(hr)){
hr = MFCreateMediaType(& pMediaTypeIn);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeIn-> SetGUID(MF_MT_MAJOR_TYPE,MFMediaType_Video);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeIn-> SetGUID(MF_MT_SUBTYPE,VIDEO_INPUT_FORMAT);
}
if(SUCCEEDED(hr)){
hr = pMediaTypeIn-> SetUINT32(MF_MT_INTERLACE_MODE,MFVideoInterlace_Progressive);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeSize(pMediaTypeIn,MF_MT_FRAME_SIZE,uiWidth,uiHeight);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeRatio(pMediaTypeIn,MF_MT_FRAME_RATE,VIDEO_FPS,1);
}
if(SUCCEEDED(hr)){
hr = MFSetAttributeRatio(pMediaTypeIn,MF_MT_PIXEL_ASPECT_RATIO,1,1);
}
if(SUCCEEDED(hr)){
hr = pSinkWriter-> SetInputMediaType(streamIndex,pMediaTypeIn,NULL);
}

//告诉接收器写入器开始接受数据。
if(SUCCEEDED(hr)){
hr = pSinkWriter-> BeginWriting();
}

//返回指向调用者的指针。
if(SUCCEEDED(hr)){

* ppWriter = pSinkWriter;
(* ppWriter) - > AddRef();
* pStreamIndex = streamIndex;
}

SafeRelease(& pSinkWriter);
SafeRelease(& pMediaTypeOut);
SafeRelease(& pMediaTypeIn);
return hr;
}

HRESULT WriteFrame(IDirect3DDevice9 * pDevice,IDirect3DSurface9 * pSurface,IMFSinkWriter * pWriter,DWORD streamIndex,const LONGLONG& rtStart,const UINT32 uiWidth,const UINT32 uiHeight){

HRESULT hr = pDevice-> GetFrontBufferData(0,pSurface);

if(FAILED(hr)){
return hr;
}

D3DLOCKED_RECT rc;
hr = pSurface-> LockRect(& rc,NULL,0);

if(FAILED(hr)){
return hr;
}

IMFSample * pSample = NULL;
IMFMediaBuffer * pBuffer = NULL;

const LONG cbWidth = 4 * uiWidth;
const DWORD cbBuffer = cbWidth * uiHeight;

BYTE * pData = NULL;

//创建一个新的内存缓冲区。
hr = MFCreateMemoryBuffer(cbBuffer,& pBuffer);

//锁定缓冲区并将视频帧复制到缓冲区。
if(SUCCEEDED(hr)){
hr = pBuffer-> Lock(& pData,NULL,NULL);
}

if(SUCCEEDED(hr)){

#ifdef REVERSE_IMAGE
for(int i = 0,j = uiHeight - 1; i < uiHeight; i ++,j--)
for(int k = 0; k pData [(i * cbWidth)+ k] =((BYTE *)rc。 pBits)[(j * cbWidth)+ k];
#else
hr = MFCopyImage(pData,cbWidth,(BYTE *)rc.pBits,rc.Pitch,cbWidth,uiHeight);
#endif
}

if(pBuffer){
pBuffer-> Unlock();
}

//设置缓冲区的数据长度。
if(SUCCEEDED(hr)){
hr = pBuffer-> SetCurrentLength(cbBuffer);
}

//创建一个媒体样本并将缓冲区添加到样本中。
if(SUCCEEDED(hr)){
hr = MFCreateSample(& pSample);
}

if(SUCCEEDED(hr)){
hr = pSample-> AddBuffer(pBuffer);
}

//设置时间戳和持续时间。
if(SUCCEEDED(hr)){
hr = pSample-> SetSampleTime(rtStart);
}

if(SUCCEEDED(hr)){
hr = pSample-> SetSampleDuration(VIDEO_FRAME_DURATION);
}

//将示例发送到Sink Writer。
if(SUCCEEDED(hr)){
hr = pWriter-> WriteSample(streamIndex,pSample);
}

hr = pSurface-> UnlockRect();

SafeRelease(& pSample);
SafeRelease(& pBuffer);
return hr;
}

void main(){

HRESULT hr = CoInitializeEx(NULL,COINIT_APARTMENTTHREADED);

if(SUCCEEDED(hr)){

hr = MFStartup(MF_VERSION);

if(SUCCEEDED(hr)){

UINT32 uiWidth = 0;
UINT32 uiHeight = 0;

IDirect3DDevice9 * pDevice = NULL;
IDirect3DSurface9 * pSurface = NULL;

hr = InitializeDirect3D9(& pDevice,& pSurface,uiWidth,uiHeight);

if(SUCCEEDED(hr)){

IMFSinkWriter * pSinkWriter = NULL;
DWORD stream;

hr = InitializeSinkWriter(& pSinkWriter,& stream,uiWidth,uiHeight);

if(SUCCEEDED(hr)){

LONGLONG rtStart = 0;

for(DWORD i = 0; i
hr = WriteFrame(pDevice,pSurface,pSinkWriter,stream,rtStart,uiWidth,uiHeight );

if(FAILED(hr)){
break;
}

rtStart + = VIDEO_FRAME_DURATION;
}
}

if(SUCCEEDED(hr)){
hr = pSinkWriter-> Finalize();
}

SafeRelease(& pSinkWriter);
}

SafeRelease(& pDevice);
SafeRelease(& pSurface);
MFShutdown();
}

CoUninitialize();
}
}


I am doing desktop screen capture and wanted the output to be a video file. Currently I have codes getting from here to output png images. I modified the code a bit to change the output to JPEG file and then convert it to avi video output using openCV 3.0.0. Reason I need the JPEG file as output is because I am running on Windows 8.1 and OpenCV VideoWriter::fourcc('M', 'J', 'P', 'G') are the only options that work for me.

Output of PNG image works perfectly but not JPEG. There are vertical lines on the image and the time to generate the JPEG outputs is relatively far longer than PNG.

I have 2 options here:

  1. Improve the output of JPEG file to work faster and have a clear image.

  2. Get rid of the OpenCV 3.0.0 issue to accept PNG file input and able to output a video file.(preferably AVI/MP4 file format)

Either solution will do for me. PLease help. Thank you.

My code:

#include "stdafx.h"
#include <Wincodec.h>             // we use WIC for saving images
#include <d3d9.h>                 // DirectX 9 header
#include <opencv\cv.h>
#include <opencv\cxcore.hpp>
#include <opencv2\highgui\highgui.hpp>
#pragma comment(lib, "d3d9.lib")  // link to DirectX 9 library

using namespace cv;

// WIDEN/WIDEN2: classic two-step macro expansion that turns a narrow literal
// or macro (e.g. __FILE__) into a wide L"..." literal; __WFILE__ is the wide
// version of the current file name for use with wprintf.
#define WIDEN2(x) L ## x
#define WIDEN(x) WIDEN2(x)
#define __WFILE__ WIDEN(__FILE__)
// HRCHECK: evaluates __expr into a local 'hr'; on failure prints the HRESULT,
// line, file and the stringized expression, then jumps to 'cleanup'.
// Requires an 'HRESULT hr' local and a 'cleanup:' label in the caller.
#define HRCHECK(__expr) {hr=(__expr);if(FAILED(hr)){wprintf(L"FAILURE 0x%08X (%i)\n\tline: %u file: '%s'\n\texpr: '" WIDEN(#__expr) L"'\n",hr, hr, __LINE__,__WFILE__);goto cleanup;}}
// RELEASE: null-safe COM Release that also clears the pointer to avoid reuse.
#define RELEASE(__p) {if(__p!=nullptr){__p->Release();__p=nullptr;}}

HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count);
HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format);


int _tmain(int argc, _TCHAR* argv[])
{
    HRESULT hr = Direct3D9TakeScreenshots(D3DADAPTER_DEFAULT, 10);
    return 0;
}


HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count)
{
    // Captures <count> consecutive front-buffer snapshots of <adapter> into
    // pre-allocated memory buffers (keeping the capture loop itself fast),
    // then saves each one as cap<i>.jpg via SavePixelsToFile32bppPBGRA.
    HRESULT hr = S_OK;
    IDirect3D9 *d3d = nullptr;
    IDirect3DDevice9 *device = nullptr;
    IDirect3DSurface9 *surface = nullptr;
    D3DPRESENT_PARAMETERS parameters = { 0 };
    D3DDISPLAYMODE mode;
    D3DLOCKED_RECT rc;
    UINT pitch;
    SYSTEMTIME st;
    LPBYTE *shots = nullptr;

    // init D3D and get screen size
    d3d = Direct3DCreate9(D3D_SDK_VERSION);
    HRCHECK(d3d->GetAdapterDisplayMode(adapter, &mode));

    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    parameters.BackBufferHeight = mode.Height;
    parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;

    // create device & capture surface (a SYSTEMMEM offscreen plain surface is
    // what GetFrontBufferData requires as its destination)
    HRCHECK(d3d->CreateDevice(adapter, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, &device));
    HRCHECK(device->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &surface, nullptr));

    // compute the required buffer size; the pitch may exceed 4 * width
    HRCHECK(surface->LockRect(&rc, NULL, 0));
    pitch = rc.Pitch;
    HRCHECK(surface->UnlockRect());

    // allocate screenshots buffers
    shots = new LPBYTE[count];
    for (UINT i = 0; i < count; i++)
    {
        shots[i] = new BYTE[pitch * mode.Height];
    }

    GetSystemTime(&st); // measure the time we spend doing <count> captures
    wprintf(L"START Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
    for (UINT i = 0; i < count; i++)
    {
        // get the data
        HRCHECK(device->GetFrontBufferData(0, surface));

        // copy it into our buffers
        HRCHECK(surface->LockRect(&rc, NULL, 0));
        CopyMemory(shots[i], rc.pBits, rc.Pitch * mode.Height);
        HRCHECK(surface->UnlockRect());
    }
    GetSystemTime(&st);
    wprintf(L"END Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);

    // save all screenshots
    for (UINT i = 0; i < count; i++)
    {
        WCHAR file[100];
        wsprintf(file, L"cap%i.jpg", i);
        HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width, mode.Height, pitch, shots[i], file, GUID_ContainerFormatJpeg));
    }

    cleanup:
    if (shots != nullptr)
    {
        for (UINT i = 0; i < count; i++)
        {
            // BUGFIX: buffers were allocated with new BYTE[], so they must be
            // freed with delete[]; plain delete on a new[] pointer is UB.
            delete[] shots[i];
        }
        delete[] shots;
    }

    RELEASE(surface);
    RELEASE(device);
    RELEASE(d3d);
    return hr;
}

HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format)
{
    // Saves a 32bpp-PBGRA pixel buffer to an image file via WIC, then (as in
    // the original sample) muxes the cap%d.jpg sequence into video.avi.
    if (!filePath || !pixels)
        return E_INVALIDARG;

    HRESULT hr = S_OK;
    IWICImagingFactory *factory = nullptr;
    IWICBitmapEncoder *encoder = nullptr;
    IWICBitmapFrameEncode *frame = nullptr;
    IWICBitmap *bitmap = nullptr;
    IWICStream *stream = nullptr;
    GUID pf = GUID_WICPixelFormat32bppPBGRA;
    // BUGFIX: CoInitialize returns an HRESULT, not a BOOL. S_OK is 0, so the
    // old 'if (coInit)' skipped CoUninitialize exactly when init succeeded.
    // S_FALSE (already initialized) also needs a balancing CoUninitialize,
    // which SUCCEEDED() covers.
    HRESULT coInit = CoInitialize(nullptr);

    HRCHECK(CoCreateInstance(CLSID_WICImagingFactory, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&factory)));
    HRCHECK(factory->CreateStream(&stream));
    HRCHECK(stream->InitializeFromFilename(filePath, GENERIC_WRITE));
    HRCHECK(factory->CreateEncoder(format, nullptr, &encoder));
    HRCHECK(encoder->Initialize(stream, WICBitmapEncoderNoCache));
    HRCHECK(encoder->CreateNewFrame(&frame, nullptr)); // we don't use options here
    HRCHECK(frame->Initialize(nullptr)); // we don't use any options here
    HRCHECK(frame->SetSize(width, height));
    // SetPixelFormat negotiates: the codec may pick the closest format it can
    // encode (JPEG only supports 8bppGray, 24bppBGR and 32bppCMYK). Writing
    // raw PBGRA bytes after a different format was negotiated is what produced
    // the striped/corrupted JPEG output.
    HRCHECK(frame->SetPixelFormat(&pf));
    if (IsEqualGUID(pf, GUID_WICPixelFormat32bppPBGRA))
    {
        // Codec accepted our layout; write the raw rows directly.
        HRCHECK(frame->WritePixels(height, stride, stride * height, pixels));
    }
    else
    {
        // Wrap the buffer in a WIC bitmap and let WriteSource convert to 'pf'.
        HRCHECK(factory->CreateBitmapFromMemory(width, height, GUID_WICPixelFormat32bppPBGRA, stride, stride * height, pixels, &bitmap));
        HRCHECK(frame->WriteSource(bitmap, nullptr));
    }
    HRCHECK(frame->Commit());
    HRCHECK(encoder->Commit());

    cleanup:
    RELEASE(bitmap);
    RELEASE(stream);
    RELEASE(frame);
    RELEASE(encoder);
    RELEASE(factory);
    if (SUCCEEDED(coInit)) CoUninitialize();

    // NOTE(review): this re-encodes the ENTIRE cap%d.jpg sequence into
    // video.avi on every call (once per saved frame). It belongs in the
    // caller, after all frames have been saved; kept here to preserve the
    // sample's observable behavior.
    VideoCapture in_capture("cap%d.jpg");

    Mat img;

    VideoWriter out_capture("video.avi", CV_FOURCC('M','J','P','G'), 1, Size(1920,1080));

    while (true)
    {
        in_capture >> img;
        if(img.empty())
            break;

        out_capture.write(img);
    }

    return hr;
}

解决方案

Your image isn't correct because the JPEG encoder encodes 3 formats JPEG Native Codec:

  • GUID_WICPixelFormat8bppGray
  • GUID_WICPixelFormat24bppBGR
  • GUID_WICPixelFormat32bppCMYK

Did you try D3DXSaveSurfaceToFile, is it too slow ?

EDIT

For your second option, i've made a program to encode the screen capture with Media Foundation into h264 :

  • This work on Windows Seven with 1280*1024 resolution, and with reasonable performance, but i don't known if it's feet your needs.
  • This program is not real time capture, perhaps you will have to correct the timestamp passed to the sink writer. Consider using a Media Foundation clock.
  • You can try to change encoding format : MF_TRANSCODE_CONTAINERTYPE
  • Be careful that encoders are limited in resolution.
  • Changing the MF_MT_AVG_BITRATE can improve quality over performance. Set the value according to your requirments.
  • On my system, the capture image is reverse, so i need to copy the image in the rigth way (see #define REVERSE_IMAGE). Perhaps it is slower than MFCopyImage, but i'm not realy sure after testing both implementations.
  • We don't need WIC, because Media Foundation encoders handle the format from the d3d9 capture.
  • On Windows Seven, consider using IDirect3DDevice9Ex instead of idirect3ddevice9.
  • I've mixed your d3d9 capture code with this tutorial : Using the Sink Writer to Encode Video. The design of the program can be improved too.

Here is the code :

#include <Windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <Mfreadwrite.h>
#include <mferror.h>
#include <d3d9.h>

#pragma comment(lib, "mfreadwrite")
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "d3d9.lib")

// Release a COM-style interface through a pointer-to-pointer and null the
// caller's pointer so it cannot be used (or released) again.
template <class T> void SafeRelease(T **ppT){

    if(*ppT == NULL)
        return;
    (*ppT)->Release();
    *ppT = NULL;
}

#define REVERSE_IMAGE

// Format constants
const UINT32 VIDEO_FPS = 30;
// Per-frame duration in 100-nanosecond units (Media Foundation's time base):
// one second is 10,000,000 units.
const UINT64 VIDEO_FRAME_DURATION = 10 * 1000 * 1000 / VIDEO_FPS;
const UINT32 VIDEO_BIT_RATE = 2000000;                     // target average bitrate, bits/s
const GUID   VIDEO_ENCODING_FORMAT = MFVideoFormat_H264;   // compressed output format
const GUID   VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32;     // raw format fed to the sink writer
const UINT32 VIDEO_FRAME_COUNT = 5 * VIDEO_FPS;            // record 5 seconds of video

// Create a windowed D3D9 device on the default adapter plus a system-memory
// capture surface sized to the current desktop mode, and report the desktop
// dimensions back through uiWidth/uiHeight.
HRESULT InitializeDirect3D9(IDirect3DDevice9** ppDevice, IDirect3DSurface9** ppSurface, UINT32& uiWidth, UINT32& uiHeight){

    IDirect3D9* d3d9 = Direct3DCreate9(D3D_SDK_VERSION);
    if(d3d9 == NULL)
        return E_POINTER;

    D3DDISPLAYMODE displayMode;
    HRESULT hr = d3d9->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &displayMode);

    if(SUCCEEDED(hr)){
        D3DPRESENT_PARAMETERS pp = {0};

        pp.Windowed = TRUE;
        pp.BackBufferCount = 1;
        uiHeight = pp.BackBufferHeight = displayMode.Height;
        uiWidth = pp.BackBufferWidth = displayMode.Width;
        pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
        pp.hDeviceWindow = NULL;

        hr = d3d9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &pp, ppDevice);
    }

    if(SUCCEEDED(hr)){
        // Offscreen plain surface in SYSTEMMEM is the destination that
        // GetFrontBufferData expects.
        hr = (*ppDevice)->CreateOffscreenPlainSurface(displayMode.Width, displayMode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, ppSurface, nullptr);
    }

    // The device keeps its own reference to the factory object.
    SafeRelease(&d3d9);

    return hr;
}

// Build a sink writer targeting "output.mp4" with a single H264 video stream,
// declare RGB32 as the uncompressed input type, start it, and hand back the
// writer (AddRef'd) plus its stream index.
HRESULT InitializeSinkWriter(IMFSinkWriter **ppWriter, DWORD *pStreamIndex, const UINT32 uiWidth, const UINT32 uiHeight){

    *ppWriter = NULL;
    *pStreamIndex = NULL;

    IMFSinkWriter *writer = NULL;
    IMFMediaType  *outType = NULL;
    IMFMediaType  *inType = NULL;
    DWORD          index;

    HRESULT hr = MFCreateSinkWriterFromURL(L"output.mp4", NULL, NULL, &writer);

    // Output (encoded) media type: H264, progressive scan, square pixels.
    if(SUCCEEDED(hr)) hr = MFCreateMediaType(&outType);
    if(SUCCEEDED(hr)) hr = outType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if(SUCCEEDED(hr)) hr = outType->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT);
    if(SUCCEEDED(hr)) hr = outType->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE);
    if(SUCCEEDED(hr)) hr = outType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if(SUCCEEDED(hr)) hr = MFSetAttributeSize(outType, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    if(SUCCEEDED(hr)) hr = MFSetAttributeRatio(outType, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    if(SUCCEEDED(hr)) hr = MFSetAttributeRatio(outType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    if(SUCCEEDED(hr)) hr = writer->AddStream(outType, &index);

    // Input (uncompressed) media type: RGB32 frames, same geometry and rate.
    if(SUCCEEDED(hr)) hr = MFCreateMediaType(&inType);
    if(SUCCEEDED(hr)) hr = inType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if(SUCCEEDED(hr)) hr = inType->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT);
    if(SUCCEEDED(hr)) hr = inType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if(SUCCEEDED(hr)) hr = MFSetAttributeSize(inType, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    if(SUCCEEDED(hr)) hr = MFSetAttributeRatio(inType, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    if(SUCCEEDED(hr)) hr = MFSetAttributeRatio(inType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    if(SUCCEEDED(hr)) hr = writer->SetInputMediaType(index, inType, NULL);

    // Tell the sink writer to start accepting data.
    if(SUCCEEDED(hr)) hr = writer->BeginWriting();

    // On success, transfer an extra reference to the caller; the local
    // SafeRelease below then drops only our own reference.
    if(SUCCEEDED(hr)){

        *ppWriter = writer;
        (*ppWriter)->AddRef();
        *pStreamIndex = index;
    }

    SafeRelease(&writer);
    SafeRelease(&outType);
    SafeRelease(&inType);
    return hr;
}

// Grab the current front buffer into pSurface, copy it (optionally flipped
// vertically) into a Media Foundation sample, and send it to the sink writer
// stamped at rtStart with the fixed frame duration.
HRESULT WriteFrame(IDirect3DDevice9* pDevice, IDirect3DSurface9* pSurface, IMFSinkWriter* pWriter, DWORD streamIndex, const LONGLONG& rtStart, const UINT32 uiWidth, const UINT32 uiHeight){

    HRESULT hr = pDevice->GetFrontBufferData(0, pSurface);

    if(FAILED(hr)){
        return hr;
    }

    D3DLOCKED_RECT rc;
    hr = pSurface->LockRect(&rc, NULL, 0);

    if(FAILED(hr)){
        return hr;
    }

    IMFSample *pSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;

    const LONG cbWidth = 4 * uiWidth;       // tightly-packed destination stride
    const DWORD cbBuffer = cbWidth * uiHeight;

    BYTE *pData = NULL;

    // Create a new memory buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Lock the buffer and copy the video frame to the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    if(SUCCEEDED(hr)){

#ifdef REVERSE_IMAGE
        // Flip vertically while copying. BUGFIX: source rows must be addressed
        // with rc.Pitch — the locked surface's pitch can be wider than
        // 4*width; the old byte loop used cbWidth for the source too, which
        // corrupts every frame whenever pitch != 4*width.
        const BYTE* pSrc = (const BYTE*)rc.pBits;
        for(UINT32 row = 0; row < uiHeight; row++){
            CopyMemory(pData + (SIZE_T)row * cbWidth,
                       pSrc + (SIZE_T)(uiHeight - 1 - row) * rc.Pitch,
                       cbWidth);
        }
#else
        hr = MFCopyImage(pData, cbWidth, (BYTE*)rc.pBits, rc.Pitch, cbWidth, uiHeight);
#endif
    }

    if(pBuffer){
        pBuffer->Unlock();
    }

    // Set the data length of the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a media sample and add the buffer to the sample.
    if(SUCCEEDED(hr)){
        hr = MFCreateSample(&pSample);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamp and the duration.
    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleTime(rtStart);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleDuration(VIDEO_FRAME_DURATION);
    }

    // Send the sample to the Sink Writer.
    if(SUCCEEDED(hr)){
        hr = pWriter->WriteSample(streamIndex, pSample);
    }

    // BUGFIX: don't let a successful UnlockRect overwrite an earlier failure;
    // report the first error that occurred.
    const HRESULT hrUnlock = pSurface->UnlockRect();
    if(SUCCEEDED(hr)){
        hr = hrUnlock;
    }

    SafeRelease(&pSample);
    SafeRelease(&pBuffer);
    return hr;
}

void main(){

    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);

    if(SUCCEEDED(hr)){

        hr = MFStartup(MF_VERSION);

        if(SUCCEEDED(hr)){

                UINT32 uiWidth = 0;
                UINT32 uiHeight = 0;

                IDirect3DDevice9* pDevice = NULL;
                IDirect3DSurface9* pSurface = NULL;

                hr = InitializeDirect3D9(&pDevice, &pSurface, uiWidth, uiHeight);

                if(SUCCEEDED(hr)){

                        IMFSinkWriter *pSinkWriter = NULL;
                        DWORD stream;

                        hr = InitializeSinkWriter(&pSinkWriter, &stream, uiWidth, uiHeight);

                        if(SUCCEEDED(hr)){

                            LONGLONG rtStart = 0;

                            for(DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i){

                                hr = WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);

                                            if(FAILED(hr)){
                                                    break;
                                            }

                                            rtStart += VIDEO_FRAME_DURATION;
                                    }
                            }

                            if(SUCCEEDED(hr)){
                                    hr = pSinkWriter->Finalize();
                            }

                            SafeRelease(&pSinkWriter);
                    }

                    SafeRelease(&pDevice);
                    SafeRelease(&pSurface);
                    MFShutdown();
            }

            CoUninitialize();
    }
}

这篇关于DirectX屏幕捕获和输出为视频的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆