Uncompressed video sample (IMFSample/RGB32) to bitmap thumbnail


Problem description


I am trying to generate thumbnails for a video in a Windows Runtime C++ project. I pass the input video file in as an IRandomAccessStream parameter. I am able to get IMFSamples using the SourceReader, but I am not sure how to pass the data back to the caller as an image thumbnail that can be consumed in a XAML/C# app.

Please disregard some of the hard-coded values; for simplicity I am just grabbing a single thumbnail at around the 0.5-second mark. I wasn't sure what the best way to convert from an IMFSample to a WriteableBitmap was. Here's what I've tried so far... I'm not even sure whether I'm heading in the right direction.

IAsyncOperation<WriteableBitmap^>^ MediaTransformer::GenerateThumbnailAsync(IRandomAccessStream^ inputStream)
{
    WriteableBitmap^ bitmap = ref new WriteableBitmap(1280, 720);

    IUnknown* pUnknown = reinterpret_cast<IUnknown*>(bitmap->PixelBuffer);
    IBufferByteAccess* pBufferByteAccess = nullptr;
    CHK(pUnknown->QueryInterface(IID_PPV_ARGS(&pBufferByteAccess)));
    pUnknown->Release();

    return create_async([this, bitmap, pBufferByteAccess, inputStream](cancellation_token token) -> WriteableBitmap^
    {
        HRESULT hr = S_OK;
        AutoMF mf;

        //
        // Create the ReadWriteClassFactory
        //
        CoCreateInstance(CLSID_MFReadWriteClassFactory, NULL, CLSCTX_INPROC_SERVER, IID_IMFReadWriteClassFactory, (void**)(&spClassFactory));

        //
        // Create the Source Reader
        //
        ComPtr<IMFByteStream> spInputByteStream;
        CHK(MFCreateMFByteStreamOnStreamEx((IUnknown*)inputStream, &spInputByteStream));

        ComPtr<IMFAttributes> spReaderAttributes;
        CHK(MFCreateAttributes(&spReaderAttributes, 10));
        CHK(spReaderAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, true));
        CHK(spReaderAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, true));

        CHK(spClassFactory->CreateInstanceFromObject(CLSID_MFSourceReader, spInputByteStream.Get(), spReaderAttributes.Get(), IID_IMFSourceReader, &spSourceReader));

        ComPtr<IMFMediaType> spType = NULL;

        // Configure the source reader to give us progressive RGB32 frames.
        // The source reader will load the decoder if needed.
        CHK(MFCreateMediaType(&spType));
        CHK(spType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
        CHK(spType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));

        CHK(spSourceReader->SetCurrentMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            NULL,
            spType.Get()));

        CHK(spSourceReader->SetStreamSelection(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            TRUE));

        //hr = GetVideoFormat(&m_format);

        BOOL     bCanSeek = FALSE;
        LONGLONG hnsDuration = 0;
        LONGLONG hnsRangeStart = 0;
        LONGLONG hnsRangeEnd = 0;
        LONGLONG hnsIncrement = 5000000;      // 0.5 seconds, in 100-ns units

        CHK(CanSeek(&bCanSeek));

        if (bCanSeek)
        {
            CHK(MediaTransformer::GetDuration(spSourceReader.Get(), &hnsDuration));

            //hnsRangeStart = 0;
            //hnsRangeEnd = hnsDuration;

            //// We have the file duration, so we'll take bitmaps from
            //// several positions in the file. Occasionally, the first frame
            //// in a video is black, so we don't start at time 0.
            //hnsIncrement = (hnsRangeEnd - hnsRangeStart) / (5 + 1);
        }

        LONGLONG hnsPos = hnsIncrement;

        DWORD       dwFlags = 0;
        BYTE        *pBitmapData = NULL;    // Bitmap data
        DWORD       cbBitmapData = 0;       // Size of data, in bytes
        LONGLONG    hnsTimeStamp = 0;
        DWORD       cSkipped = 0;           // Number of skipped frames
        DWORD       streamIndex = 0;

        IMFSample      *pSample = NULL;
        IMFMediaBuffer *pBuffer = 0;
        IMF2DBuffer2   *pImageBuffer = 0;
        //ID2D1Bitmap  *pBitmap = NULL;

        LONG  strideSize = 0;
        DWORD bufferLength = 0;
        BYTE  *pBufferStart = NULL;

        if (bCanSeek && (hnsPos > 0))
        {
            PROPVARIANT var;
            PropVariantInit(&var);
            var.vt = VT_I8;
            var.hVal.QuadPart = hnsPos;
            CHK(spSourceReader->SetCurrentPosition(GUID_NULL, var));
        }

        // Pull video frames from the source reader.
        // NOTE: Seeking might be inaccurate, depending on the container
        //       format and how the file was indexed. Therefore, the first
        //       frame that we get might be earlier than the desired time.
        //       If so, we skip up to MAX_FRAMES_TO_SKIP frames.
        while (1)
        {
            IMFSample *pSampleTmp = NULL;

            hr = spSourceReader->ReadSample(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                0,
                &streamIndex,
                &dwFlags,
                NULL,
                &pSampleTmp);

            if (FAILED(hr)) { goto done; }

            if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM)
            {
                break;
            }

            if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
            {
                // Type change. Get the new format.
                //hr = GetVideoFormat(&m_format);
                if (FAILED(hr)) { goto done; }
            }

            if (pSampleTmp == NULL)
            {
                continue;
            }

            // We got a sample. Hold onto it.
            SafeRelease(&pSample);
            pSample = pSampleTmp;
            pSample->AddRef();

            if (SUCCEEDED(pSample->GetSampleTime(&hnsTimeStamp)))
            {
                // Keep going until we get a frame that is within tolerance of the
                // desired seek position, or until we skip MAX_FRAMES_TO_SKIP frames.
                // During this process, we might reach the end of the file, so we
                // always cache the last sample that we got (pSample).
                if ((cSkipped < MAX_FRAMES_TO_SKIP) &&
                    (hnsTimeStamp + SEEK_TOLERANCE < hnsPos))
                {
                    SafeRelease(&pSampleTmp);
                    ++cSkipped;
                    continue;
                }
            }

            SafeRelease(&pSampleTmp);
            hnsPos = hnsTimeStamp;
            break;
        }

        if (pSample)
        {
            CHK(pSample->ConvertToContiguousBuffer(&pBuffer));
            CHK(pBuffer->QueryInterface(IID_IMF2DBuffer2, (void**)&pImageBuffer));

            // Lock the source frame for reading, then repoint pBitmapData at the
            // WriteableBitmap's pixel buffer and copy the frame into it
            // (RGB32 => 4 bytes per pixel, so a 1280-wide row is 1280 * 4 bytes).
            hr = pImageBuffer->Lock2DSize(MF2DBuffer_LockFlags_Read, &pBitmapData, &strideSize, &pBufferStart, &bufferLength);
            hr = pBufferByteAccess->Buffer(&pBitmapData);

            CHK(MFCopyImage(pBitmapData, 1280 * 4, pBufferStart, strideSize, 1280 * 4, 720));
        }
        else
        {
            hr = MF_E_END_OF_STREAM;
        }

done:
        if (pBitmapData)
        {
            pImageBuffer->Unlock2D();
        }

        SafeRelease(&pBuffer);
        SafeRelease(&pSample);
        SafeRelease(&pImageBuffer);

        return bitmap;
    });
}

Solution

I didn't run your code, but it looks similar to how I would do it.

One note though...

As far as I know, for GPU acceleration (or at least to have control over your D3D device), the IMFSourceReader needs an IMFDXGIDeviceManager initialized (via ResetDevice) with a D3D device on which ID3D10Multithread::SetMultithreadProtected(TRUE) has been called. The IMFDXGIDeviceManager is then set on your spReaderAttributes under the MF_SOURCE_READER_D3D_MANAGER key.
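For reference, here is a minimal sketch of that setup, not taken from the answer itself, assuming Windows 8+ and WRL ComPtr. The helper name CreateReaderWithD3DManager is made up for illustration, and it creates the reader with MFCreateSourceReaderFromByteStream rather than the IMFReadWriteClassFactory route used in the question; the device-manager calls (MFCreateDXGIDeviceManager, ResetDevice, SetMultithreadProtected, MF_SOURCE_READER_D3D_MANAGER) are the ones the answer refers to.

// Sketch only, not from the original post: create a DXGI device manager and
// hand it to the source reader attributes so decoding can stay on the GPU.
#include <d3d11.h>
#include <d3d10.h>        // ID3D10Multithread (works on D3D11 devices too)
#include <mfapi.h>
#include <mfreadwrite.h>
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;

HRESULT CreateReaderWithD3DManager(IMFByteStream* pByteStream, IMFSourceReader** ppReader)
{
    // Create a D3D11 device that supports video operations.
    ComPtr<ID3D11Device> spDevice;
    ComPtr<ID3D11DeviceContext> spContext;
    UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT;
    HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, flags,
                                   nullptr, 0, D3D11_SDK_VERSION, &spDevice, nullptr, &spContext);
    if (FAILED(hr)) return hr;

    // Media Foundation will call into the device from its own threads.
    ComPtr<ID3D10Multithread> spMultithread;
    hr = spDevice.As(&spMultithread);
    if (FAILED(hr)) return hr;
    spMultithread->SetMultithreadProtected(TRUE);

    // Create the DXGI device manager and associate it with the device.
    UINT resetToken = 0;
    ComPtr<IMFDXGIDeviceManager> spDXGIManager;
    hr = MFCreateDXGIDeviceManager(&resetToken, &spDXGIManager);
    if (FAILED(hr)) return hr;
    hr = spDXGIManager->ResetDevice(spDevice.Get(), resetToken);
    if (FAILED(hr)) return hr;

    // Pass the manager to the source reader through its attributes.
    ComPtr<IMFAttributes> spAttributes;
    hr = MFCreateAttributes(&spAttributes, 2);
    if (FAILED(hr)) return hr;
    spAttributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, spDXGIManager.Get());
    spAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);

    return MFCreateSourceReaderFromByteStream(pByteStream, spAttributes.Get(), ppReader);
}

With the reader created this way, the RGB32 media type and stream selection from the question can still be applied as before.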

You can also get an IMFDXGIBuffer from your sample and render it using a SurfaceImageSource (D3D interop with XAML).
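As a rough illustration of that second point (again, not code from the answer), assuming the reader was created with a D3D manager as above: the hypothetical helper below pulls the ID3D11Texture2D out of a sample via IMFDXGIBuffer. The SurfaceImageSource side of the interop (ISurfaceImageSourceNative::BeginDraw/EndDraw) is not shown.

// Hypothetical helper: retrieve the D3D texture backing a GPU-decoded sample.
#include <d3d11.h>
#include <mfobjects.h>    // IMFSample, IMFMediaBuffer, IMFDXGIBuffer
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;

HRESULT GetTextureFromSample(IMFSample* pSample, ID3D11Texture2D** ppTexture, UINT* pSubresource)
{
    ComPtr<IMFMediaBuffer> spBuffer;
    HRESULT hr = pSample->GetBufferByIndex(0, &spBuffer);
    if (FAILED(hr)) return hr;

    // When the reader decodes on the GPU, the buffer exposes IMFDXGIBuffer.
    ComPtr<IMFDXGIBuffer> spDXGIBuffer;
    hr = spBuffer.As(&spDXGIBuffer);
    if (FAILED(hr)) return hr;   // system-memory sample: fall back to Lock2D copies

    hr = spDXGIBuffer->GetResource(IID_PPV_ARGS(ppTexture));
    if (FAILED(hr)) return hr;

    // Decoders often hand back one slice of a texture array; keep the index too.
    return spDXGIBuffer->GetSubresourceIndex(pSubresource);
}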


