DirectShow filter sometimes fails


Problem description

I have written a DirectShow filter that combines the images from two input pins and puts the resulting image on its output pin. The only media type I accept is (FORMAT_VideoInfo, MEDIATYPE_Video, RGB32). The output is the same media type with width = 2 * input_pin_1_width. Here is the code:

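    // (Excerpt from what appears to be the output pin's DecideBufferSize override;
    // the function header was not included in the question.)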
    ALLOCATOR_PROPERTIES Request, Actual;
    HRESULT hr;
    if ( m_pVideoTransformerFilter->m_SelInputpin ){
        if (m_pVideoTransformerFilter->m_SelInputpin->IsConnected()) {
            hr = m_pVideoTransformerFilter->m_SelInputpin->GetAllocator()->GetProperties(&Request);
            if (FAILED(hr)) {
                // Input connected but with a secretive allocator - enough!
                return hr;
            }
        } else {
            // We're reduced to blind guessing.  Let's guess one byte and if
            // this isn't enough then when the other pin does get connected
            // we can revise it.
            ZeroMemory(&Request, sizeof(Request));
            Request.cBuffers = 1;
            Request.cbBuffer = 1;
        }

        DbgLog((LOG_MEMORY,1,TEXT("Setting Allocator Requirements")));
        DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d"),
               Request.cBuffers, Request.cbBuffer));
        // Pass the allocator requirements to our output side
        // but do a little sanity checking first or we'll just hit
        // asserts in the allocator.
        ppropInputRequest->cBuffers = Request.cBuffers;
        ppropInputRequest->cbBuffer = Request.cbBuffer*2;
        ppropInputRequest->cbAlign = Request.cbAlign;
        if (ppropInputRequest->cBuffers<=0) {ppropInputRequest->cBuffers = 1; }
        if (ppropInputRequest->cbBuffer<=0) {ppropInputRequest->cbBuffer = 1; }
        hr = pAlloc->SetProperties(ppropInputRequest, &Actual);
        if (FAILED(hr)) {
            return hr;
        }
        DbgLog((LOG_MEMORY,1,TEXT("Obtained Allocator Requirements")));
        DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d, Alignment %d"),
               Actual.cBuffers, Actual.cbBuffer, Actual.cbAlign));
        // Make sure we got the right alignment and at least the minimum required
        if (  (Request.cBuffers > Actual.cBuffers)
           || (Request.cbBuffer*2 > Actual.cbBuffer)
           || (Request.cbAlign  > Actual.cbAlign)
           ) {
            return E_FAIL;
        }
        return NOERROR;
    }
    return E_FAIL;
}



Here I request buffer_size = 2 * input_pin_1_buffer_size.
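
For reference, the request above follows directly from the RGB32 layout. A minimal arithmetic sketch, assuming 640x480 inputs (the dimensions are made up for illustration and do not come from the question):

    // Each RGB32 input frame occupies width * height * 4 bytes; the output frame
    // is twice as wide, so it needs exactly twice the bytes of one input buffer.
    const int input_width   = 640;                // assumed example width
    const int input_height  = 480;                // assumed example height
    const int bytes_per_pel = 4;                  // RGB32
    const int input_bytes   = input_width * input_height * bytes_per_pel;  // 1228800
    const int output_bytes  = 2 * input_bytes;    // 2457600 == Request.cbBuffer * 2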

unsigned  __stdcall deliver_thread(void *data_parameter)
{
	CVideoTransformerFilter *local_filter = (CVideoTransformerFilter*)data_parameter;
	
	if(local_filter!=NULL)
	{
		while(true)
		{
			if(WaitForSingleObject(local_filter->receive_event[0],INFINITE) != WAIT_OBJECT_0)
			{
				continue;
			}
			if(WaitForSingleObject(local_filter->receive_event[1],INFINITE) != WAIT_OBJECT_0)
			{
				continue;
			}
			ResetEvent(local_filter->receive_event[0]);
			ResetEvent(local_filter->receive_event[1]);
			
			if 
				(
				local_filter->samples_list_pin_1.size()!=0 
				&&
				local_filter->samples_list_pin_2.size()!=0 
				)
			{
				IMediaSample *local_sample_1 = *local_filter->samples_list_pin_1.begin();
				IMediaSample *local_sample_2 = *local_filter->samples_list_pin_2.begin();
				local_filter->samples_list_pin_1.erase(local_filter->samples_list_pin_1.begin());
				local_filter->samples_list_pin_2.erase(local_filter->samples_list_pin_2.begin());
				if (local_filter->m_outputpin!=NULL)
				{
					CComPtr<IMediaSample> local_output_sample;
					REFERENCE_TIME local_start_time,local_end_time;
					if(local_sample_1->GetTime(&local_start_time,&local_end_time)==S_OK)
					{
						if(local_filter->m_outputpin->GetAllocator()->GetBuffer(&local_output_sample,&local_start_time,&local_end_time,AM_GBF_NOTASYNCPOINT)==S_OK)
						{
							local_output_sample->SetTime(&local_start_time,&local_end_time);
							AM_MEDIA_TYPE local_media_type_1;
							AM_MEDIA_TYPE local_media_type_2;
							AM_MEDIA_TYPE local_media_type_output;
							local_filter->m_Arrinputpin[0]->ConnectionMediaType(&local_media_type_1);
							local_filter->m_Arrinputpin[1]->ConnectionMediaType(&local_media_type_2);
							local_filter->m_outputpin->ConnectionMediaType(&local_media_type_output);
						
							if 
								(
								local_media_type_1.formattype==FORMAT_VideoInfo
								&&
								local_media_type_2.formattype==FORMAT_VideoInfo
								&&
								local_media_type_output.formattype==FORMAT_VideoInfo
								)
							{
								VIDEOINFOHEADER *pvi_1 = (VIDEOINFOHEADER *) local_media_type_1.pbFormat;
								VIDEOINFOHEADER *pvi_2 = (VIDEOINFOHEADER *) local_media_type_2.pbFormat;
								VIDEOINFOHEADER *pvi_output = (VIDEOINFOHEADER *) local_media_type_output.pbFormat;
						
								BYTE *pData_1;                // Pointer to the actual image buffer
								BYTE *pData_2;                // Pointer to the actual image buffer
								BYTE *pData_output;                // Pointer to the actual image buffer
								long lDataLen_1;              // Holds length of any given sample
								long lDataLen_2;              // Holds length of any given sample
								long lDataLen_output;              // Holds length of any given sample
								RGBQUAD *prgb_1;            // Holds a pointer to the current pixel
								RGBQUAD *prgb_2;            // Holds a pointer to the current pixel
								RGBQUAD *prgb_output;            // Holds a pointer to the current pixel
								HRESULT local_result = S_OK;
		
								local_sample_1->GetPointer(&pData_1);
								local_sample_2->GetPointer(&pData_2);
								local_result=local_output_sample->GetPointer(&pData_output);
								
								lDataLen_1 = local_sample_1->GetSize();
								lDataLen_2 = local_sample_1->GetSize();
								lDataLen_output = local_output_sample->GetSize();
						
								if(lDataLen_output==lDataLen_1+lDataLen_2)
								{
									// Get the image properties from the BITMAPINFOHEADER
									int iPixelSize_1 = pvi_1->bmiHeader.biBitCount / 8;
									int cxImage_1    = pvi_1->bmiHeader.biWidth;
									int cyImage_1    = pvi_1->bmiHeader.biHeight;
									int cbImage_1    = cyImage_1 * cxImage_1 * iPixelSize_1;
									int numPixels_1  = cxImage_1 * cyImage_1;
									int iPixelSize_2 = pvi_2->bmiHeader.biBitCount / 8;
									int cxImage_2    = pvi_2->bmiHeader.biWidth;
									int cyImage_2    = pvi_2->bmiHeader.biHeight;
									int cbImage_2    = cyImage_2 * cxImage_2 * iPixelSize_2;
									int numPixels_2  = cxImage_2 * cyImage_2;
									int iPixelSize_output = pvi_output->bmiHeader.biBitCount / 8;
									int cxImage_output    = pvi_output->bmiHeader.biWidth;
									int cyImage_output    = pvi_output->bmiHeader.biHeight;
									int cbImage_output    = cyImage_output * cxImage_output * iPixelSize_output;
									int numPixels_output  = cxImage_output * cyImage_output;
									prgb_1 = (RGBQUAD*) pData_1;
									prgb_2 = (RGBQUAD*) pData_2;
									prgb_output = (RGBQUAD*) pData_output;
									ValidateWritePtr(pData_output,lDataLen_output);
									if 
										(
										cxImage_1 == cxImage_2
										&&
										cyImage_1 == cyImage_2
										&&
										cxImage_output == cxImage_1 + cxImage_2
										&&
										cyImage_output == cyImage_1
										)
									{
										int cxImage    = cxImage_1;
										int cyImage    = cyImage_1;
										for (int local_counter = 0;local_counter<cyImage;local_counter++)
										{
											memcpy(&prgb_output[(cyImage-local_counter)*cxImage*2],&prgb_1[local_counter*cxImage],cxImage*sizeof(RGBQUAD));
											memcpy(&prgb_output[cxImage+(cyImage-local_counter)*cxImage*2],&prgb_2[local_counter*cxImage],cxImage*sizeof(RGBQUAD));
										}																		
									}
								}
							}
							local_filter->m_outputpin->Deliver(local_output_sample);
						}
					}
				}
				local_sample_1->Release();
				local_sample_2->Release();
			}
		}
	}
	local_filter->thread_handle = 0;
	return 0;
}



Here is the media delivery thread.
With some filters I get an error when copying data to the output buffer (the memcpy), even though the buffer has the right size!
With other filters it works fine.

How should I set the right buffer size?
What should I do so that my filter works correctly with every filter it connects to?
What else should I do in DecideBufferSize?
Why does it work with some filters and not with others?
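
One way to narrow down which connection overruns the buffer is to verify, before the copy loop in deliver_thread, that the output sample can really hold both images side by side. A minimal sketch; OutputBufferLargeEnough is a hypothetical helper and not part of the original filter:

    #include <windows.h>    // RGBQUAD

    // Returns true when an output buffer of output_bytes bytes can hold two
    // cx-by-cy RGB32 images placed side by side. Calling this before the memcpy
    // loop lets the filter drop a frame instead of writing past the allocation.
    static bool OutputBufferLargeEnough(int cx, int cy, long output_bytes)
    {
        const long row_bytes = static_cast<long>(cx) * sizeof(RGBQUAD); // one source row
        const long required  = 2 * row_bytes * cy;                      // both images, all rows
        return required <= output_bytes;
    }

In deliver_thread this could be called as OutputBufferLargeEnough(cxImage_1, cyImage_1, lDataLen_output) right before the copy loop, skipping the frame when it returns false.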

Answer

I have found the error.
It was a buffer offset: the row index in the copy loop was off by one.
Some allocators give more memory than required, while others allocate exactly the requested size, which is why the overrun only showed up with some filters.

Here is the correct code:
for (int local_counter = 0; local_counter < cyImage; local_counter++)
{
	memcpy(&prgb_output[(cyImage-1-local_counter)*cxImage*2], &prgb_1[local_counter*cxImage], cxImage*sizeof(RGBQUAD));
	memcpy(&prgb_output[cxImage+(cyImage-1-local_counter)*cxImage*2], &prgb_2[local_counter*cxImage], cxImage*sizeof(RGBQUAD));
}
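
To see why the original index overran the buffer: the output sample holds cyImage * cxImage * 2 RGBQUADs, and on the first iteration (local_counter = 0) the old expression (cyImage - local_counter) * cxImage * 2 equals exactly that pixel count, so the first memcpy started one full row past the last valid element. A small worked example with made-up dimensions:

    // Assume cxImage = 4 and cyImage = 3: the output buffer holds 3 * 4 * 2 = 24
    // RGBQUADs, so the valid indices are 0..23.
    int cxImage = 4, cyImage = 3, local_counter = 0;
    int old_index = (cyImage - local_counter)     * cxImage * 2;  // 24 -> copy starts past the end
    int new_index = (cyImage - 1 - local_counter) * cxImage * 2;  // 16 -> bottom output row, in bounds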

