本文介绍了带有NVidia 3D Vision的视频的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我已阅读NVidia的有关NVidia 3D Vision的演示文稿.
我有显示器,视频板,眼镜.
我使用渲染视频过滤器通过简单的视频播放器全屏显示视频.
在此滤镜中,图像是使用Direct3D曲面绘制的.
我将其从IMediaSample加载到两个表面.
之后,我将两幅图像拉伸到带有NVidia签名的最终3D立体表面上.
最后,我将此表面放到缓冲后表面上并执行Present.
如在演示文稿中所写.

但是在全屏NVidia 3D Vision眼镜模式下,我只能得到右眼图像.
问题是:"为什么只有右眼图像?"
答案是:"最后一个StretchRect中的表面矩形!"

我使用平移从源图像中提取了两个图像,就好像是两只眼睛一样.

这是函数代码(保留了先前版本中使用视频缓存的一些代码):

I have read the presentation from NVidia about NVidia 3D Vision.
I have got monitor, video board, glasses.
I use the render video filter to display video in full screen with the simple video player.
In this filter the image is drawing using Direct3D surfaces.
I load it from IMediaSample to two surfaces.
After that i stretch images to final two images 3D surface with NVidia signatures.
Finally, i put this surface to back buffer surface and do Present.
As it is written in the presentation.

But I get only the right image in the fullscreen NVidia 3D Vision glasses mode.
The question is: "Why is there only right image?"
The answer is: "Surface rectangle in last StretchRect!"

I do two images from source image using the shift, as if it was two eyes.

Here is the function code (some code from the previous variant using video caching is left in):

HRESULT CTransform::Transform( IMediaSample *pMediaSample, AM_MEDIA_TYPE* media_type, LPDIRECT3DDEVICE9 direct_3D_device )
{
	if (
		pMediaSample==NULL || media_type==NULL || direct_3D_device == NULL
		)
	{
		return E_POINTER;
	}
	AM_MEDIA_TYPE* pType = media_type;
    VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pType->pbFormat;
    BYTE *pData;                // Pointer to the actual image buffer
    long lDataLen;              // Holds length of any given sample
    RGBTRIPLE *prgb;            // Holds a pointer to the current pixel

    pMediaSample->GetPointer(&pData);
    lDataLen = pMediaSample->GetSize();
    // Get the image properties from the BITMAPINFOHEADER

    int iPixelSize = pvi->bmiHeader.biBitCount / 8;
    int cxImage    = pvi->bmiHeader.biWidth;
    int cyImage    = pvi->bmiHeader.biHeight;
    int cbImage    = cyImage * cxImage * iPixelSize;
    int numPixels  = cxImage * cyImage;
    iPixelSize = pvi->bmiHeader.biBitCount / 8;
    cxImage    = pvi->bmiHeader.biWidth;
    cyImage    = pvi->bmiHeader.biHeight;
    cbImage    = cyImage * cxImage * iPixelSize;
    numPixels  = cxImage * cyImage;

	prgb = (RGBTRIPLE*) pData;
	int pixels_shift = 2*cxImage/100;
	REFERENCE_TIME rtStart, rtEnd;
	pMediaSample->GetTime(&rtStart, &rtEnd);
	{
		if(buffers_size!=cxImage*cyImage)
		{
			buffers_size = cxImage*cyImage;
			delete []member_cash_buffer;
			delete []member_buffer;
			delete []local_member_buffer_1;
			delete []local_member_buffer_2;
			delete []local_member_entered_buffer;
			member_cash_buffer = new RGBTRIPLE[buffers_size];
			member_buffer = new RGBTRIPLE[buffers_size];
			local_member_buffer_1 = new RGBTRIPLE[buffers_size];
			local_member_buffer_2 = new RGBTRIPLE[buffers_size];
			local_member_entered_buffer = new RGBTRIPLE[buffers_size];
			member_valid_cash = 0;
		}
	}


#define RGB_BYTE_ORDER(r, g ,b)  ((DWORD) (((BYTE) (b) | ((WORD) (g) << 8)) | (((DWORD) (BYTE) (r)) << 16)))
	HRESULT local_handle_result = S_OK;
	{
		IDirect3DSurface9* gImageSrcLeft = NULL; // Left Source image surface in video memory
		IDirect3DSurface9* gImageSrcRight = NULL; // Right Source image Surface in video memory

		{
			local_handle_result = direct_3D_device->CreateOffscreenPlainSurface(
				cxImage-pixels_shift, // Stereo width is twice the source width
				cyImage, // Stereo height add one raw to encode signature
				D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, // Surface is in video memory
				&gImageSrcLeft, NULL);
			if(local_handle_result!=S_OK)
			{
				return local_handle_result;
			}
			local_handle_result = direct_3D_device->CreateOffscreenPlainSurface(
				cxImage-pixels_shift, // Stereo width is twice the source width
				cyImage, // Stereo height add one raw to encode signature
				D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, // Surface is in video memory
				&gImageSrcRight, NULL);
			if(local_handle_result!=S_OK)
			{
				gImageSrcLeft->Release();
				return local_handle_result;
			}
			{
				DWORD *local_bit_map_buffer;
				local_bit_map_buffer = new DWORD[(cxImage-pixels_shift)*cyImage];

				for(int local_counter_width=pixels_shift;local_counter_width<cxImage;local_counter_width++)
				{
					for(int local_counter_height=0;local_counter_height<cyImage;local_counter_height++)
					{
						int local_couter = local_counter_width+local_counter_height*cxImage;
						int local_bit_map_couter = local_counter_width-pixels_shift+(cyImage-(local_counter_height+1))*(cxImage-pixels_shift);
						local_bit_map_buffer[local_bit_map_couter] = RGB_BYTE_ORDER(prgb[local_couter].rgbtRed,prgb[local_couter].rgbtGreen,prgb[local_couter].rgbtBlue);
					}
				}

				HBITMAP handle_bit_map = NULL;

				handle_bit_map = CreateBitmap(
					cxImage-pixels_shift,
					cyImage,
					1,
					32,
					local_bit_map_buffer);

				delete []local_bit_map_buffer;

				HRESULT local_handle_result;



				HDC hdc;
				gImageSrcLeft->GetDC(&hdc);

				HDC hdc_compatible = CreateCompatibleDC(hdc);

				SelectObject(hdc_compatible,handle_bit_map);
				BitBlt(hdc, 0  ,0 ,cxImage-pixels_shift  , cyImage  , hdc_compatible, 0, 0, SRCCOPY);
				gImageSrcLeft->ReleaseDC(hdc);
				DeleteDC(hdc_compatible);

				bool local_result = DeleteObject(handle_bit_map);
			}

			{
				DWORD *local_bit_map_buffer;
				local_bit_map_buffer = new DWORD[(cxImage-pixels_shift)*cyImage];

				for(int local_counter_width=0;local_counter_width<cxImage-pixels_shift;local_counter_width++)
				{
					for(int local_counter_height=0;local_counter_height<cyImage;local_counter_height++)
					{
						int local_couter = local_counter_width+local_counter_height*cxImage;
						int local_bit_map_couter = local_counter_width+(cyImage-(local_counter_height+1))*(cxImage-pixels_shift);
						local_bit_map_buffer[local_bit_map_couter] = RGB_BYTE_ORDER(prgb[local_couter].rgbtRed,prgb[local_couter].rgbtGreen,prgb[local_couter].rgbtBlue);
					}
				}

				HBITMAP handle_bit_map = NULL;

				handle_bit_map = CreateBitmap(
					cxImage-pixels_shift,
					cyImage,
					1,
					32,
					local_bit_map_buffer);

				delete []local_bit_map_buffer;

				HRESULT local_handle_result;



				HDC hdc;
				gImageSrcRight->GetDC(&hdc);

				HDC hdc_compatible = CreateCompatibleDC(hdc);

				SelectObject(hdc_compatible,handle_bit_map);
				BitBlt(hdc, 0  ,0 ,cxImage-pixels_shift  , cyImage  , hdc_compatible, 0, 0, SRCCOPY);
				gImageSrcRight->ReleaseDC(hdc);
				DeleteDC(hdc_compatible);

				bool local_result = DeleteObject(handle_bit_map);
			}

			int gImageWidth= cxImage-pixels_shift; // Source image width
			int gImageHeight= cyImage;// Source image height

			IDirect3DSurface9* gImageSrc= NULL; // Source stereo image beeing created

			local_handle_result = direct_3D_device->CreateOffscreenPlainSurface(
				gImageWidth* 2, // Stereo width is twice the source width
				gImageHeight+ 1, // Stereo height add one raw to encode signature
				D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, // Surface is in video memory
				&gImageSrc, NULL);
			if(local_handle_result!=S_OK)
			{
				gImageSrcLeft->Release();
				gImageSrcRight->Release();
				return local_handle_result;
			}
			// Blit left srcimage to left side of stereo
			RECT srcRect= { 0, 0, gImageWidth, gImageHeight};
			RECT dstRect= { 0, 0, gImageWidth, gImageHeight};
			direct_3D_device->StretchRect(gImageSrcLeft, &srcRect, gImageSrc, &dstRect, D3DTEXF_LINEAR);
			// Blit right srcimage to right side of stereo
RECT srcRect2= { 0, 0, 2*gImageWidth, gImageHeight+1};
			RECT dstRect2= {gImageWidth, 0, 2*gImageWidth, gImageHeight};
			direct_3D_device->StretchRect(gImageSrcRight, &srcRect, gImageSrc, &dstRect2, D3DTEXF_LINEAR);

			// Stereo Blitdefines
#define NVSTEREO_IMAGE_SIGNATURE 0x4433564e //NV3D
			typedef struct _Nv_Stereo_Image_Header
			{
				unsigned int dwSignature;
				unsigned int dwWidth;
				unsigned int dwHeight;
				unsigned int dwBPP;
				unsigned int dwFlags;
			} NVSTEREOIMAGEHEADER, *LPNVSTEREOIMAGEHEADER;
			// ORedflags in the dwFlagsfielsof the _Nv_Stereo_Image_Headerstructure above
#define SIH_SWAP_EYES 0x00000001
#define SIH_SCALE_TO_FIT 0x00000002

			// Lock the stereo image
			D3DLOCKED_RECT lr;
			gImageSrc->LockRect(&lr,NULL,0);
			// write stereo signature in the last raw of the stereo image
			LPNVSTEREOIMAGEHEADER pSIH=
				(LPNVSTEREOIMAGEHEADER)(((unsigned char *) lr.pBits) + (lr.Pitch* gImageHeight));
			// Update the signature header values
			pSIH->dwSignature= NVSTEREO_IMAGE_SIGNATURE;
			pSIH->dwBPP= 32;
			pSIH->dwFlags= SIH_SWAP_EYES; // Src image has left on left and right on right
			pSIH->dwWidth= gImageWidth*2;
			pSIH->dwHeight= gImageHeight;
			// Unlock surface
			gImageSrc->UnlockRect();


			D3DVIEWPORT9 local_view_port;
			direct_3D_device->GetViewport(&local_view_port);
			RECT local_view_port_rect = {0,0,local_view_port.Width,local_view_port.Height};

			{
				gImageSrcLeft->Release();
				gImageSrcRight->Release();
			}
			{
				direct_3D_device->Clear (0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB (0, 0, 0), 0.0f, 0);
				direct_3D_device->BeginScene ();

				IDirect3DSurface9* pDestSurface;
				direct_3D_device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pDestSurface);

				if(pDestSurface)
				{
					direct_3D_device->StretchRect(gImageSrc, &srcRect2, pDestSurface, &local_view_port_rect, D3DTEXF_LINEAR);
				}

				direct_3D_device->EndScene ();
//				direct_3D_device->Present (NULL, NULL, NULL, NULL);

				if(pDestSurface)
				{
					pDestSurface->Release();
				}

				if(gImageSrc)
				{
					gImageSrc->Release();
				}
			}
		}
	}
	return S_OK;
}

推荐答案

gImageSrc->Release();

"切换到窗口模式时,有时应用程序会挂起?"

渲染功能的代码在这里:

?"

The code of rendering function is here:

HRESULT CTransform::Transform( IMediaSample *pMediaSample, AM_MEDIA_TYPE* media_type, LPDIRECT3DDEVICE9 direct_3D_device )
{
	// Corrected rendering path: the whole source frame is uploaded into a
	// surface that has one extra row carrying the NVSTEREOIMAGEHEADER
	// signature, and the FULL surface (signature row included) is stretched
	// onto the back buffer, then presented.
	//
	// Returns E_POINTER for null arguments or a sample without pixel data,
	// the failing D3D HRESULT when the surface cannot be created or locked,
	// S_OK otherwise.
	if (pMediaSample == NULL || media_type == NULL || direct_3D_device == NULL)
	{
		return E_POINTER;
	}

	VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) media_type->pbFormat;
	if (pvi == NULL)
	{
		return E_POINTER;
	}

	BYTE* pData = NULL; // Raw sample bytes (24-bit RGB, bottom-up rows).
	pMediaSample->GetPointer(&pData);
	if (pData == NULL)
	{
		return E_POINTER;
	}

	// Image geometry from the BITMAPINFOHEADER.
	const int cxImage = pvi->bmiHeader.biWidth;
	const int cyImage = pvi->bmiHeader.biHeight;
	RGBTRIPLE* prgb = (RGBTRIPLE*) pData;

	// (Re)allocate the frame-cache member buffers kept from the previous
	// variant whenever the frame size changes.
	if (buffers_size != cxImage * cyImage)
	{
		buffers_size = cxImage * cyImage;
		delete []member_cash_buffer;
		delete []member_buffer;
		delete []local_member_buffer_1;
		delete []local_member_buffer_2;
		delete []local_member_entered_buffer;
		member_cash_buffer = new RGBTRIPLE[buffers_size];
		member_buffer = new RGBTRIPLE[buffers_size];
		local_member_buffer_1 = new RGBTRIPLE[buffers_size];
		local_member_buffer_2 = new RGBTRIPLE[buffers_size];
		local_member_entered_buffer = new RGBTRIPLE[buffers_size];
		member_valid_cash = 0;
	}

// Packs a 24-bit RGB pixel into the 32-bit XRGB layout used by GDI/D3D.
#define RGB_BYTE_ORDER(r, g ,b)  ((DWORD) (((BYTE) (b) | ((WORD) (g) << 8)) | (((DWORD) (BYTE) (r)) << 16)))

	IDirect3DSurface9* gImageSrc = NULL;
	HRESULT local_handle_result = direct_3D_device->CreateOffscreenPlainSurface(
		cxImage,
		cyImage + 1, // One extra row for the stereo signature.
		D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, // Surface is in video memory.
		&gImageSrc, NULL);
	if (local_handle_result != S_OK || gImageSrc == NULL)
	{
		return local_handle_result;
	}

	{
		// Convert the 24-bit bottom-up frame into a top-down 32-bit buffer.
		DWORD* local_bit_map_buffer = new DWORD[cxImage * cyImage];
		for (int local_x = 0; local_x < cxImage; local_x++)
		{
			for (int local_y = 0; local_y < cyImage; local_y++)
			{
				const RGBTRIPLE& pixel = prgb[local_x + local_y * cxImage];
				// Vertical flip: source row y lands on bitmap row (height - 1 - y).
				local_bit_map_buffer[local_x + (cyImage - (local_y + 1)) * cxImage] =
					RGB_BYTE_ORDER(pixel.rgbtRed, pixel.rgbtGreen, pixel.rgbtBlue);
			}
		}

		HBITMAP handle_bit_map = CreateBitmap(cxImage, cyImage, 1, 32, local_bit_map_buffer);
		delete []local_bit_map_buffer;

		if (handle_bit_map != NULL)
		{
			HDC hdc = NULL;
			if (gImageSrc->GetDC(&hdc) == S_OK)
			{
				HDC hdc_compatible = CreateCompatibleDC(hdc);
				if (hdc_compatible != NULL)
				{
					// Restore the DC's previous bitmap before deleting the
					// DC: DeleteObject fails on a bitmap that is still
					// selected into a device context (GDI handle leak in
					// the original code).
					HGDIOBJ previous_bitmap = SelectObject(hdc_compatible, handle_bit_map);
					BitBlt(hdc, 0, 0, cxImage, cyImage, hdc_compatible, 0, 0, SRCCOPY);
					SelectObject(hdc_compatible, previous_bitmap);
					DeleteDC(hdc_compatible);
				}
				gImageSrc->ReleaseDC(hdc);
			}
			DeleteObject(handle_bit_map);
		}
	}

	const int gImageWidth = cxImage;  // Source image width.
	const int gImageHeight = cyImage; // Source image height (without signature row).

	// Source rect spans the WHOLE surface, signature row included — this is
	// the fix the article describes ("surface rectangle in last StretchRect").
	RECT srcRect = { 0, 0, gImageWidth, gImageHeight + 1 };

	// NVidia stereo blit signature, written into the extra bottom row.
#define NVSTEREO_IMAGE_SIGNATURE 0x4433564e //NV3D
	typedef struct _Nv_Stereo_Image_Header
	{
		unsigned int dwSignature;
		unsigned int dwWidth;
		unsigned int dwHeight;
		unsigned int dwBPP;
		unsigned int dwFlags;
	} NVSTEREOIMAGEHEADER, *LPNVSTEREOIMAGEHEADER;
	// OR-ed flags for the dwFlags field of _Nv_Stereo_Image_Header above.
#define SIH_SWAP_EYES 0x00000001
#define SIH_SCALE_TO_FIT 0x00000002

	// Lock the image and write the stereo signature into its last row.
	D3DLOCKED_RECT lr;
	local_handle_result = gImageSrc->LockRect(&lr, NULL, 0);
	if (local_handle_result != S_OK)
	{
		gImageSrc->Release();
		return local_handle_result;
	}
	LPNVSTEREOIMAGEHEADER pSIH =
		(LPNVSTEREOIMAGEHEADER)(((unsigned char*) lr.pBits) + (lr.Pitch * gImageHeight));
	pSIH->dwSignature = NVSTEREO_IMAGE_SIGNATURE;
	pSIH->dwBPP = 32;
	pSIH->dwFlags = SIH_SWAP_EYES; // Source has left eye on the left, right eye on the right.
	pSIH->dwWidth = gImageWidth;
	pSIH->dwHeight = gImageHeight;
	gImageSrc->UnlockRect();

	// Destination rectangle: the current viewport.
	D3DVIEWPORT9 local_view_port;
	direct_3D_device->GetViewport(&local_view_port);
	RECT local_view_port_rect = { 0, 0, (LONG) local_view_port.Width, (LONG) local_view_port.Height };

	direct_3D_device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 0.0f, 0);
	direct_3D_device->BeginScene();

	IDirect3DSurface9* pDestSurface = NULL;
	direct_3D_device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pDestSurface);
	if (pDestSurface)
	{
		direct_3D_device->StretchRect(gImageSrc, &srcRect, pDestSurface, &local_view_port_rect, D3DTEXF_LINEAR);
		pDestSurface->Release();
	}

	direct_3D_device->EndScene();
	direct_3D_device->Present(NULL, NULL, NULL, NULL);

	gImageSrc->Release();

	return S_OK;
}




这篇关于带有NVidia 3D Vision的视频的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持!

08-14 00:42