通过TCP/IP协议进行视频渲染时出现闪烁 [英] Flickering during Video Rendering through TCP/IP Protocol

查看:73
本文介绍了通过TCP/IP协议进行视频渲染时出现闪烁的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我在WPF应用程序中使用2个工作线程以及主UI线程将视频呈现到UI上。但是我认为通过TCP/IP渲染视频帧的代码导致了闪烁。如果我在渲染过程中加入时间延迟,闪烁会少一些。

我也有一些不安全的代码块.请有人可以建议我...完全迷失了:)

 public void TCPIPVideoCommunication()
{
  //ServerInfo srvrinfo = new ServerInfo(Login.ipString,7);
  //套接字soc = srvrinfo.ServerConnect();
  ServerInfo srvrinfo =新的ServerInfo(Login.ipString,7,3000);
  //视频的超时时间更长
  套接字soc = srvrinfo.ConnectToserver();
  byte [] rcvdBuffer =新的byte [115200];

  //对于RGB缓冲区,将尺寸设置为230400(宽*高* 3)
  const int FRAMESIZE = 115200;
  //FrameSize [for YUV] = 1.5 * Width * Height
  int rembytes = FRAMESIZE,lstrvdbuff = 0,frame = 0,index = 0,extraBytes = 0,width = 320,height = 240,hdrlngth = 12;
  rcvdbufferCnt = 0;
  sentbufferCnt = 0;
  bool isFrameStart = false;
  //byte [] [] buffer = new byte [3] [];
  buffer [0] =新的字节[FRAMESIZE];
  buffer [1] =新的字节[FRAMESIZE];
  buffer [2] =新的字节[FRAMESIZE];
  //buffer [3] =新的字节[FRAMESIZE];
  //buffer [4] =新的字节[FRAMESIZE];
  //buffer [5] =新的字节[FRAMESIZE];
  //buffer [6] =新的字节[FRAMESIZE];
  //buffer [7] =新的字节[FRAMESIZE];
  //buffer [8] =新的字节[FRAMESIZE];
  //buffer [9] =新的字节[FRAMESIZE];

  而(已连接)
  {
    做
    {
      lstrvdbuff = soc.Receive(rcvdBuffer);
      //soc.ReceiveTimeout = 100;

      //检查标头(12个字节)以获取正确的信息,以开始接收缓冲区
      如果(rcvdBuffer [0] == 0x0f&&rcvdBuffer [1] == 0x05)
      {
        宽度= 320;
        高度= BitConverter.ToInt32(rcvdBuffer,4);
        rembytes =(宽度*高度)+((宽度*高度)/2);
        isFrameStart = true;
      }

      如果(rembytes> = lstrvdbuff)
      {
        如果(isFrameStart)
          rembytes = rembytes-lstrvdbuff + hdrlngth;
        别的
          rembytes = rembytes-lstrvdbuff;
      }
      别的
      {
        extraBytes = lstrvdbuff-rembytes;
        lstrvdbuff =兆字节;
        rembytes = 0;
      }

      如果(isFrameStart)
      {
        isFrameStart = false;
        Array.Copy(rcvdBuffer,hdrlngth,buffer [rcvdbufferCnt],index,lstrvdbuff-hdrlngth);
        索引+ =(lstrvdbuff-hdrlngth);
      }
      别的
      {
        Array.Copy(rcvdBuffer,0,buffer [rcvdbufferCnt],index,lstrvdbuff);
        索引+ = lstrvdbuff;
      }

      lstrvdbuff = 0;

    } while(rembytes> 0);
    //File.WriteAllBytes("C:\\VaSYUVImage4.yuv",buffer);

    如果(rcvdbufferCnt> = 2)
    {
      rcvdbufferCnt = 0;
    }
    别的
    {
      rcvdbufferCnt ++;
    }

    如果(rcvdbufferCnt == 0)
    {
      sentbufferCnt = 2;
    }
    别的
    {
      sentbufferCnt = rcvdbufferCnt-1;
    }

    如果(extraBytes> 0)
      Array.Copy(rcvdBuffer,rembytes,buffer [rcvdbufferCnt],0,extraBytes);
    rembytes = FRAMESIZE-extraBytes;
    index = extraBytes;
    extraBytes = 0;
    框架++;

    如果(stopComm == true)
    {
      //仅在最后关闭套接字
      soc.Close();
      休息;
    }

    如果(VidWrkrThrd == null)
    {
      VidWrkrThrd = new Thread(new ThreadStart(YUVtoRGB));
      VidWrkrThrd.IsBackground = true;
      VidWrkrThrd.Start();
    }

    如果(VidWrkrThrd.ThreadState == System.Threading.ThreadState.Stopped)
    {
      VidWrkrThrd = new Thread(new ThreadStart(YUVtoRGB));
      VidWrkrThrd.IsBackground = true;
      VidWrkrThrd.Start();
    }
    //Thread.Sleep(10);
    autrstEvnt.Set();
    //YUVtoRGB();
  }

}

不安全的公共无效YUVtoRGB()
{
  //byte [] yBuff,uBuff,vBuff;
  //yBuff =新字节[320 * 240]; //Y = W * h
  //uBuff =新字节[(320 * 240)/4]; //U = W * h/4
  //vBuff =新字节[(320 * 240)/4]; //V = W * h/4

  rgbBuffer =新字节[320 * 240 * 3]; //RGB缓冲区=宽度*高度* 3;

  一会儿(true)
  {
    autrstEvnt.WaitOne();

    byte [] yBuff,uBuff,vBuff;
    yBuff =新字节[320 * 240]; //Y = W * h
    uBuff =新字节[(320 * 240)/4]; //U = W * h/4
    vBuff =新字节[(320 * 240)/4]; //V = W * h/4

    Array.Copy(buffer [sentbufferCnt],0,yBuff,0,width * height);
    Array.Copy(buffer [sentbufferCnt],yBuff.Length,uBuff,0,width * height/4);
    Array.Copy(buffer [sentbufferCnt],yBuff.Length + uBuff.Length,vBuff,0,width * height/4);


    字节y,v
    双倍r,g,b;
    int i,j;

    固定(字节* P1 = rgbBuffer)//因为rcvdbufferCnt已经增加
    {
      字节* P2 = P1;

      对于(i = 0; i< height; i ++)
      {

        对于(j = 0; j< width; j ++)
        {
          y = yBuff [i * width + j];
          u = uBuff [(i/2)*(width/2)+(j/2)];
          v = vBuff [(i/2)*(width/2)+(j/2)];

          r = y-16 +(1.7790 *(u-128));
          g = y-16-(0.3455 *(u-128))-(0.7169 *(v-128));
          b = y-16 +(1.4075 *(v-128));

          //r =(1.164 *(y-16))+(2.018 *(u-128));
          //g =(1.164 *(y-16))-(0.813 *(v-128))-(0.391 *(u-128));
          //b =(1.164 *(y-16))+(1.596 *(v-128));

          //b = y +(1.4075 *(v-128));
          //g = y-(0.3455 *(u-128))-(0.7169 *(v-128));
          //r = y +(1.7790 *(u-128));

          如果(r> 255)r = 255;
          如果(g> 255)g = 255;
          如果(b> 255)b = 255;
          如果(r< 0)r = 0;
          如果(g< 0)g = 0;
          如果(b< 0)b = 0;

          * P2 ++ = Convert.ToByte(r);
          * P2 ++ = Convert.ToByte(g);
          * P2 ++ = Convert.ToByte(b);
        }
      }
    }

    //引发事件
    如果(PacketReceived!= null)
    {
      PacketReceived(rgbBuffer);
    }
  }
}

公共结构BITMAPINFOHEADER
{
  公共uint biSize;
  public int biWidth;
  public int biHeight;
  公共超短双翼飞机;
  公共ushort biBitCount;
  公共uint biCompression;
  公共uint biSizeImage;
  public int biXPelsPerMeter;
  public int biYPelsPerMeter;
  公共uint biClrUsed;
  公共单位重要
}

公共无效OnPacketReceived(byte [] rgbBuffer)
{
  BITMAPINFOHEADER bitmapInfo;
  bitmapInfo.biHeight = 240;
  //bitmapInfo.biHeight = 258;
  bitmapInfo.biWidth = 320;
  //bitmapInfo.biWidth = 344;
  //bitmapInfo.biSize = 40;
  //bitmapInfo.biPlanes = 1;
  //bitmapInfo.biBitCount = 32;
  //bitmapInfo.biCompression = 0;
  bitmapInfo.biSizeImage = 320 * 240 * 3;
  ////bitmapInfo.biSizeImage = 344 * 258 * 3;
  //bitmapInfo.biXPelsPerMeter = 96;
  //bitmapInfo.biYPelsPerMeter = 96;
  //bitmapInfo.biClrUsed = 0;
  //bitmapInfo.biClrImportant = 0;

  IntPtr ptr;
  //将Intptr放入rgbBuffer
  不安全的
  {
    固定(字节* p = rgbBuffer)
    {
      ptr =新的IntPtr((void *)p);
    }
  }
  //我们将ptr处的数据复制到托管数组
  //int arrSize =(int)(frameSize * Marshal.SizeOf(Type.GetType("System.Byte"))))/Marshal.SizeOf(Type.GetType("System.Int32")));
  //Int32 [] ptrArray = new Int32 [arrSize];
  //Int32 [] ptrArray = new Int32 [230400];
  //Int32 [] ptrArray = new Int32 [266256];
  Int32 [] ptrArray = new Int32 [bitmapInfo.biSizeImage];
  Marshal.Copy(ptr,ptrArray,0,ptrArray.Length);

  //不安全的代码.
  不安全的
  {
    //将指针固定到数组.
    固定(int * pArray = ptrArray)
    {
      //pArray现在具有指向数组的指针.你可以得到一个IntPtr
      //通过将其转换为void并将其传递进来.
      IntPtr copyPtr =新的IntPtr((void *)pArray);

      位图图像=新位图(bitmapInfo.biWidth,Math.Abs​​(bitmapInfo.biHeight),(bitmapInfo.biWidth * 3),
      System.Drawing.Imaging.PixelFormat.Format24bppRgb,copyPtr);

      //image.Save("C:\\VaSTestImage.bmp);

      //从位图获取BitmapSource
      BitmapSource bmpsrc = BitmaptoBitmapSource(image);

      DrawingVisual drawingVisual = new DrawingVisual();
      DrawingContext drawingContext = drawingVisual.RenderOpen();
      Rect rect =新的Rect(0,0,320,240);
      drawingContext.DrawImage(bmpsrc,rect);
      RenderImage(bmpsrc);
      刷新(VideoCanvas);

      //应基于反复试验设置此参数,以避免在用户界面中闪烁
      //Thread.Sleep(150);

      //由于缓冲实现,该参数为零
      //Thread.Sleep(0);

      drawingContext.Close();
      drawingVisual = null;
    }
  }
}

[DllImport("gdi32.dll"))
私有静态外部布尔DeleteObject(IntPtr hObject);

私有BitmapSource BitmaptoBitmapSource(位图位图)
{
  如果(位图==空)
    抛出新的ArgumentNullException("bitmap");


  IntPtr hBitmap = bitmap.GetHbitmap();
  尝试
  {
    System.Windows.Media.Imaging.BitmapSource bitmapSource = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
    hBitmap,
    IntPtr.Zero,
    Int32Rect.Empty,
    System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
    bitmapSource.Freeze();
    返回bitmapSource;
  }

  最后
  {
    DeleteObject(hBitmap);
  }

}

私有委托void RenderImageEventsHandler(BitmapSource image);

私有void RenderImage(BitmapSource bmpsrc)
{
  如果(this.video.Dispatcher.CheckAccess())
  {
    this.video.Source = bmpsrc;
  }
  别的
  {
    this.video.Dispatcher.BeginInvoke(new RenderImageEventsHandler(RenderImage),new object [] {bmpsrc});
  }
}

私有委托无效NoArgDelegate();

公共静态无效刷新(UIElement obj)
{
  obj.Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
  (NoArgDelegate)代表{});
} 


 

解决方案

您好,DotNet_Work,

由于WPF采用保留模式(retained-mode)图形系统,因此我们不需要自己处理渲染。当两次渲染不同步时,就可能出现此问题。

您可以使用在WPF中性能出色的WriteableBitmap类。WriteableBitmap具有两个缓冲区:后台缓冲区分配在系统内存中,累积当前未显示的内容;前台缓冲区同样分配在系统内存中,包含当前显示的内容。渲染系统将前台缓冲区复制到显存中进行显示。这种双缓冲可以提高性能。

为提高实现效率,另一个选择是CompositionTarget类,在其Rendering事件中可以逐帧渲染内容,通常用于复杂动画。有关更多信息,请参考 http://msdn.microsoft.com/en-us/library/system.windows.media.compositiontarget.aspx

希望此信息对您有所帮助!如果您仍有任何疑问,请随时告诉我.

最诚挚的问候

 


Hi,

I am using 2 worker threads along with main UI thread in my WPF application to render a Video onto UI. But i beleive the code which renders the Video frames through TCP/IP results in Flickering. If i give a time delay during rendering the flicker is somewhat less.

Also i have some unsafe code blocks. Please can someone suggest me ... Am completely lost :)

/// <summary>
/// Receives the video stream from the server over TCP and de-packetizes it
/// into a rotating set of three YUV frame buffers (buffer[0..2]), signalling
/// the YUVtoRGB worker for each completed frame. Runs until the socket
/// disconnects or stopComm is set.
/// NOTE(review): buffer, rcvdbufferCnt and sentbufferCnt are shared with
/// YUVtoRGB with no synchronization beyond autrstEvnt — a frame can be
/// overwritten while it is still being converted, which would explain the
/// reported flicker/tearing. TODO confirm and guard the index handoff.
/// </summary>
public void TCPIPVideoCommunication()
{
  //ServerInfo srvrinfo = new ServerInfo(Login.ipString, 7);
  //Socket soc = srvrinfo.ServerConnect();
  ServerInfo srvrinfo = new ServerInfo(Login.ipString, 7, 3000);
  //Longer timeout period for Video
  Socket soc = srvrinfo.ConnectToserver();
  byte[] rcvdBuffer = new byte[115200];

  //For RGB buffer make the size to 230400 (width * height * 3)
  const int FRAMESIZE = 115200;
  // FrameSize [for YUV] = 1.5 * Width * Height
  int rembytes = FRAMESIZE, lstrvdbuff = 0, frame = 0, index = 0, extraBytes = 0, width = 320, height = 240, hdrlngth = 12;
  rcvdbufferCnt = 0;
  sentbufferCnt = 0;
  bool isFrameStart = false;
  //byte[][] buffer = new byte[3][];
  // Triple buffering: one frame being filled, one ready, one being converted.
  buffer[0] = new byte[FRAMESIZE];
  buffer[1] = new byte[FRAMESIZE];
  buffer[2] = new byte[FRAMESIZE];
  //buffer[3] = new byte[FRAMESIZE];
  //buffer[4] = new byte[FRAMESIZE];
  //buffer[5] = new byte[FRAMESIZE];
  //buffer[6] = new byte[FRAMESIZE];
  //buffer[7] = new byte[FRAMESIZE];
  //buffer[8] = new byte[FRAMESIZE];
  //buffer[9] = new byte[FRAMESIZE];

  while (soc.Connected)
  {
    // Inner loop: accumulate socket reads until one whole frame has arrived.
    do
    {
      lstrvdbuff = soc.Receive(rcvdBuffer);   // byte count of this chunk
      //soc.ReceiveTimeout = 100;

      //Checking the header(12 bytes) for correct info to start recievning the buffer
      // NOTE(review): only the first two bytes of *every* chunk are inspected;
      // payload that happens to begin with 0x0f 0x05 would be misread as a
      // header — confirm the sender guarantees header alignment per receive.
      if (rcvdBuffer[0] == 0x0f && rcvdBuffer[1] == 0x05)
      {
        width = 320;
        height = BitConverter.ToInt32(rcvdBuffer, 4);          // height is carried in the header
        rembytes = (width * height) + ((width * height) / 2);  // YUV420 frame size = 1.5 * w * h
        isFrameStart = true;
      }

      if (rembytes >= lstrvdbuff)
      {
        if (isFrameStart)
          rembytes = rembytes - lstrvdbuff + hdrlngth;  // the 12 header bytes are not payload; add them back
        else
          rembytes = rembytes - lstrvdbuff;
      }
      else
      {
        // This chunk holds the tail of the current frame plus the start of the next.
        extraBytes = lstrvdbuff - rembytes;
        lstrvdbuff = rembytes;
        rembytes = 0;
      }

      if (isFrameStart)
      {
        isFrameStart = false;
        // First chunk of a frame: skip the 12-byte header when copying.
        Array.Copy(rcvdBuffer, hdrlngth, buffer[rcvdbufferCnt], index, lstrvdbuff - hdrlngth);
        index += (lstrvdbuff - hdrlngth);
      }
      else
      {
        Array.Copy(rcvdBuffer, 0, buffer[rcvdbufferCnt], index, lstrvdbuff);
        index += lstrvdbuff;
      }

      lstrvdbuff = 0;

    } while (rembytes > 0);
    //File.WriteAllBytes("C:\\VaSYUVImage4.yuv",buffer);

    // Advance the fill index (0 -> 1 -> 2 -> 0 ...).
    if (rcvdbufferCnt >= 2)
    {
      rcvdbufferCnt = 0;
    }
    else
    {
      rcvdbufferCnt++;
    }

    // The converter reads the buffer just behind the one being filled.
    if (rcvdbufferCnt == 0)
    {
      sentbufferCnt = 2;
    }
    else
    {
      sentbufferCnt = rcvdbufferCnt - 1;
    }

    // Carry any spill-over bytes into the next frame's buffer.
    // NOTE(review): rembytes is always 0 here (loop exit condition), so the
    // spill-over is copied from offset 0 of rcvdBuffer rather than from just
    // after the consumed bytes — this looks wrong; verify against the sender.
    if (extraBytes > 0)
      Array.Copy(rcvdBuffer, rembytes, buffer[rcvdbufferCnt], 0, extraBytes);
    rembytes = FRAMESIZE - extraBytes;
    index = extraBytes;
    extraBytes = 0;
    frame++;

    if (stopComm == true)
    {
      //Close the socket only at the end
      soc.Close();
      break;
    }

    // Lazily start the YUV->RGB worker thread on first frame...
    if (VidWrkrThrd == null)
    {
      VidWrkrThrd = new Thread(new ThreadStart(YUVtoRGB));
      VidWrkrThrd.IsBackground = true;
      VidWrkrThrd.Start();
    }

    // ...and restart it if it has exited for any reason.
    if (VidWrkrThrd.ThreadState == System.Threading.ThreadState.Stopped)
    {
      VidWrkrThrd = new Thread(new ThreadStart(YUVtoRGB));
      VidWrkrThrd.IsBackground = true;
      VidWrkrThrd.Start();
    }
    //Thread.Sleep(10);
    autrstEvnt.Set();   // signal the worker that a complete frame is ready
    //YUVtoRGB();
  }

}

/// <summary>
/// Worker-thread loop: waits for the receiver's signal, converts the most
/// recently completed YUV420 frame (buffer[sentbufferCnt]) to packed 24-bit
/// RGB in rgbBuffer, then raises PacketReceived with the result. Never
/// returns; the thread lives until the process exits (IsBackground = true).
/// NOTE(review): buffer, sentbufferCnt, width and height are written by the
/// receive thread with no lock — a frame can change mid-conversion, which
/// would produce visible tearing/flicker; confirm and synchronize.
/// </summary>
unsafe public void YUVtoRGB()
{
  //byte[] yBuff, uBuff, vBuff;
  //yBuff = new byte[320 * 240]; //Y = W*h
  //uBuff = new byte[(320 * 240) / 4]; //U = W*h / 4
  //vBuff = new byte[(320 * 240) / 4]; //V = W*h / 4

  rgbBuffer = new byte[320 * 240 * 3]; //RGB Buffer = Width * Height *3;

  while (true)
  {
    autrstEvnt.WaitOne();   // block until the receiver flags a complete frame

    // Fresh plane buffers every frame (~115 KB of allocation per frame).
    // NOTE(review): this causes GC churn; they could be allocated once
    // before the loop if width/height are truly fixed — confirm.
    byte[] yBuff, uBuff, vBuff;
    yBuff = new byte[320 * 240]; //Y = W*h
    uBuff = new byte[(320 * 240) / 4]; //U = W*h / 4
    vBuff = new byte[(320 * 240) / 4]; //V = W*h / 4

    // Split the planar YUV420 frame: Y plane first, then U, then V.
    Array.Copy(buffer[sentbufferCnt], 0, yBuff, 0, width * height);
    Array.Copy(buffer[sentbufferCnt], yBuff.Length, uBuff, 0, width * height / 4);
    Array.Copy(buffer[sentbufferCnt], yBuff.Length + uBuff.Length, vBuff, 0, width * height / 4);


    byte y, u, v;
    double r, g, b;
    int i, j;

    fixed (byte* P1 = rgbBuffer) //coz rcvdbufferCnt would have been incremented already
    {
      byte* P2 = P1;   // write cursor over the packed RGB output

      for (i = 0; i < height; i++)
      {

        for (j = 0; j < width; j++)
        {
          // Chroma is subsampled 2x2: each U/V sample covers 4 luma pixels.
          y = yBuff[i * width + j];
          u = uBuff[(i / 2) * (width / 2) + (j / 2)];
          v = vBuff[(i / 2) * (width / 2) + (j / 2)];

          // NOTE(review): these formulas apply the U term to R and the V term
          // to B — the opposite of the usual BT.601 pairing (R goes with V).
          // Possibly the source swaps the chroma planes; confirm against the
          // camera/encoder, otherwise red and blue are exchanged.
          r = y - 16 + (1.7790 * (u - 128));
          g = y - 16 - (0.3455 * (u - 128)) - (0.7169 * (v - 128));
          b = y - 16 + (1.4075 * (v - 128));

          //r = (1.164*(y - 16)) + (2.018*(u - 128));
          //g = (1.164*(y - 16)) - (0.813*(v - 128)) - (0.391*(u - 128));
          //b = (1.164*(y - 16)) + (1.596*(v - 128));

          //b = y + (1.4075 * (v - 128));
          //g = y - (0.3455 * (u - 128)) - (0.7169 * (v - 128));
          //r = y + (1.7790 * (u - 128));

          // Clamp into the valid byte range before narrowing.
          if (r > 255) r = 255;
          if (g > 255) g = 255;
          if (b > 255) b = 255;
          if (r < 0) r = 0;
          if (g < 0) g = 0;
          if (b < 0) b = 0;

          *P2++ = Convert.ToByte(r);   // Convert.ToByte rounds to nearest
          *P2++ = Convert.ToByte(g);
          *P2++ = Convert.ToByte(b);
        }
      }
    }

    //Raising the event
    if (PacketReceived != null)
    {
      PacketReceived(rgbBuffer);
    }
  }
}

/// <summary>
/// Managed mirror of the Win32 BITMAPINFOHEADER structure (wingdi.h).
/// The explicit sequential layout documents and pins the native 40-byte
/// layout (it matches the C# default for structs, so behavior is unchanged)
/// in case the struct is ever marshaled to GDI.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct BITMAPINFOHEADER
{
  public uint biSize;           // size of this header in bytes (40)
  public int biWidth;           // bitmap width in pixels
  public int biHeight;          // height; positive = bottom-up, negative = top-down
  public ushort biPlanes;       // color planes, always 1
  public ushort biBitCount;     // bits per pixel
  public uint biCompression;    // BI_RGB (0) for uncompressed
  public uint biSizeImage;      // image size in bytes (may be 0 for BI_RGB)
  public int biXPelsPerMeter;   // horizontal resolution, pixels per meter
  public int biYPelsPerMeter;   // vertical resolution, pixels per meter
  public uint biClrUsed;        // number of palette entries actually used
  public uint biClrImportant;   // number of palette entries required to display
}

/// <summary>
/// Handler for the PacketReceived event raised by the YUV->RGB worker:
/// wraps the packed 24-bit RGB frame in a GDI+ Bitmap, converts it to a
/// frozen BitmapSource and hands it to the UI thread for display.
/// </summary>
/// <param name="rgbBuffer">Packed RGB pixel data, 320 * 240 * 3 bytes.</param>
public void OnPacketReceived(byte[] rgbBuffer)
{
  BITMAPINFOHEADER bitmapInfo;
  bitmapInfo.biHeight = 240;
  bitmapInfo.biWidth = 320;
  bitmapInfo.biSizeImage = 320 * 240 * 3;

  unsafe
  {
    // Keep rgbBuffer pinned for as long as the Bitmap wraps its memory.
    // Fixes two bugs in the previous version:
    //  1. the pointer was captured inside a short fixed block and used after
    //     it ended — a dangling pointer once the GC moves the array;
    //  2. the pixels were Marshal.Copy'd into an Int32 array sized with the
    //     BYTE count (biSizeImage), reading 4x past the end of rgbBuffer.
    // The intermediate Int32[] round-trip and the never-displayed
    // DrawingVisual/DrawingContext work were dead code and are removed.
    fixed (byte* pPixels = rgbBuffer)
    {
      // Stride = width * 3 bytes; 320 * 3 = 960 is 4-byte aligned as GDI+ requires.
      // using(...) also fixes the Bitmap leak of the previous version.
      using (Bitmap image = new Bitmap(bitmapInfo.biWidth, Math.Abs(bitmapInfo.biHeight),
        (bitmapInfo.biWidth * 3), System.Drawing.Imaging.PixelFormat.Format24bppRgb,
        new IntPtr((void*)pPixels)))
      {
        // BitmaptoBitmapSource calls GetHbitmap, which copies the pixels, so
        // nothing references the pinned buffer once the BitmapSource exists.
        BitmapSource bmpsrc = BitmaptoBitmapSource(image);

        RenderImage(bmpsrc);
        Refresh(VideoCanvas);
      }
    }
  }
}

// Frees a native GDI object — here the HBITMAP returned by Bitmap.GetHbitmap()
// in BitmaptoBitmapSource — so the unmanaged handle does not leak.
[DllImport("gdi32.dll")]
private static extern bool DeleteObject(IntPtr hObject);

/// <summary>
/// Converts a GDI+ Bitmap into a frozen WPF BitmapSource, releasing the
/// intermediate native HBITMAP in all cases.
/// </summary>
/// <param name="bitmap">Source image; must not be null.</param>
/// <returns>A frozen (thread-safe, immutable) BitmapSource copy of the pixels.</returns>
private BitmapSource BitmaptoBitmapSource(Bitmap bitmap)
{
  if (bitmap == null)
  {
    throw new ArgumentNullException("bitmap");
  }

  // GetHbitmap allocates a native GDI handle that the Bitmap does not own.
  IntPtr nativeHandle = bitmap.GetHbitmap();
  try
  {
    var source = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
      nativeHandle,
      IntPtr.Zero,
      Int32Rect.Empty,
      System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
    // Freeze so the image can be handed across threads without ownership checks.
    source.Freeze();
    return source;
  }
  finally
  {
    // Always release the native handle, even if conversion throws.
    DeleteObject(nativeHandle);
  }
}

// Delegate used to marshal RenderImage calls onto the UI dispatcher thread.
private delegate void RenderImageEventsHandler(BitmapSource image);

/// <summary>
/// Assigns the decoded frame to the video Image control, marshalling the
/// call onto the UI dispatcher thread when invoked from a worker thread.
/// </summary>
/// <param name="bmpsrc">Frozen frame to display.</param>
private void RenderImage(BitmapSource bmpsrc)
{
  if (!this.video.Dispatcher.CheckAccess())
  {
    // Called from a worker thread: re-enter this method on the UI thread.
    this.video.Dispatcher.BeginInvoke(new RenderImageEventsHandler(RenderImage), new object[] { bmpsrc });
    return;
  }

  this.video.Source = bmpsrc;
}

// Parameterless delegate used by Refresh to post an empty work item to the dispatcher.
private delegate void NoArgDelegate();

/// <summary>
/// Forces the element's dispatcher to drain its queue down to Normal
/// priority by synchronously invoking an empty delegate — a WPF
/// "DoEvents"-style trick that makes pending render/layout work run now.
/// NOTE(review): blocking the caller like this on every frame can itself
/// cause stutter; consider WriteableBitmap-driven rendering instead.
/// </summary>
public static void Refresh(UIElement obj)
{
  obj.Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
  (NoArgDelegate)delegate { });
}


 

解决方案

Hi DotNet_Work,

Since WPF takes advantage of retained-mode graphics system so we don't need to handle the rendering ourselves. The problem may happen when the two renderings are out of sync.

You may use WriteableBitmap class which has great performance in WPF. WriteableBitmap has two buffers. The back buffer is allocated in system memory and accumulates content that is not currently displayed. The front buffer is allocated in system memory and contains the content that is currently displayed. The rendering system copies the front buffer to video memory for display. This double-buffering improves the performance.

To enhance the efficiency of your implementation another option is CompositionTarget Class. In which Rendering event you may get your content rendered per frame. This is generally used in complex animations.  For more information please refer to http://msdn.microsoft.com/en-us/library/system.windows.media.compositiontarget.aspx

Hope this information is helpful for you! If you still have any questions please feel free to let me know.

Best regards

 


这篇关于通过TCP/IP协议进行视频渲染时出现闪烁的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆