Unity: Live Video Streaming


Problem Description

I'm trying to stream live video from one app to another. Currently I have two apps: app 1 is the server/sender and app 2 is the client/receiver. In app 1 I successfully send the video bytes to the client, and on the client side I also receive all of the bytes. I'm using sockets and TCP. The issue I'm facing is that when I receive the video bytes and assign them to a RawImage texture, the image on that texture looks zoomed in far too much and is very pixelated.

Updated images

This is what I stream,

and this is what I get on the client.

That is the first issue. I'm currently testing from one desktop to another, but my goal is to stream from an iPad to a desktop, and when I do that it is so slow that it kills the app on both the iPad and the desktop.

Some troubleshooting I have tried so far:

1: I think this happens because I have two different resolutions, since I stream from the iPad to the desktop.

2: The texture image is too large; I printed its size and it returns 630. I tried to resize it using Unity's Texture2D.Resize, but I get a gray texture because that function leaves the pixels undefined (a possible workaround is sketched after this list).

3: I used other libraries for resizing textures and did get what I wanted, but after about 12 frames the RawImage starts flickering between the video and a "?" texture, then the app freezes on both devices (iPad and desktop).

4: I believe the way I'm reading the texture is causing the issue, because I use both SetPixels and GetPixels, and they are heavy.
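Regarding item 2, one common way to scale a Texture2D without the undefined-pixel problem of Texture2D.Resize is to blit through a temporary RenderTexture and read the result back. The sketch below only illustrates that idea; it is not code from the original question, and the TextureScaler class name is just for the example:

using UnityEngine;

public static class TextureScaler
{
    // Returns a scaled copy of 'source'. Blitting into a RenderTexture and
    // reading it back guarantees every destination pixel is written, unlike
    // Texture2D.Resize, which leaves the pixel data undefined.
    public static Texture2D Scaled(Texture source, int width, int height)
    {
        RenderTexture rt = RenderTexture.GetTemporary(width, height);
        Graphics.Blit(source, rt);

        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = rt;

        Texture2D result = new Texture2D(width, height, TextureFormat.RGB24, false);
        result.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        result.Apply();

        RenderTexture.active = previous;
        RenderTexture.ReleaseTemporary(rt);
        return result;
    }
}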

My code: Server / sender side:

using UnityEngine;
using System.Collections;
using System.IO;
using UnityEngine.UI;
using System;
using System.Text;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Collections.Generic;

public class Connecting : MonoBehaviour
{
WebCamTexture webCam;
public RawImage myImage;
Texture2D currentTexture;

private TcpListener listner;
private const int port = 8010;
private bool stop = false;

private List<TcpClient> clients = new List<TcpClient>();

private void Start()
{
    // Open the Camera on the desired device, in my case IPAD pro
    webCam = new WebCamTexture();
    // Get all devices , front and back camera
    webCam.deviceName = WebCamTexture.devices[WebCamTexture.devices.Length - 1].name;

    // request the lowest width and heigh possible
    webCam.requestedHeight = 10;
    webCam.requestedWidth = 10;


    webCam.Play();

    currentTexture = new Texture2D(webCam.width, webCam.height);

    // Connect to the server
    listner = new TcpListener(port);

    listner.Start();

    // Create Seperate thread for requesting from client 
    Loom.RunAsync(() => {

        while (!stop)
        {
            // Wait for client approval
            var client = listner.AcceptTcpClient();
            // We are connected
            clients.Add(client);


            Loom.RunAsync(() =>
            {
                while (!stop)
                {

                    var stremReader = client.GetStream();

                    if (stremReader.CanRead)
                    {
                        // we need storage for data
                        using (var messageData = new MemoryStream())
                        {
                            Byte[] buffer = new Byte[client.ReceiveBufferSize];


                            while (stremReader.DataAvailable)
                            {
                                int bytesRead = stremReader.Read(buffer, 0, buffer.Length);

                                if (bytesRead == 0)
                                    break;

                                // Writes to the data storage
                                messageData.Write(buffer, 0, bytesRead);

                            }

                            if (messageData.Length > 0)
                            {
                                // send pngImage
                                SendPng(client);

                            }

                        }
                    }
                }
            });
        }

    });



}

private void Update()
{
    myImage.texture = webCam;
}


// Read video pixels and send them to the client
private void SendPng (TcpClient client)
{
    Loom.QueueOnMainThread(() =>
    {
        // Get the webcame texture pixels   
        currentTexture.SetPixels(webCam.GetPixels());
        var pngBytes = currentTexture.EncodeToPNG();


        // Want to Write 
        var stream = client.GetStream();

        // Write the image bytes
        stream.Write(pngBytes, 0, pngBytes.Length);

        // send it 
        stream.Flush();

    });
}

// stop everything
private void OnApplicationQuit()
{
    webCam.Stop();
    stop = true;
    listner.Stop();

    foreach (TcpClient c in clients)
        c.Close();
}



}

Client / receiver side:

using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using System.Net.Sockets; 
using System.Net;
using System.IO;

public class reciver : MonoBehaviour
{

public RawImage image;

const int port = 8010;

public string IP = "";

TcpClient client;


Texture2D tex;

// Use this for initialization
void Start()
{

    client = new TcpClient();

    // connect to server

    Loom.RunAsync(() => {
        Debug.LogWarning("Connecting to server...");
        // if on desktop
        client.Connect(IPAddress.Loopback, port);

        // if using the IPAD
        //client.Connect(IPAddress.Parse(IP), port);
        Debug.LogWarning("Connected!");




    });

}

float lastTimeRequestedTex = 0;
// Update is called once per frame
void Update()
{

    //if (Time.time - lastTimeRequestedTex < 0.1f)
    //  return;

    lastTimeRequestedTex = Time.time;


    if (!client.Connected)
        return;


    // Send 1 byte to server 
    var serverStream = client.GetStream();

    // request the texture from the server 
    if (serverStream.CanWrite)
    {   
        // Texture request
        // send request
        serverStream.WriteByte(byte.MaxValue);
        serverStream.Flush();
        Debug.Log("Succesfully send 1 byte");
    }


    if (serverStream.CanRead)
    {

        // Read the bytes 
        using (var writer = new MemoryStream())
        {
            var readBuffer = new byte[client.ReceiveBufferSize];


            while (serverStream.DataAvailable)
            {

                int numberOfBytesRead = serverStream.Read(readBuffer, 0, readBuffer.Length);
                if (numberOfBytesRead <= 0)
                {
                    break;
                }

                writer.Write(readBuffer, 0, numberOfBytesRead);


            }

            if (writer.Length > 0)
            {
                // got whole data in writer
                // Get the bytes and apply them to the texture
                var tex = new Texture2D(0, 0);
                tex.LoadImage(writer.ToArray());
                Debug.Log(tex.width + tex.height);
                image.texture = tex;


            }   
        }
    }
}

void OnApplicationQuit()
{
    Debug.LogWarning("OnApplicationQuit");
    client.Close();
}
}

Solution

I ran your code and it sometimes worked and sometimes failed (it failed about 90% of the time). It ran at 5 FPS on my computer. That will not play well on the mobile device you are targeting, the iPad.

There are a few problems in your code, but they are very serious ones.


1. Your image is not completely received before you load it.

This is why your image looks so weird.

The biggest mistake people make when working with sockets is to assume that everything you send will be received at once. This is not true, but that is exactly how your client is coded.

Here is the approach I used in my answer:

A. Get the Texture2D byte array.

B. Send the length of the byte array first, not the byte array itself.

C. The client reads that length first.

D. The client then uses that length to read the whole texture data/pixels until completion.

E. The received bytes are assembled into an array and loaded into the texture.

Take a look at the private int readImageByteSize(int size) and private void readFrameByteArray(int size) functions to see how all the bytes are read.
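As a minimal illustration of what those functions do (a sketch, not the answer's exact code): a single NetworkStream.Read call may return fewer bytes than requested, so the read has to loop until the expected number of bytes has arrived.

using System.Net.Sockets;

static class StreamUtil
{
    // Reads exactly 'size' bytes into 'buffer'; returns false if the
    // connection was closed before that many bytes arrived.
    public static bool ReadExactly(NetworkStream stream, byte[] buffer, int size)
    {
        int total = 0;
        while (total < size)
        {
            int read = stream.Read(buffer, total, size - total);
            if (read == 0)
                return false;
            total += read;
        }
        return true;
    }
}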

Of course, you also have to know how long the length value that is sent first is. That length is stored as an int.

The maximum int value is 2,147,483,647, which is 10 digits long. So, as a protocol, I made the length of the array that is sent first 15 bytes. This is a rule that the client side must obey too.

This is how it works now:

Read the byte array from the Texture2D, take the length of that array, and send it to the client. The client follows the rule that the first 15 bytes are simply the length. The client reads those 15 bytes, converts them back into a length, then uses that length in a loop to read the complete Texture2D data from the server.

The length conversion is done with the void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes) and int frameByteArrayToByteLength(byte[] frameBytesLength) functions. Take a look at those to understand them.
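For reference, the 15-byte header boils down to the following sketch (it uses the same BitConverter calls as the answer's functions; the class name and Main method are only there to make the example standalone):

using System;

class LengthHeaderExample
{
    const int SEND_RECEIVE_COUNT = 15;

    // Pack the payload length into a fixed-size, zero-padded header.
    // BitConverter.GetBytes writes the int into the first 4 bytes.
    static byte[] MakeHeader(int payloadLength)
    {
        byte[] header = new byte[SEND_RECEIVE_COUNT];
        BitConverter.GetBytes(payloadLength).CopyTo(header, 0);
        return header;
    }

    // Recover the payload length from the first 4 bytes of the header.
    static int ReadHeader(byte[] header)
    {
        return BitConverter.ToInt32(header, 0);
    }

    static void Main()
    {
        byte[] header = MakeHeader(123456);
        Console.WriteLine(ReadHeader(header)); // prints 123456
    }
}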


2. Performing socket operations in the main Thread.

This is why the FPS was 5 on my computer.

Don't do this, because it keeps your frame rate low, just as it is now. I have answered many questions like this, but I won't go deep into it here because it looks like you know what you are doing and tried to use a Thread, but did it wrong.

A. You were reading on the main Thread when you called serverStream.Read(readBuffer, 0, readBuffer.Length); in the Update function. You should have done that inside:

Loom.RunAsync(() =>
{
    // your read code
});

B. You made the same mistake in the SendPng function when you sent the data with stream.Write(pngBytes, 0, pngBytes.Length); inside:

Loom.QueueOnMainThread(() =>
{
});

Anything you do inside Loom.QueueOnMainThread is executed on the main Thread.

You are supposed to do the sending in another Thread: Loom.RunAsync(() => { });
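Put together, the threading split the answer recommends looks roughly like this (a sketch that assumes the same Loom helper used in the full code below; readOneFrame() is a hypothetical stand-in for the blocking, length-prefixed socket read):

using UnityEngine;
using UnityEngine.UI;

public class ReceiveLoopSketch : MonoBehaviour
{
    public RawImage image;
    Texture2D tex;
    volatile bool stop = false;

    void Start()
    {
        tex = new Texture2D(2, 2);

        // All blocking I/O happens on a worker thread so Update() never stalls.
        Loom.RunAsync(() =>
        {
            while (!stop)
            {
                byte[] pngBytes = readOneFrame(); // hypothetical blocking read

                // Unity objects (Texture2D, RawImage) may only be touched on the main thread.
                Loom.QueueOnMainThread(() =>
                {
                    tex.LoadImage(pngBytes);
                    image.texture = tex;
                });
            }
        });
    }

    byte[] readOneFrame()
    {
        // Placeholder so the sketch compiles; the real client below does the
        // length-prefixed read here.
        return new byte[0];
    }

    void OnApplicationQuit() { stop = true; }
}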

Finally, listner = new TcpListener(port); is obsolete. It did not cause any problem here, but use listner = new TcpListener(IPAddress.Any, port); in your server code so that it listens on any IP.

After making all these fixes, the final FPS on my computer is over 50. The code below can still be improved a lot; I will leave that for you to do.

You can use an online code-compare tool to see what changed in each class.

SERVER:

using UnityEngine;
using System.Collections;
using System.IO;
using UnityEngine.UI;
using System;
using System.Text;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Collections.Generic;

public class Connecting : MonoBehaviour
{
    WebCamTexture webCam;
    public RawImage myImage;
    public bool enableLog = false;

    Texture2D currentTexture;

    private TcpListener listner;
    private const int port = 8010;
    private bool stop = false;

    private List<TcpClient> clients = new List<TcpClient>();

    //This must be the same as SEND_RECEIVE_COUNT on the client
    const int SEND_RECEIVE_COUNT = 15;

    private void Start()
    {
        Application.runInBackground = true;

        //Start WebCam coroutine
        StartCoroutine(initAndWaitForWebCamTexture());
    }


    //Converts the data size to byte array and put result to the fullBytes array
    void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
    {
        //Clear old data
        Array.Clear(fullBytes, 0, fullBytes.Length);
        //Convert int to bytes
        byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
        //Copy result to fullBytes
        bytesToSendCount.CopyTo(fullBytes, 0);
    }

    //Converts the byte array to the data size and returns the result
    int frameByteArrayToByteLength(byte[] frameBytesLength)
    {
        int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
        return byteLength;
    }

    IEnumerator initAndWaitForWebCamTexture()
    {
        // Open the Camera on the desired device, in my case IPAD pro
        webCam = new WebCamTexture();
        // Get all devices , front and back camera
        webCam.deviceName = WebCamTexture.devices[WebCamTexture.devices.Length - 1].name;

        // request the lowest width and height possible
        webCam.requestedHeight = 10;
        webCam.requestedWidth = 10;

        myImage.texture = webCam;

        webCam.Play();

        currentTexture = new Texture2D(webCam.width, webCam.height);

        // Create the server listener
        listner = new TcpListener(IPAddress.Any, port);

        listner.Start();

        while (webCam.width < 100)
        {
            yield return null;
        }

        //Start sending coroutine
        StartCoroutine(senderCOR());
    }

    WaitForEndOfFrame endOfFrame = new WaitForEndOfFrame();
    IEnumerator senderCOR()
    {

        bool isConnected = false;
        TcpClient client = null;
        NetworkStream stream = null;

        // Wait for client to connect in another Thread 
        Loom.RunAsync(() =>
        {
            while (!stop)
            {
                // Wait for client connection
                client = listner.AcceptTcpClient();
                // We are connected
                clients.Add(client);

                isConnected = true;
                stream = client.GetStream();
            }
        });

        //Wait until client has connected
        while (!isConnected)
        {
            yield return null;
        }

        LOG("Connected!");

        bool readyToGetFrame = true;

        byte[] frameBytesLength = new byte[SEND_RECEIVE_COUNT];

        while (!stop)
        {
            //Wait for End of frame
            yield return endOfFrame;

            currentTexture.SetPixels(webCam.GetPixels());
            byte[] pngBytes = currentTexture.EncodeToPNG();
            //Fill total byte length to send. Result is stored in frameBytesLength
            byteLengthToFrameByteArray(pngBytes.Length, frameBytesLength);

            //Set readyToGetFrame false
            readyToGetFrame = false;

            Loom.RunAsync(() =>
            {
                //Send total byte count first
                stream.Write(frameBytesLength, 0, frameBytesLength.Length);
                LOG("Sent Image byte Length: " + frameBytesLength.Length);

                //Send the image bytes
                stream.Write(pngBytes, 0, pngBytes.Length);
                LOG("Sending Image byte array data : " + pngBytes.Length);

                //Sent. Set readyToGetFrame true
                readyToGetFrame = true;
            });

            //Wait until we are ready to get new frame(Until we are done sending data)
            while (!readyToGetFrame)
            {
                LOG("Waiting To get new frame");
                yield return null;
            }
        }
    }


    void LOG(string messsage)
    {
        if (enableLog)
            Debug.Log(messsage);
    }

    private void Update()
    {
        myImage.texture = webCam;
    }

    // stop everything
    private void OnApplicationQuit()
    {
        if (webCam != null && webCam.isPlaying)
        {
            webCam.Stop();
            stop = true;
        }

        if (listner != null)
        {
            listner.Stop();
        }

        foreach (TcpClient c in clients)
            c.Close();
    }
}

CLIENT:

using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using System.Net.Sockets;
using System.Net;
using System.IO;
using System;

public class reciver : MonoBehaviour
{
    public RawImage image;
    public bool enableLog = false;

    const int port = 8010;
    public string IP = "192.168.1.165";
    TcpClient client;

    Texture2D tex;

    private bool stop = false;

    //This must be the same as SEND_RECEIVE_COUNT on the server
    const int SEND_RECEIVE_COUNT = 15;

    // Use this for initialization
    void Start()
    {
        Application.runInBackground = true;

        tex = new Texture2D(0, 0);
        client = new TcpClient();

        //Connect to server from another Thread
        Loom.RunAsync(() =>
        {
            LOGWARNING("Connecting to server...");
            // if on desktop
            client.Connect(IPAddress.Loopback, port);

            // if using the IPAD
            //client.Connect(IPAddress.Parse(IP), port);
            LOGWARNING("Connected!");

            imageReceiver();
        });
    }


    void imageReceiver()
    {
        //While loop in another Thread is fine so we don't block main Unity Thread
        Loom.RunAsync(() =>
        {
            while (!stop)
            {
                //Read the image size (length header)
                int imageSize = readImageByteSize(SEND_RECEIVE_COUNT);
                LOGWARNING("Received Image byte Length: " + imageSize);

                //Read Image Bytes and Display it
                readFrameByteArray(imageSize);
            }
        });
    }


    //Converts the data size to byte array and put result to the fullBytes array
    void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
    {
        //Clear old data
        Array.Clear(fullBytes, 0, fullBytes.Length);
        //Convert int to bytes
        byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
        //Copy result to fullBytes
        bytesToSendCount.CopyTo(fullBytes, 0);
    }

    //Converts the byte array to the data size and returns the result
    int frameByteArrayToByteLength(byte[] frameBytesLength)
    {
        int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
        return byteLength;
    }


    /////////////////////////////////////////////////////Read Image SIZE from Server///////////////////////////////////////////////////
    private int readImageByteSize(int size)
    {
        bool disconnected = false;

        NetworkStream serverStream = client.GetStream();
        byte[] imageBytesCount = new byte[size];
        var total = 0;
        do
        {
            var read = serverStream.Read(imageBytesCount, total, size - total);
            //Debug.LogFormat("Client recieved {0} bytes", total);
            if (read == 0)
            {
                disconnected = true;
                break;
            }
            total += read;
        } while (total != size);

        int byteLength;

        if (disconnected)
        {
            byteLength = -1;
        }
        else
        {
            byteLength = frameByteArrayToByteLength(imageBytesCount);
        }
        return byteLength;
    }

    /////////////////////////////////////////////////////Read Image Data Byte Array from Server///////////////////////////////////////////////////
    private void readFrameByteArray(int size)
    {
        bool disconnected = false;

        NetworkStream serverStream = client.GetStream();
        byte[] imageBytes = new byte[size];
        var total = 0;
        do
        {
            var read = serverStream.Read(imageBytes, total, size - total);
            //Debug.LogFormat("Client recieved {0} bytes", total);
            if (read == 0)
            {
                disconnected = true;
                break;
            }
            total += read;
        } while (total != size);

        bool readyToReadAgain = false;

        //Display Image
        if (!disconnected)
        {
            //Display Image on the main Thread
            Loom.QueueOnMainThread(() =>
            {
                displayReceivedImage(imageBytes);
                readyToReadAgain = true;
            });
        }

        //Wait until old Image is displayed
        while (!readyToReadAgain)
        {
            System.Threading.Thread.Sleep(1);
        }
    }


    void displayReceivedImage(byte[] receivedImageBytes)
    {
        tex.LoadImage(receivedImageBytes);
        image.texture = tex;
    }


    // Update is called once per frame
    void Update()
    {


    }


    void LOG(string messsage)
    {
        if (enableLog)
            Debug.Log(messsage);
    }

    void LOGWARNING(string messsage)
    {
        if (enableLog)
            Debug.LogWarning(messsage);
    }

    void OnApplicationQuit()
    {
        LOGWARNING("OnApplicationQuit");
        stop = true;

        if (client != null)
        {
            client.Close();
        }
    }
}
