Using Live555 to Stream Live Video from an IP camera connected to an H264 encoder


Problem Description

I am using a custom Texas Instruments OMAP-L138 based board that basically consists of an ARM9-based SoC and a DSP processor. It is connected to a camera lens. What I'm trying to do is capture the live video stream, send it to the DSP processor for H264 encoding, and receive the encoded stream over uPP in packets of 8192 bytes. I want to use the testH264VideoStreamer supplied by Live555 to live-stream the H264-encoded video over RTSP. The code I have modified is shown below:

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h> //to allow read() function




UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;


//-------------------------------------------------------------------------------
/* Open File Descriptor*/
int stream = open("/dev/upp", O_RDONLY);
/* Declaring a static 8 bit unsigned integer of size 8192 bytes that keeps its value between invocations */
static uint8_t buf[8192];
//------------------------------------------------------------------------------


//------------------------------------------------------------------------------
// Execute play function as a forwarding mechanism
//------------------------------------------------------------------------------
void play(); // forward


//------------------------------------------------------------------------------
// MAIN FUNCTION / ENTRY POINT 
//------------------------------------------------------------------------------
int main(int argc, char** argv) 
{
    // Begin by setting up our live555 usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create 'groupsocks' for RTP and RTCP:
    struct in_addr destinationAddress;
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
    // Note: This is a multicast address.  If you wish instead to stream
    // using unicast, then you should use the "testOnDemandRTSPServer"
    // test program - not this test program - as a model.

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum+1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly(); // we're a SSM source
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
    OutPacketBuffer::maxSize = 1000000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    RTCPInstance* rtcp
    = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
    // Note: This starts RTCP running automatically

    /*Create RTSP SERVER*/
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) 
    {
         *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
         exit(1);
    }
    ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, "IPCAM @ TeReSol","UPP Buffer" ,
           "Session streamed by \"testH264VideoStreamer\"",
                       True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    // Start the streaming:
    *env << "Beginning streaming...\n";
    play();

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}



//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/) 
{
    *env << "...done reading from upp buffer\n";
    //videoSink->stopPlaying();
    //Medium::close(videoSource);
    // Note that this also closes the input file that this source read from.

    // Start playing once again to get the next stream      
    play();

    /* We don't need to close the dev as long as we're reading from it. But if we do, use: close( "/dev/upp", O_RDWR);*/ 

}



//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream 
//----------------------------------------------------------------------------------------------
void play()
{



    /* Read nbytes of buffer (sizeof buf ) from the filedescriptor stream and assign them to address where buf is located */
    read(stream, &buf, sizeof buf);
    printf("Reading from UPP in to Buffer");

    /*Open the input file as a 'byte-stream file source': */
    ByteStreamMemoryBufferSource* buffSource
        = ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf,False/*Empty Buffer After Reading*/);
    /* Passing False in the above createNew() call means that the buffer would be read at once */

    if (buffSource == NULL) 
    {
      *env << "Unable to read from\"" << "Buffer"
           << "\" as a byte-stream source\n";
          exit(1);
    }

    FramedSource* videoES = buffSource;
    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES,False);
    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

The problem is that, although the code compiles successfully, I'm unable to get the desired output: the RTSP stream shows as playing in VLC, but I can't see any video. I'd be grateful for any assistance in this matter. My description might be a little vague, but I'm happy to further explain any part that is required.

Solution

Okay, so I figured out what needed to be done and am writing this for the benefit of all who might face a similar issue. What I needed to do was modify my testH264VideoStreamer.cpp and DeviceSource.cpp files so that they read data directly from the device (in my case, the custom AM1808 board), store it in a buffer, and stream it. The changes I made were:

testH264VideoStreamer.cpp

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h> //to allow read() function




UsageEnvironment* env;

H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward
//-------------------------------------------------------------------------
//Entry Point -> Main FUNCTION  
//-------------------------------------------------------------------------

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
  OutPacketBuffer::maxSize = 600000;
  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 1024; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp
  = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "ipcamera","UPP Buffer" ,
           "Session streamed by \"testH264VideoStreamer\"",
                       True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
//----------------------------------------------------------------------
//AFTER PLAY FUNCTION CALLED HERE
//----------------------------------------------------------------------
void afterPlaying(void* /*clientData*/) 
{

    play();
}
//------------------------------------------------------------------------
//PLAY FUNCTION () 
//------------------------------------------------------------------------
void play()
{


      // Open the input file as with Device as the source:
    DeviceSource* devSource
        = DeviceSource::createNew(*env);
    if (devSource == NULL) 
    {

          *env << "Unable to read from\"" << "Buffer"
           << "\" as a byte-stream source\n";
          exit(1);
    }

    FramedSource* videoES = devSource;

    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES,False);

    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

DeviceSource.cpp

#include "DeviceSource.hh"
#include <GroupsockHelper.hh> // for "gettimeofday()"
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h>

//static uint8_t *buf = (uint8_t*)malloc(102400);
static uint8_t buf[8192];
int upp_stream;
//static uint8_t *bufPtr = buf;

DeviceSource*
DeviceSource::createNew(UsageEnvironment& env)
{

  return new DeviceSource(env);
}

EventTriggerId DeviceSource::eventTriggerId = 0;

unsigned DeviceSource::referenceCount = 0;

DeviceSource::DeviceSource(UsageEnvironment& env):FramedSource(env) 
{ 

  if (referenceCount == 0) 
  {

      upp_stream = open("/dev/upp",O_RDWR);

  }
  ++referenceCount;

  if (eventTriggerId == 0) 
  {
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
  }
}

DeviceSource::~DeviceSource(void) {
  --referenceCount;
  envir().taskScheduler().deleteEventTrigger(eventTriggerId);
  eventTriggerId = 0;

  if (referenceCount == 0) 
  {

  }
}

int loop_count;

void DeviceSource::doGetNextFrame() 
{

    //for (loop_count=0; loop_count < 13; loop_count++)
    //{
        read(upp_stream,buf, 8192);

        //bufPtr+=8192;

    //}
    deliverFrame();

}

void DeviceSource::deliverFrame0(void* clientData) 
{
  ((DeviceSource*)clientData)->deliverFrame();
}

void DeviceSource::deliverFrame() 
{


  if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

  u_int8_t* newFrameDataStart = (u_int8_t*) buf;             //(u_int8_t*) buf; //%%% TO BE WRITTEN %%%
  unsigned newFrameSize = sizeof(buf); //%%% TO BE WRITTEN %%%
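  // Note: newFrameSize is always the full 8192-byte buffer; the return value of the
  // read() in doGetNextFrame() is not checked, so a short read would still deliver
  // sizeof(buf) bytes downstream.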

  // Deliver the data here:
  if (newFrameSize > fMaxSize) {
    fFrameSize = fMaxSize;
    fNumTruncatedBytes = newFrameSize - fMaxSize;
  } else {
    fFrameSize = newFrameSize;
  }
  gettimeofday(&fPresentationTime, NULL); 
  memmove(fTo, newFrameDataStart, fFrameSize);
  FramedSource::afterGetting(this);
}
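
DeviceSource.hh isn't shown above. As a sketch (not the exact header used on the board), a minimal header consistent with the calls made in DeviceSource.cpp — essentially live555's DeviceSource template with the DeviceParameters argument dropped from createNew() — would look roughly like this:

#ifndef _DEVICE_SOURCE_HH
#define _DEVICE_SOURCE_HH

#include "FramedSource.hh"

class DeviceSource: public FramedSource {
public:
  static DeviceSource* createNew(UsageEnvironment& env);

  // Signalled (in principle) by the capture side to wake the event loop; see the note below.
  static EventTriggerId eventTriggerId;

protected:
  DeviceSource(UsageEnvironment& env);
  virtual ~DeviceSource();

private:
  // redefined virtual function:
  virtual void doGetNextFrame();

  static void deliverFrame0(void* clientData);
  void deliverFrame();

  static unsigned referenceCount; // counts how many instances of this class currently exist
};

#endif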

After compiling the code with these modifications, I was able to receive the video stream in VLC player.
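
A note on the code above: the eventTriggerId created in the DeviceSource constructor is never actually signalled, because doGetNextFrame() reads /dev/upp and delivers the frame synchronously. The sketch below is hypothetical (the thread function uppReaderThread and its reuse of the shared upp_stream/buf are illustrative, not part of the original answer); it shows how the live555 DeviceSource template intends the trigger to be used, with a separate capture thread blocking on the device and calling triggerEvent(), after which the event loop runs deliverFrame0()/deliverFrame():

#include <pthread.h>

// Hypothetical capture thread added inside DeviceSource.cpp, so it can see the
// static 'upp_stream' and 'buf'. A real implementation would hand off a queued
// copy of each packet rather than sharing the single 'buf'.
static void* uppReaderThread(void* clientData) {
  DeviceSource* source = (DeviceSource*)clientData;
  for (;;) {
    // Block until one 8192-byte uPP packet is available.
    if (read(upp_stream, buf, sizeof buf) <= 0) break;
    // triggerEvent() may be called from a non-live555 thread; the handler
    // (deliverFrame0) runs later inside the event loop's thread.
    source->envir().taskScheduler().triggerEvent(DeviceSource::eventTriggerId, source);
  }
  return NULL;
}

// Started once, e.g. from the DeviceSource constructor:
//   pthread_t tid;
//   pthread_create(&tid, NULL, uppReaderThread, this);

In that arrangement, doGetNextFrame() would only record that a read is pending, and deliverFrame() would copy a queued packet into fTo when the event fires.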
