Using Live555 to stream live video from an IP camera connected to an H264 encoder

I am using a custom board based on the Texas Instruments OMAP-L138, which essentially consists of an ARM9-based SoC and a DSP processor, attached to a camera lens. What I am trying to do is capture the live video stream, which is sent to the DSP for H264 encoding and then delivered over the uPP peripheral in 8192-byte packets. I want to use the testH264VideoStreamer program supplied with Live555 to stream the H264-encoded video over RTSP. My modified code is shown below:

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h> // for read()
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <stdint.h> // for uint8_t

UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

//-------------------------------------------------------------------------------
/* Open the uPP device file descriptor */
int stream = open("/dev/upp", O_RDONLY);
/* Static 8192-byte buffer that keeps its contents between invocations */
static uint8_t buf[8192];
//------------------------------------------------------------------------------
// Forward declaration of play()
//------------------------------------------------------------------------------
void play(); // forward

//------------------------------------------------------------------------------
// MAIN FUNCTION / ENTRY POINT
//------------------------------------------------------------------------------
int main(int argc, char** argv)
{
// Begin by setting up our live555 usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);

// Create 'groupsocks' for RTP and RTCP:
struct in_addr destinationAddress;
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
// Note: This is a multicast address.  If you wish instead to stream
// using unicast, then you should use the "testOnDemandRTSPServer"
// test program - not this test program - as a model.

const unsigned short rtpPortNum = 18888;
const unsigned short rtcpPortNum = rtpPortNum+1;
const unsigned char ttl = 255;

const Port rtpPort(rtpPortNum);
const Port rtcpPort(rtcpPortNum);

Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
rtpGroupsock.multicastSendOnly(); // we're a SSM source
Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
rtcpGroupsock.multicastSendOnly(); // we're a SSM source

// Create a 'H264 Video RTP' sink from the RTP 'groupsock':
OutPacketBuffer::maxSize = 1000000;
videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

// Create (and start) a 'RTCP instance' for this RTP sink:
const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0'; // just in case
RTCPInstance* rtcp
= RTCPInstance::createNew(*env, &rtcpGroupsock,
estimatedSessionBandwidth, CNAME,
videoSink, NULL /* we're a server */,
True /* we're a SSM source */);
// Note: This starts RTCP running automatically

/*Create RTSP SERVER*/
RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
ServerMediaSession* sms
= ServerMediaSession::createNew(*env, "IPCAM @ TeReSol","UPP Buffer" ,
"Session streamed by \"testH264VideoStreamer\"",
True /*SSM*/);
sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
rtspServer->addServerMediaSession(sms);

char* url = rtspServer->rtspURL(sms);
*env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;

// Start the streaming:
*env << "Beginning streaming...\n";
play();

env->taskScheduler().doEventLoop(); // does not return

return 0; // only to prevent compiler warning
}

//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/)
{
*env << "...done reading from upp buffer\n";
//videoSink->stopPlaying();
//Medium::close(videoSource);
// Note that this also closes the input file that this source read from.

// Start playing once again to get the next stream
play();

/* We don't need to close the device as long as we keep reading from it. If we did, we would call: close(stream); */

}

//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream
//----------------------------------------------------------------------------------------------
void play()
{
/* Read sizeof buf bytes from the device file descriptor into buf */
read(stream, buf, sizeof buf);
printf("Reading from UPP into buffer\n");

/*Open the input file as a 'byte-stream file source': */
ByteStreamMemoryBufferSource* buffSource
= ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf, False /* deleteBufferOnClose */);
/* Passing False as the last createNew() argument tells the source not to delete our static buffer when it closes */

if (buffSource == NULL)
{
*env << "Unable to read from\"" << "Buffer"<< "\" as a byte-stream source\n";
exit(1);
}

FramedSource* videoES = buffSource;
// Create a framer for the Video Elementary Stream:
videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);
// Finally, start playing:
*env << "Beginning to read from UPP...\n";
videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

The problem is that the code compiles successfully, but I cannot get the desired result: the RTSP stream in VLC player sits in play mode, yet I see no video. I would be grateful for any help with this. My description may be a bit vague, but I am happy to explain any part of it in more detail.
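Before digging into the Live555 side, one sanity check worth running is to confirm that the encoder output really is an Annex-B H.264 byte stream, since H264VideoStreamFramer looks for 0x00000001 start codes. A minimal standalone sketch, assuming /dev/upp delivers the 8192-byte packets described above:

#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

int main()
{
    int fd = open("/dev/upp", O_RDONLY);
    if (fd < 0) { perror("open /dev/upp"); return 1; }

    uint8_t pkt[8192];
    ssize_t n = read(fd, pkt, sizeof pkt);
    close(fd);
    if (n <= 0) { perror("read"); return 1; }

    // Look for an Annex-B start code (00 00 01 or 00 00 00 01):
    for (ssize_t i = 0; i + 3 < n; ++i) {
        if (pkt[i] == 0 && pkt[i+1] == 0 &&
            (pkt[i+2] == 1 || (pkt[i+2] == 0 && pkt[i+3] == 1))) {
            printf("Start code found at offset %zd\n", i);
            return 0;
        }
    }
    printf("No start code in the first %zd bytes\n", n);
    return 1;
}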


Solution

So, I figured out what needed to be done, and I am writing it up for anyone who might run into a similar problem. I had to modify my testH264VideoStreamer.cpp and DeviceSource.cpp files so that they read data directly from the device (in my case the custom AM1808-based board), store it in a buffer, and stream it. The changes I made were:
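The key difference from the first attempt is that a single DeviceSource object now stays alive for the whole session: Live555 calls its doGetNextFrame() whenever the sink needs more data, and the source responds by copying one packet into the sink's buffer and calling FramedSource::afterGetting(). That keeps one H264VideoStreamFramer running continuously, instead of creating a fresh memory-buffer source and framer for every 8192-byte read.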

testH264VideoStreamer.cpp

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h> // for read()
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>

UsageEnvironment* env;

H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward
//-------------------------------------------------------------------------
//Entry Point -> Main FUNCTION
//-------------------------------------------------------------------------

int main(int argc, char** argv) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);

// Create 'groupsocks' for RTP and RTCP:
struct in_addr destinationAddress;
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
// Note: This is a multicast address.  If you wish instead to stream
// using unicast, then you should use the "testOnDemandRTSPServer"
// test program - not this test program - as a model.

const unsigned short rtpPortNum = 18888;
const unsigned short rtcpPortNum = rtpPortNum+1;
const unsigned char ttl = 255;

const Port rtpPort(rtpPortNum);
const Port rtcpPort(rtcpPortNum);

Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
rtpGroupsock.multicastSendOnly(); // we're a SSM source
Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
rtcpGroupsock.multicastSendOnly(); // we're a SSM source

// Create a 'H264 Video RTP' sink from the RTP 'groupsock':
OutPacketBuffer::maxSize = 600000;
videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

// Create (and start) a 'RTCP instance' for this RTP sink:
const unsigned estimatedSessionBandwidth = 1024; // in kbps; for RTCP b/w share
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0'; // just in case
RTCPInstance* rtcp
= RTCPInstance::createNew(*env, &rtcpGroupsock,
estimatedSessionBandwidth, CNAME,
videoSink, NULL /* we're a server */,
True /* we're a SSM source */);
// Note: This starts RTCP running automatically

RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
ServerMediaSession* sms
= ServerMediaSession::createNew(*env, "ipcamera","UPP Buffer" ,
"Session streamed by \"testH264VideoStreamer\"",
True /*SSM*/);
sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
rtspServer->addServerMediaSession(sms);

char* url = rtspServer->rtspURL(sms);
*env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;

// Start the streaming:
*env << "Beginning streaming...\n";
play();

env->taskScheduler().doEventLoop(); // does not return

return 0; // only to prevent compiler warning
}
//----------------------------------------------------------------------
//AFTER PLAY FUNCTION CALLED HERE
//----------------------------------------------------------------------
void afterPlaying(void* /*clientData*/)
{

play();
}
//------------------------------------------------------------------------
//PLAY FUNCTION ()
//------------------------------------------------------------------------
void play()
{
// Open the device as the frame source:
DeviceSource* devSource
= DeviceSource::createNew(*env);
if (devSource == NULL)
{
*env << "Unable to read from \"" << "/dev/upp" << "\" as a device source\n";
exit(1);
}

FramedSource* videoES = devSource;

// Create a framer for the Video Elementary Stream:
videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);

// Finally, start playing:
*env << "Beginning to read from UPP...\n";
videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

DeviceSource.cpp

#include "DeviceSource.hh"#include <GroupsockHelper.hh> // for "gettimeofday()"#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h>

// Static 8192-byte buffer that holds one uPP packet between invocations
static uint8_t buf[8192];
int upp_stream;

DeviceSource*
DeviceSource::createNew(UsageEnvironment& env)
{

return new DeviceSource(env);
}

EventTriggerId DeviceSource::eventTriggerId = 0;

unsigned DeviceSource::referenceCount = 0;

DeviceSource::DeviceSource(UsageEnvironment& env): FramedSource(env)
{
if (referenceCount == 0)
{
// Open the uPP device the first time an instance is created
upp_stream = open("/dev/upp", O_RDWR);
}
++referenceCount;

if (eventTriggerId == 0)
{
eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}
}

DeviceSource::~DeviceSource(void) {
--referenceCount;
envir().taskScheduler().deleteEventTrigger(eventTriggerId);
eventTriggerId = 0;

if (referenceCount == 0)
{
// Last instance gone: close the uPP device
close(upp_stream);
}
}

void DeviceSource::doGetNextFrame()
{
// Each read() is assumed to return one full 8192-byte uPP packet
// of H.264 elementary-stream data
read(upp_stream, buf, sizeof buf);
deliverFrame();
}

void DeviceSource::deliverFrame0(void* clientData)
{
((DeviceSource*)clientData)->deliverFrame();
}

void DeviceSource::deliverFrame()
{
if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

u_int8_t* newFrameDataStart = (u_int8_t*) buf;
unsigned newFrameSize = sizeof(buf);

// Deliver the data here:
if (newFrameSize > fMaxSize) {
fFrameSize = fMaxSize;
fNumTruncatedBytes = newFrameSize - fMaxSize;
} else {
fFrameSize = newFrameSize;
}
gettimeofday(&fPresentationTime, NULL);
memmove(fTo, newFrameDataStart, fFrameSize);
FramedSource::afterGetting(this);
}
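
The post above doesn't include DeviceSource.hh; a minimal header matching these definitions would look roughly like this (modelled on the stock Live555 DeviceSource template, with the DeviceParameters argument dropped to match the createNew() used here):

#ifndef _DEVICE_SOURCE_HH
#define _DEVICE_SOURCE_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

class DeviceSource: public FramedSource {
public:
  static DeviceSource* createNew(UsageEnvironment& env);
  static EventTriggerId eventTriggerId;

protected:
  DeviceSource(UsageEnvironment& env);
  virtual ~DeviceSource();

private:
  // redefined virtual function:
  virtual void doGetNextFrame();

  static void deliverFrame0(void* clientData);
  void deliverFrame();

  static unsigned referenceCount; // how many instances of this class currently exist
};

#endif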

After compiling the code with these modifications, I was able to get the video stream in VLC player.
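For anyone testing this: the program prints the stream URL at startup; with the values above it has the form rtsp://<board-ip>:8554/ipcamera (the port and stream name come from the RTSPServer::createNew() and ServerMediaSession::createNew() calls), and opening that URL in VLC via Media > Open Network Stream should show the live feed.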
