Hello,
I am developing an RTSP server application using Live555 (mediaServer). Latency
keeps increasing on the lower-frame-rate camera whenever two camera streams are
played simultaneously in VLC.
I receive the video feed from IP cameras, use Live555 to deliver it to client
applications, and also record it. Playing one source at a time works fine. The
source configuration is as follows:
Stream 1: Camera 1 at 30 FPS
Stream 2: Camera 2 at 24 FPS
I have created a queue that is dequeued by deliverFrame() in my class derived
from FramedSource. I can see the queue for Stream 2 growing over time. The
source files are attached below for reference.
If I use the event trigger instead, the deliver-frame function is called, but it
prints the "we're not ready for the data yet" error because
isCurrentlyAwaitingData() returns False, and the queued data is not consumed.
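For reference, this is the event-trigger delivery pattern I am comparing
against (a minimal sketch based on the DeviceSource example that ships with
Live555; onCameraFrame() is a hypothetical name for my capture callback, not a
function from the attached files):

#include <sys/time.h>
#include "LiveSourceWithx264.hh"

// Capture thread: enqueue the frame, then wake the event loop.
// triggerEvent() is the one TaskScheduler call that is documented as safe
// to invoke from a thread other than the event-loop thread.
void onCameraFrame(LiveSourceWithx264* src, unsigned char* data, unsigned size)
{
    struct timeval pt;
    gettimeofday(&pt, NULL);
    src->LiveStream_cameraFrameData(data, size, pt, 0);
    src->envir().taskScheduler().triggerEvent(src->eventTriggerId, src);
}

// Event-loop thread: deliverFrame0() forwards to deliverFrame(), which must
// return immediately when isCurrentlyAwaitingData() is False, copy at most one
// frame into fTo (respecting fMaxSize), and finish with
// FramedSource::afterGetting(this) -- without ever blocking.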
Can you please suggest what the issue could be?
Thanks & Regards,
Renish Tala
Engineer | PES | e-infochips
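/* ---- Attachment 1: H264LiveServerMediaSession.cpp (file name inferred from its contents) ---- */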
#include "H264LiveServerMediaSession.hh"
H264LiveServerMediaSession*
H264LiveServerMediaSession::createNew(UsageEnvironment& env, bool
reuseFirstSource)
{
return new H264LiveServerMediaSession(env, reuseFirstSource);
}
H264LiveServerMediaSession::H264LiveServerMediaSession(UsageEnvironment& env,
bool reuseFirstSource)
:OnDemandServerMediaSubsession(env,reuseFirstSource),fAuxSDPLine(NULL),
fDoneFlag(0), fDummySink(NULL)
{
mtx.lock();
liveVideosource = nullptr;
liveSourceAvailable = 0;
eventTriggerId = envir().taskScheduler().createEventTrigger(LiveSourceWithx264::deliverFrame0);
mtx.unlock();
}
H264LiveServerMediaSession::~H264LiveServerMediaSession(void)
{
delete[] fAuxSDPLine;
mtx.lock();
envir().taskScheduler().deleteEventTrigger(eventTriggerId);
eventTriggerId = 0;
mtx.unlock();
}
static void afterPlayingDummy(void* clientData)
{
H264LiveServerMediaSession *session =
(H264LiveServerMediaSession*)clientData;
session->afterPlayingDummy1();
}
void H264LiveServerMediaSession::afterPlayingDummy1()
{
envir().taskScheduler().unscheduleDelayedTask(nextTask());
setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData)
{
H264LiveServerMediaSession* session =
(H264LiveServerMediaSession*)clientData;
session->checkForAuxSDPLine1();
}
void H264LiveServerMediaSession::checkForAuxSDPLine1() {
char const* dasl;
int uSecsToDelay = 100000; // 100 ms
if (fAuxSDPLine != NULL) {
// Signal the event loop that we're done:
setDoneFlag();
} else if (fDummySink != NULL && (dasl = fDummySink->auxSDPLine()) != NULL)
{
fAuxSDPLine = strDup(dasl);
fDummySink = NULL;
// Signal the event loop that we're done:
setDoneFlag();
} else if (!fDoneFlag) {
// try again after a brief delay:
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
(TaskFunc*)checkForAuxSDPLine, this);
}
}
char const* H264LiveServerMediaSession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
    if (fAuxSDPLine != NULL) return fAuxSDPLine; // already set up (for a previous client)
    if (fDummySink == NULL) { // we're not already setting it up for another, concurrent stream
        // Note: For H264 video, the 'config' information ("profile-level-id" and
        // "sprop-parameter-sets") isn't known until we start reading the stream.
        // This means that "rtpSink"s "auxSDPLine()" will be NULL initially, and
        // we need to start reading data from our source until this changes.
        fDummySink = rtpSink;
        // Start reading the stream:
        fDummySink->startPlaying(*inputSource, afterPlayingDummy, this);
        // Check whether the sink's 'auxSDPLine()' is ready:
        checkForAuxSDPLine(this);
    }
    envir().taskScheduler().doEventLoop(&fDoneFlag);
    return fAuxSDPLine;
}
FramedSource* H264LiveServerMediaSession::createNewStreamSource(unsigned clientSessionID, unsigned& estBitRate)
{
    mtx.lock();
    estBitRate = 500; // kbps, estimate
    liveSourceAvailable = 1;
    liveVideosource = LiveSourceWithx264::createNew(envir(), this);
    liveVideosource->m_plive555TaskScheduler = live555TaskScheduler;
    mtx.unlock();
    // Note: don't keep a reference to this source anywhere else. Live555
    // creates and deletes this object many times; a stored pointer would go
    // stale. The source should instead always pull from the data queue.
    return H264VideoStreamDiscreteFramer::createNew(envir(), liveVideosource);
}
RTPSink* H264LiveServerMediaSession::createNewRTPSink(Groupsock* rtpGroupsock,
        unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
    // videoSPSData/videoPPSData (and their sizes) are assumed to have been
    // populated elsewhere before the first client connects.
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                       (u_int8_t const*) videoSPSData, spsSize,
                                       (u_int8_t const*) videoPPSData, ppsSize);
}
void H264LiveServerMediaSession::LiveStream_cameraFrameData(unsigned char* mFrameByte,
        unsigned mFrameSize, struct timeval mPresentationTime, uint64_t timeStamp)
{
    mtx.lock();
    // Drop the frame if no live source is currently active (note: this must be
    // '||', not '&&', so a NULL liveVideosource is never dereferenced below).
    if (liveSourceAvailable == 0 || liveVideosource == NULL) {
        mtx.unlock();
        return;
    }
    liveVideosource->LiveStream_cameraFrameData(mFrameByte, mFrameSize, mPresentationTime, timeStamp);
    mtx.unlock();
}
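/* ---- Attachment 2: LiveSourceWithx264.hh (file name inferred from its contents) ---- */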
#ifndef LIVE_FRAME_SOURCE_HH_
#define LIVE_FRAME_SOURCE_HH_
#ifndef _USAGE_ENVIRONMENT_HH
#include "UsageEnvironment.hh"
#endif
#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
#include <time.h>
#include <iostream>   // std::cout
#include <thread>     // std::thread
#include <mutex>
#include <stdio.h>
#include <stdlib.h>
#include <queue>
#include <semaphore.h>
#include <fcntl.h>
#include <sys/stat.h>
typedef struct ll
{
    unsigned char *Framedata;        // heap-allocated copy of one encoded frame
    unsigned framesize;
    struct timeval PresentationTime;
    uint64_t frameArrivalTime;
} QUEUE;                             // one queued frame entry (despite the name)
#define MAX_BUFFER_VIDEO (50)            // NOTE: neither limit is enforced in
#define MAX_FRAME_SIZE_VIDEO (1024*1024) // this code; camQueue can grow unbounded
class LiveSourceWithx264: public FramedSource {
public:
std::string streamName;
static LiveSourceWithx264* createNew(UsageEnvironment& env, void *livesession);
EventTriggerId eventTriggerId;
void LiveStream_cameraFrameData(unsigned char* mFrameByte, unsigned mFrameSize,
        struct timeval mPresentationTime, uint64_t timeStamp);
void stopLiveFrameSources();
static void deliverFrame0(void* clientData);
uint64_t getSystemDateTimeInMS(void);
TaskScheduler* m_plive555TaskScheduler = NULL;
protected:
LiveSourceWithx264(UsageEnvironment& env,void *livesession);
virtual ~LiveSourceWithx264(void);
private:
void doGetNextFrame();
void deliverFrame();
static unsigned referenceCount;
bool isContinue;
void *videoLiveSession;
std::mutex mtx;
QUEUE *head;
std::queue<QUEUE> camQueue;
sem_t *waitForFrame;
uint32_t frameTimeDiffDuration = 0;
uint64_t prevSystemTime = 0;
uint64_t currSystemTime = 0;
int frameCounter = 0;
};
#endif
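/* ---- Attachment 3: LiveSourceWithx264.cpp (file name inferred from its contents) ---- */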
#include "LiveSourceWithx264.hh"
#include <GroupsockHelper.hh>
#include "InputFile.hh"
#include "H264VideoRTPSink.hh"
#include "H264VideoStreamFramer.hh"
#include "H264or5VideoRTPSink.hh"
#include "H264or5VideoStreamFramer.hh"
#include "H264LiveServerMediaSession.hh"
uint64_t LiveSourceWithx264::getSystemDateTimeInMS(void) {
uint64_t timeInMS = 0;
struct timeval currentTime = { 0 }; /*< Current time */
gettimeofday(&currentTime, NULL);
timeInMS = ((uint64_t) (currentTime.tv_sec * (uint64_t) 1000)
+ (uint64_t) (currentTime.tv_usec / (uint64_t) 1000));
return timeInMS;
}
void LiveSourceWithx264::LiveStream_cameraFrameData(unsigned char* mFrameByte,
unsigned mFrameSize, struct timeval mPresentationTime,
uint64_t packetArrivalTime) {
isContinue = true;
QUEUE camFrame;
camFrame.Framedata = (unsigned char *) calloc(1, mFrameSize);
camFrame.framesize = mFrameSize;
camFrame.PresentationTime = mPresentationTime;
camFrame.frameArrivalTime = packetArrivalTime;
memcpy(camFrame.Framedata, mFrameByte, mFrameSize);
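// NOTE: camQueue is pushed here on the camera/capture thread and popped in
// deliverFrame() on the Live555 event-loop thread; std::queue itself provides
// no thread safety.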
camQueue.push(camFrame);
// Event-trigger alternative (currently commented out):
// envir().taskScheduler().triggerEvent(
//     ((H264LiveServerMediaSession *)videoLiveSession)->eventTriggerId, this);
if (isContinue) {
    sem_post(waitForFrame); // signal that a new frame is available
}
}
void LiveSourceWithx264::stopLiveFrameSources() {
isContinue = false;
}
LiveSourceWithx264* LiveSourceWithx264::createNew(UsageEnvironment& env,
void *livesession) {
return new LiveSourceWithx264(env, livesession);
}
LiveSourceWithx264::LiveSourceWithx264(UsageEnvironment& env, void
*livesession) :
FramedSource(env) {
isContinue = true;
head = NULL;
eventTriggerId = 0;
videoLiveSession = livesession;
frameTimeDiffDuration = 0;
if (videoLiveSession != NULL) {
    ((H264LiveServerMediaSession *) videoLiveSession)->liveSourceAvailable = false;
}
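// NOTE: sem_open() with a fixed name ("Aframe") returns the same named POSIX
// semaphore for every LiveSourceWithx264 instance in the process, so all
// streams share one counter.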
waitForFrame = sem_open("Aframe", O_CREAT, 0644, 0);
}
LiveSourceWithx264::~LiveSourceWithx264(void) {
((H264LiveServerMediaSession *) videoLiveSession)->mtx.lock();
((H264LiveServerMediaSession *) videoLiveSession)->liveSourceAvailable = 0;
((H264LiveServerMediaSession *) videoLiveSession)->liveVideosource = NULL;
isContinue = false;
frameTimeDiffDuration = 0;
sem_close(waitForFrame); // sem_open'd semaphores are released with sem_close(), not sem_destroy()
while (!camQueue.empty()) {
QUEUE camFrame;
camFrame = camQueue.front();
free(camFrame.Framedata);
camFrame.Framedata = NULL;
camQueue.pop();
}
((H264LiveServerMediaSession *) videoLiveSession)->mtx.unlock();
}
void LiveSourceWithx264::deliverFrame0(void* clientData) {
((LiveSourceWithx264*) clientData)->deliverFrame();
}
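// doGetNextFrame() is called by the downstream object (here the
// H264VideoStreamDiscreteFramer) each time it wants the next frame.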
void LiveSourceWithx264::doGetNextFrame() {
deliverFrame();
}
void LiveSourceWithx264::deliverFrame() {
    if (!isCurrentlyAwaitingData()) {
        printf("we're not ready for the data yet StreamName %s size %u\n",
               streamName.c_str(), (unsigned) camQueue.size());
        return; // we're not ready for the data yet
    }
    if (isContinue) {
        QUEUE camFrame;
        if (!camQueue.empty()) {
            camFrame = camQueue.front();
            gettimeofday(&fPresentationTime, NULL);
            currSystemTime = getSystemDateTimeInMS();
            printf("Queuesize %u delta %llu %s\n",
                   (unsigned) camQueue.size(),
                   (unsigned long long) (currSystemTime - prevSystemTime),
                   streamName.c_str());
            prevSystemTime = currSystemTime;
            fFrameSize = camFrame.framesize;
            if (fFrameSize > fMaxSize) {
                // The sink's buffer is smaller than the frame: truncate, as in
                // Live555's DeviceSource example.
                fNumTruncatedBytes = fFrameSize - fMaxSize;
                fFrameSize = fMaxSize;
            }
            memmove(fTo, camFrame.Framedata, fFrameSize);
            free(camFrame.Framedata);
            camFrame.Framedata = NULL;
            camQueue.pop();
        } else {
            // NOTE: sem_wait() blocks the (single) Live555 event-loop thread
            // until the capture thread posts a frame; while it waits, no other
            // stream served by this scheduler can make progress.
            if (sem_wait(waitForFrame) < 0) {
                printf("sem_wait failed\n");
            }
            return deliverFrame0(this); // retry now that a frame was signalled
        }
        FramedSource::afterGetting(this);
    }
}
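/* ---- Attachment 4: H264LiveServerMediaSession.hh (file name inferred from its contents) ---- */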
#ifndef H264_LIVE_SERVER_MEDIASESSION_HH_
#define H264_LIVE_SERVER_MEDIASESSION_HH_
#include <mutex>
#include "liveMedia.hh"
#include "OnDemandServerMediaSubsession.hh"
#include "LiveSourceWithx264.hh"
class H264LiveServerMediaSession : public OnDemandServerMediaSubsession {
public:
static H264LiveServerMediaSession* createNew(UsageEnvironment& env, bool reuseFirstSource);
void checkForAuxSDPLine1();
void afterPlayingDummy1();
void LiveStream_cameraFrameData(unsigned char* mFrameByte, unsigned mFrameSize,
        struct timeval mPresentationTime, uint64_t timeStamp);
protected:
H264LiveServerMediaSession(UsageEnvironment& env, bool reuseFirstSource);
virtual ~H264LiveServerMediaSession(void);
void setDoneFlag() { fDoneFlag = ~0; }
virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
        unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);
public:
LiveSourceWithx264 *liveVideosource;
TaskScheduler* live555TaskScheduler = NULL;
EventTriggerId eventTriggerId;
char videoSPSData[100];
char videoPPSData[100];
std::string streamName;
int spsSize;
int ppsSize;
std::mutex mtx;
char liveSourceAvailable;
std::queue<QUEUE> camQueue;
private:
char* fAuxSDPLine;
char fDoneFlag;
RTPSink* fDummySink;
};
#endif
_______________________________________________
live-devel mailing list
[email protected]
http://lists.live555.com/mailman/listinfo/live-devel