I am trying to create an RTSP server that streams the OpenGL output of my program. I had a look at How to write a Live555 FramedSource to allow me to stream H.264 live, but I need the stream to be unicast, so I looked at testOnDemandRTSPServer. Using the same code fails. As I understand it, I need to provide memory in which my H.264 frames are stored so the on-demand server can read them on demand.
H264VideoStreamServerMediaSubsession.cpp
H264VideoStreamServerMediaSubsession*
H264VideoStreamServerMediaSubsession::createNew(UsageEnvironment& env,
Boolean reuseFirstSource) {
return new H264VideoStreamServerMediaSubsession(env, reuseFirstSource);
}
H264VideoStreamServerMediaSubsession::H264VideoStreamServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
: OnDemandServerMediaSubsession(env, reuseFirstSource), fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}
H264VideoStreamServerMediaSubsession::~H264VideoStreamServerMediaSubsession() {
delete[] fAuxSDPLine;
}
static void afterPlayingDummy(void* clientData) {
H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
subsess->afterPlayingDummy1();
}
void H264VideoStreamServerMediaSubsession::afterPlayingDummy1() {
// Unschedule any pending 'checking' task:
envir().taskScheduler().unscheduleDelayedTask(nextTask());
// Signal the event loop that we're done:
setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData) {
H264VideoStreamServerMediaSubsession* subsess = (H264VideoStreamServerMediaSubsession*)clientData;
subsess->checkForAuxSDPLine1();
}
void H264VideoStreamServerMediaSubsession::checkForAuxSDPLine1() {
char const* dasl;
if (fAuxSDPLine != NULL) {
// Signal the event loop that we're done:
setDoneFlag();
} else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
fAuxSDPLine = strDup(dasl);
fDummyRTPSink = NULL;
// Signal the event loop that we're done:
setDoneFlag();
} else {
// try again after a brief delay:
int uSecsToDelay = 100000; // 100 ms
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
(TaskFunc*)checkForAuxSDPLine, this);
}
}
char const* H264VideoStreamServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)
if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
// Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
// until we start reading the file. This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
// and we need to start reading data from our file until this changes.
fDummyRTPSink = rtpSink;
// Start reading the file:
fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
// Check whether the sink's 'auxSDPLine()' is ready:
checkForAuxSDPLine(this);
}
envir().taskScheduler().doEventLoop(&fDoneFlag);
return fAuxSDPLine;
}
FramedSource* H264VideoStreamServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
estBitrate = 500; // kbps
megamol::remotecontrol::View3D_MRC *parent = (megamol::remotecontrol::View3D_MRC*)this->parent;
return H264VideoStreamFramer::createNew(envir(), parent->h264FramedSource);
}
RTPSink* H264VideoStreamServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) {
return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
FramedSource.cpp
H264FramedSource* H264FramedSource::createNew(UsageEnvironment& env,
unsigned preferredFrameSize,
unsigned playTimePerFrame)
{
return new H264FramedSource(env, preferredFrameSize, playTimePerFrame);
}
H264FramedSource::H264FramedSource(UsageEnvironment& env,
unsigned preferredFrameSize,
unsigned playTimePerFrame)
: FramedSource(env),
fPreferredFrameSize(preferredFrameSize),
fPlayTimePerFrame(playTimePerFrame),
fLastPlayTime(0),
fCurIndex(0)
{
x264_param_default_preset(&param, "veryfast", "zerolatency");
param.i_threads = 1;
param.i_width = 1024;
param.i_height = 768;
param.i_fps_num = 30;
param.i_fps_den = 1;
// Intra refres:
param.i_keyint_max = 60;
param.b_intra_refresh = 1;
//Rate control:
param.rc.i_rc_method = X264_RC_CRF;
param.rc.f_rf_constant = 25;
param.rc.f_rf_constant_max = 35;
param.i_sps_id = 7;
//For streaming:
param.b_repeat_headers = 1;
param.b_annexb = 1;
x264_param_apply_profile(&param, "baseline");
param.i_log_level = X264_LOG_ERROR;
encoder = x264_encoder_open(&param);
pic_in.i_type = X264_TYPE_AUTO;
pic_in.i_qpplus1 = 0;
pic_in.img.i_csp = X264_CSP_I420;
pic_in.img.i_plane = 3;
x264_picture_alloc(&pic_in, X264_CSP_I420, 1024, 768);
convertCtx = sws_getContext(1024, 768, PIX_FMT_RGBA, 1024, 768, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}
H264FramedSource::~H264FramedSource()
{
envir().taskScheduler().deleteEventTrigger(eventTriggerId);
eventTriggerId = 0;
}
void H264FramedSource::AddToBuffer(uint8_t* buf, int surfaceSizeInBytes)
{
uint8_t* surfaceData = new uint8_t[surfaceSizeInBytes];
memcpy(surfaceData, buf, surfaceSizeInBytes);
int srcstride = 1024*4; // RGBA, 4 bytes per pixel
sws_scale(convertCtx, &surfaceData, &srcstride, 0, 768, pic_in.img.plane, pic_in.img.i_stride);
x264_nal_t* nals = NULL;
int i_nals = 0;
int frame_size = -1;
frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);
if (frame_size >= 0)
{
// b_repeat_headers = 1 already puts SPS/PPS in front of every keyframe, so the
// NALs returned by x264_encoder_encode can be queued directly. (Calling
// x264_encoder_headers here would overwrite 'nals' and drop the frame data.)
for(int i = 0; i < i_nals; ++i)
{
m_queue.push(nals[i]);
}
}
delete [] surfaceData;
surfaceData = nullptr;
envir().taskScheduler().triggerEvent(eventTriggerId, this);
}
void H264FramedSource::doGetNextFrame()
{
deliverFrame();
}
void H264FramedSource::deliverFrame0(void* clientData)
{
((H264FramedSource*)clientData)->deliverFrame();
}
void H264FramedSource::deliverFrame()
{
x264_nal_t nalToDeliver;
if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
// This is the first frame, so use the current time:
gettimeofday(&fPresentationTime, NULL);
} else {
// Increment by the play time of the previous data:
unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
fPresentationTime.tv_sec += uSeconds/1000000;
fPresentationTime.tv_usec = uSeconds%1000000;
}
// Remember the play time of this data:
fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
fDurationInMicroseconds = fLastPlayTime;
} else {
// We don't know a specific play time duration for this data,
// so just record the current time as being the 'presentation time':
gettimeofday(&fPresentationTime, NULL);
}
if(!m_queue.empty())
{
m_queue.wait_and_pop(nalToDeliver);
uint8_t* newFrameDataStart = nalToDeliver.p_payload;
unsigned newFrameSize = nalToDeliver.i_payload;
// Deliver the data here, truncating if it does not fit into the sink's buffer:
if (newFrameSize > fMaxSize) {
fFrameSize = fMaxSize;
fNumTruncatedBytes = newFrameSize - fMaxSize;
}
else {
fFrameSize = newFrameSize;
fNumTruncatedBytes = 0;
}
memcpy(fTo, newFrameDataStart, fFrameSize);
FramedSource::afterGetting(this);
}
}
Relevant part of the RTSP server thread
RTSPServer* rtspServer = RTSPServer::createNew(*(parent->env), 8554, NULL);
if (rtspServer == NULL) {
*(parent->env) << "Failed to create RTSP server: " << (parent->env)->getResultMsg() << "\n";
exit(1);
}
char const* streamName = "Stream";
ServerMediaSession* sms = ServerMediaSession::createNew(*(parent->env), streamName);
parent->h264FramedSource = H264FramedSource::createNew(*(parent->env), 0, 0);
H264VideoStreamServerMediaSubsession *h264VideoStreamServerMediaSubsession = H264VideoStreamServerMediaSubsession::createNew(*(parent->env), true);
h264VideoStreamServerMediaSubsession->parent = parent;
sms->addSubsession(h264VideoStreamServerMediaSubsession);
rtspServer->addServerMediaSession(sms);
parent->env->taskScheduler().doEventLoop(); // does not return
Once a connection exists, the render loop calls
h264FramedSource->AddToBuffer(videoData, 1024*768*4);
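For context, here is a minimal sketch of what such a render-loop hook could look like. The helper function, the header name and the glReadPixels readback are illustrative assumptions, not part of the original code:
#include <GL/gl.h>
#include <cstdint>
#include "H264FramedSource.h"   // assumed header name for the class above

// Hypothetical helper: read back the current GL framebuffer as RGBA and push it
// into the live source. Assumes a 1024x768 framebuffer and an active GL context
// on the calling thread. Note that glReadPixels returns the image bottom-up, so
// the frame may still need flipping (or a negative stride in sws_scale).
void pushFrameToSource(H264FramedSource* h264FramedSource)
{
    static uint8_t videoData[1024 * 768 * 4];
    glReadPixels(0, 0, 1024, 768, GL_RGBA, GL_UNSIGNED_BYTE, videoData);
    h264FramedSource->AddToBuffer(videoData, 1024 * 768 * 4);
}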
The first thing you have to do is write a wrapper around the x264 encoder that lets you encode RGB data through a clean interface. The following class will give you an idea of how to do that; I have used it to encode raw BGR frames that I was getting from my OpenCV capture.
x264Encoder.h
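The original listing is not reproduced here. As a rough sketch of what such a wrapper's interface could look like (the class layout and method names such as encodeFrame and getNalUnit are illustrative):
#ifndef X264_ENCODER_H
#define X264_ENCODER_H

#include <queue>
#include <cstdint>

extern "C" {
#include <x264.h>
#include <libswscale/swscale.h>
}

// Thin wrapper around the x264 encoder: takes raw BGR frames, converts them to
// I420 and queues the resulting NAL units for the streaming side to pick up.
class x264Encoder
{
public:
    x264Encoder(int width, int height, int fps);
    ~x264Encoder();

    void initialize();                               // open the encoder, set up swscale
    void unInitialize();                             // close the encoder, free contexts
    void encodeFrame(uint8_t* bgrData, int stride);  // encode one packed BGR frame
    bool isNalsAvailableInOutputQueue();
    x264_nal_t getNalUnit();                         // pop the next NAL from the queue

private:
    int                    fWidth, fHeight, fFps;
    SwsContext*            fConvertCtx;
    x264_t*                fEncoder;
    x264_picture_t         fPicIn, fPicOut;
    std::queue<x264_nal_t> fOutputQueue;
};

#endif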
x264Encoder.cpp
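Again only a sketch of the implementation side, matching the interface above; error handling is omitted and the encoder settings mirror the ones used in the question:
#include "x264Encoder.h"

x264Encoder::x264Encoder(int width, int height, int fps)
    : fWidth(width), fHeight(height), fFps(fps),
      fConvertCtx(NULL), fEncoder(NULL) {}

x264Encoder::~x264Encoder() { unInitialize(); }

void x264Encoder::initialize()
{
    x264_param_t param;
    x264_param_default_preset(&param, "veryfast", "zerolatency");
    param.i_threads            = 1;
    param.i_width              = fWidth;
    param.i_height             = fHeight;
    param.i_fps_num            = fFps;
    param.i_fps_den            = 1;
    param.i_keyint_max         = fFps * 2;
    param.b_intra_refresh      = 1;
    param.rc.i_rc_method       = X264_RC_CRF;
    param.rc.f_rf_constant     = 25;
    param.rc.f_rf_constant_max = 35;
    param.b_repeat_headers     = 1;   // resend SPS/PPS with every keyframe
    param.b_annexb             = 1;   // Annex-B start codes
    x264_param_apply_profile(&param, "baseline");

    fEncoder = x264_encoder_open(&param);
    x264_picture_alloc(&fPicIn, X264_CSP_I420, fWidth, fHeight);

    // Packed BGR input -> planar I420 for the encoder.
    fConvertCtx = sws_getContext(fWidth, fHeight, AV_PIX_FMT_BGR24,
                                 fWidth, fHeight, AV_PIX_FMT_YUV420P,
                                 SWS_FAST_BILINEAR, NULL, NULL, NULL);
}

void x264Encoder::unInitialize()
{
    if (fEncoder) {
        x264_encoder_close(fEncoder);
        x264_picture_clean(&fPicIn);
        fEncoder = NULL;
    }
    if (fConvertCtx) {
        sws_freeContext(fConvertCtx);
        fConvertCtx = NULL;
    }
}

void x264Encoder::encodeFrame(uint8_t* bgrData, int stride)
{
    uint8_t* srcSlice[1]  = { bgrData };
    int      srcStride[1] = { stride };
    sws_scale(fConvertCtx, srcSlice, srcStride, 0, fHeight,
              fPicIn.img.plane, fPicIn.img.i_stride);

    x264_nal_t* nals   = NULL;
    int         i_nals = 0;
    int frameSize = x264_encoder_encode(fEncoder, &nals, &i_nals, &fPicIn, &fPicOut);
    if (frameSize > 0) {
        // Note: nals[i].p_payload points into encoder-owned memory that is reused
        // on the next encode call; a production implementation should copy the
        // payload when queueing (the question's code has the same caveat).
        for (int i = 0; i < i_nals; ++i)
            fOutputQueue.push(nals[i]);
    }
}

bool x264Encoder::isNalsAvailableInOutputQueue() { return !fOutputQueue.empty(); }

x264_nal_t x264Encoder::getNalUnit()
{
    x264_nal_t nal = fOutputQueue.front();
    fOutputQueue.pop();
    return nal;
}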
Now we have an encoder that takes a BGR picture and encodes it; it puts all the output NALs into an output queue, which will then be streamed by Live555. To implement a live video source you have to create two classes: a subclass of OnDemandServerMediaSubsession and a subclass of FramedSource, both of which are part of the Live555 media library. This setup will also serve data to more than one client.
To subclass these two classes you can refer to the following.
H264LiveServerMediaSession.h (Subclass of OnDemandServerMediaSubsession)
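The original header is not included here; structurally it can look much like the subsession already shown in the question. A sketch with illustrative names:
#ifndef H264_LIVE_SERVER_MEDIA_SESSION_H
#define H264_LIVE_SERVER_MEDIA_SESSION_H

#include <OnDemandServerMediaSubsession.hh>

// Unicast, on-demand subsession that serves the live x264 source.
class H264LiveServerMediaSession : public OnDemandServerMediaSubsession
{
public:
    static H264LiveServerMediaSession* createNew(UsageEnvironment& env,
                                                 Boolean reuseFirstSource);
    // Helpers used while waiting for the aux SDP line, as in the question's subsession:
    void checkForAuxSDPLine1();
    void afterPlayingDummy1();
    void setDoneFlag() { fDoneFlag = ~0; }

protected:
    H264LiveServerMediaSession(UsageEnvironment& env, Boolean reuseFirstSource);
    virtual ~H264LiveServerMediaSession();

    virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
    virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                                unsigned& estBitrate);
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                      unsigned char rtpPayloadTypeIfDynamic,
                                      FramedSource* inputSource);

private:
    char*    fAuxSDPLine;
    char     fDoneFlag;
    RTPSink* fDummyRTPSink;
};

#endif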
H264LiveServerMediaSession.cpp
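A sketch of the .cpp side. The aux-SDP handling (getAuxSDPLine() with a dummy sink) follows the same pattern already shown in the question's subsession, so only the two factory methods are spelled out. Whether to wrap the source in H264VideoStreamDiscreteFramer or H264VideoStreamFramer depends on whether it delivers whole NAL units or a raw byte stream; a discrete framer is assumed here.
#include "H264LiveServerMediaSession.h"
#include "LiveSourceWithx264.h"
#include <H264VideoRTPSink.hh>
#include <H264VideoStreamDiscreteFramer.hh>

H264LiveServerMediaSession* H264LiveServerMediaSession::createNew(
        UsageEnvironment& env, Boolean reuseFirstSource)
{
    return new H264LiveServerMediaSession(env, reuseFirstSource);
}

H264LiveServerMediaSession::H264LiveServerMediaSession(UsageEnvironment& env,
                                                       Boolean reuseFirstSource)
    : OnDemandServerMediaSubsession(env, reuseFirstSource),
      fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {}

H264LiveServerMediaSession::~H264LiveServerMediaSession()
{
    delete[] fAuxSDPLine;
}

// getAuxSDPLine(), checkForAuxSDPLine1() and afterPlayingDummy1() use the same
// "start a dummy sink and poll until sprop-parameter-sets are known" pattern that
// the question's subsession already shows, so their bodies are not repeated here.

FramedSource* H264LiveServerMediaSession::createNewStreamSource(
        unsigned /*clientSessionId*/, unsigned& estBitrate)
{
    estBitrate = 500; // kbps, a rough estimate

    // The application needs a handle to this source so the capture/render thread
    // can feed it; how that pointer is exported is application-specific
    // (the question stores its source in 'parent').
    LiveSourceWithx264* source = LiveSourceWithx264::createNew(envir());

    // The source hands out complete NAL units, so a discrete framer is used.
    return H264VideoStreamDiscreteFramer::createNew(envir(), source);
}

RTPSink* H264LiveServerMediaSession::createNewRTPSink(
        Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
        FramedSource* /*inputSource*/)
{
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}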
Now we have to subclass the FramedSource class from liveMedia. As a model you can refer to DeviceSource.cpp in the live555 library. The following shows how I did it.
LiveSourceWithx264.h
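Not the original file either, but a sketch of the shape such a source can take; the encodeAndSignal() entry point is an assumption that mirrors the AddToBuffer() approach from the question:
#ifndef LIVE_SOURCE_WITH_X264_H
#define LIVE_SOURCE_WITH_X264_H

#include <FramedSource.hh>
#include "x264Encoder.h"

// FramedSource that owns an x264Encoder and delivers one H.264 NAL unit per
// doGetNextFrame() call. Raw frames are pushed in from outside (render loop,
// capture thread, ...) via encodeAndSignal().
class LiveSourceWithx264 : public FramedSource
{
public:
    static LiveSourceWithx264* createNew(UsageEnvironment& env);

    // Called by the producing thread with one raw BGR frame; encodes it and
    // wakes up the Live555 event loop so the resulting NALs get delivered.
    void encodeAndSignal(uint8_t* bgrData, int stride);

protected:
    LiveSourceWithx264(UsageEnvironment& env);
    virtual ~LiveSourceWithx264();

private:
    virtual void doGetNextFrame();
    static void deliverFrame0(void* clientData);
    void deliverFrame();

    x264Encoder*   fEncoder;
    EventTriggerId fEventTriggerId;
};

#endif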
LiveSourceWithx264.cpp
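A matching implementation sketch. It delivers one NAL unit per doGetNextFrame() call and strips the Annex-B start code, since a discrete framer is assumed in the subsession above; the resolution and frame rate are illustrative:
#include "LiveSourceWithx264.h"
#include <GroupsockHelper.hh>   // for gettimeofday() on all platforms
#include <cstring>

LiveSourceWithx264* LiveSourceWithx264::createNew(UsageEnvironment& env)
{
    return new LiveSourceWithx264(env);
}

LiveSourceWithx264::LiveSourceWithx264(UsageEnvironment& env)
    : FramedSource(env)
{
    fEncoder = new x264Encoder(1024, 768, 30);   // resolution/fps are illustrative
    fEncoder->initialize();
    fEventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}

LiveSourceWithx264::~LiveSourceWithx264()
{
    envir().taskScheduler().deleteEventTrigger(fEventTriggerId);
    fEncoder->unInitialize();
    delete fEncoder;
}

void LiveSourceWithx264::encodeAndSignal(uint8_t* bgrData, int stride)
{
    // Note: if this runs on a different thread than the Live555 event loop, the
    // encoder's NAL queue needs to be thread-safe (the question uses a blocking
    // queue with wait_and_pop for exactly that reason).
    fEncoder->encodeFrame(bgrData, stride);
    envir().taskScheduler().triggerEvent(fEventTriggerId, this);
}

void LiveSourceWithx264::doGetNextFrame()
{
    // If a NAL is already waiting, deliver it now; otherwise wait for the
    // event trigger fired by encodeAndSignal().
    if (fEncoder->isNalsAvailableInOutputQueue()) deliverFrame();
}

void LiveSourceWithx264::deliverFrame0(void* clientData)
{
    ((LiveSourceWithx264*)clientData)->deliverFrame();
}

void LiveSourceWithx264::deliverFrame()
{
    if (!isCurrentlyAwaitingData()) return;               // the sink hasn't asked yet
    if (!fEncoder->isNalsAvailableInOutputQueue()) return;

    x264_nal_t nal = fEncoder->getNalUnit();

    // The encoder emits Annex-B NALs; H264VideoStreamDiscreteFramer expects NAL
    // units without the 00 00 00 01 / 00 00 01 start code, so skip it here.
    unsigned offset = 0;
    if (nal.i_payload >= 4 && nal.p_payload[0] == 0 && nal.p_payload[1] == 0 &&
        nal.p_payload[2] == 0 && nal.p_payload[3] == 1) {
        offset = 4;
    } else if (nal.i_payload >= 3 && nal.p_payload[0] == 0 && nal.p_payload[1] == 0 &&
               nal.p_payload[2] == 1) {
        offset = 3;
    }
    unsigned newFrameSize = nal.i_payload - offset;

    if (newFrameSize > fMaxSize) {                        // truncate if needed
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = newFrameSize - fMaxSize;
    } else {
        fFrameSize = newFrameSize;
        fNumTruncatedBytes = 0;
    }
    gettimeofday(&fPresentationTime, NULL);
    memcpy(fTo, nal.p_payload + offset, fFrameSize);
    FramedSource::afterGetting(this);
}
The capture (or render) thread then calls encodeAndSignal(frameData, width*3) whenever a new frame is available and therefore needs to keep a pointer to the source, much like the question does via parent->h264FramedSource.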
Now we are done with the class implementations. For the streaming setup you can follow the testOnDemandRTSPServer.cpp sample; my main() does essentially the same thing.
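A sketch of that setup (port, stream name and description strings are illustrative):
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include "H264LiveServerMediaSession.h"

int main()
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, NULL);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        return 1;
    }

    char const* streamName = "usbCam";   // illustrative name
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName,
                                                            "Live H.264 stream");
    sms->addSubsession(H264LiveServerMediaSession::createNew(*env, True));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    env->taskScheduler().doEventLoop();  // does not return
    return 0;
}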
With that you have the URL for your live source. Mine was for my USB cam :)