#include "PL_RTSPServer.h" #include #include class MyH264FramedSource : public FramedSource { public: static MyH264FramedSource* createNew(UsageEnvironment& env); protected: MyH264FramedSource(UsageEnvironment& env) virtual ~MyH264FramedSource() // overide FramedSource virtual void doGetNextFrame() { // deliverFrame //if (fFrameSize > 0) //{ // // send Frame to the consumer // FramedSource::afterGetting(this); //} // isCurrentlyAwaitingData //if (frame->m_size > fMaxSize) //{ // fFrameSize = fMaxSize; // fNumTruncatedBytes = frame->m_size - fMaxSize; //} //else //{ // fFrameSize = frame->m_size; //} //memcpy(fTo, frame->m_buffer, fFrameSize); //if (fFrameSize > 0) // FramedSource::afterGetting(this); } virtual void doStopGettingFrames() { FramedSource::doStopGettingFrames(); } }; struct PL_RTSPServer_Internal { //uint8_t buffer[1920*1080*4]; //size_t buffSize; //size_t buffSizeMax; bool payError; pthread_t live_daemon_thid; pthread_mutex_t* frame_mutex; bool live_daemon_running; UsageEnvironment* env; // To make the second and subsequent client for each stream reuse the same // input stream as the first client (rather than playing the file from the // start for each client), change the following "False" to "True": Boolean reuseFirstSource; // To stream *only* MPEG-1 or 2 video "I" frames // (e.g., to reduce network bandwidth), // change the following "False" to "True": Boolean iFramesOnly; UserAuthenticationDatabase* authDB; RTSPServer* rtspServer;//#todo delete char descriptionString[1024]; PL_RTSPServer_Internal() : //buffSize(0), buffSizeMax(sizeof(buffer)), payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false), env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr), rtspServer(nullptr); { pthread_mutex_init(frame_mutex, NULL); } ~PL_RTSPServer_Internal() { if (frame_mutex != nullptr) { pthread_mutex_destroy(frame_mutex); delete frame_mutex; frame_mutex = nullptr; } } void reset() { //buffSize = 0; payError = true; if (frame_mutex != nullptr) { pthread_mutex_destroy(frame_mutex); delete frame_mutex; frame_mutex = nullptr; } frame_mutex = new pthread_mutex_t; pthread_mutex_init(frame_mutex, NULL); live_daemon_thid = 0; live_daemon_running = false; env = nullptr; reuseFirstSource = False; iFramesOnly = False; authDB = nullptr; rtspServer = nullptr; strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\""); } }; PipeLineElem* create_PL_RTSPServer() { return new PL_RTSPServer; } PL_RTSPServer::PL_RTSPServer() : internal(new PL_RTSPServer_Internal) { } PL_RTSPServer::~PL_RTSPServer() { delete (PL_RTSPServer_Internal*)internal; internal= nullptr; } void* live_daemon_thd(void* arg) { RTSPClient_Internal* in = (RTSPClient_Internal*)arg; // Begin by setting up our usage environment: TaskScheduler* scheduler = BasicTaskScheduler::createNew(); in->env = BasicUsageEnvironment::createNew(*scheduler); #ifdef ACCESS_CONTROL // To implement client access control to the RTSP server, do the following: in->authDB = new UserAuthenticationDatabase; in->authDB->addUserRecord("username1", "password1"); // replace these with real strings // Repeat the above with each , that you wish to allow // access to the server. #endif // Create the RTSP server: in->rtspServer = RTSPServer::createNew(*env, 8554, authDB); if (rtspServer == NULL) { *(in->env) << "Failed to create RTSP server: " << env->getResultMsg() << "\n"; return; } // Set up each of the possible streams that can be served by the // RTSP server. 
struct PL_RTSPServer_Internal
{
	//uint8_t buffer[1920*1080*4];
	//size_t buffSize;
	//size_t buffSizeMax;
	bool payError;
	pthread_t live_daemon_thid;
	pthread_mutex_t* frame_mutex;
	bool live_daemon_running;

	UsageEnvironment* env;

	// To make the second and subsequent client for each stream reuse the same
	// input stream as the first client (rather than playing the file from the
	// start for each client), change the following "False" to "True":
	Boolean reuseFirstSource;

	// To stream *only* MPEG-1 or 2 video "I" frames
	// (e.g., to reduce network bandwidth),
	// change the following "False" to "True":
	Boolean iFramesOnly;

	UserAuthenticationDatabase* authDB;

	RTSPServer* rtspServer;//#todo delete

	char descriptionString[1024];

	PL_RTSPServer_Internal() :
		//buffSize(0), buffSizeMax(sizeof(buffer)),
		payError(true),
		live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false),
		env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr), rtspServer(nullptr)
	{
		frame_mutex = new pthread_mutex_t;
		pthread_mutex_init(frame_mutex, NULL);
	}

	~PL_RTSPServer_Internal()
	{
		if (frame_mutex != nullptr)
		{
			pthread_mutex_destroy(frame_mutex);
			delete frame_mutex;
			frame_mutex = nullptr;
		}
	}

	void reset()
	{
		//buffSize = 0;
		payError = true;

		if (frame_mutex != nullptr)
		{
			pthread_mutex_destroy(frame_mutex);
			delete frame_mutex;
			frame_mutex = nullptr;
		}

		frame_mutex = new pthread_mutex_t;
		pthread_mutex_init(frame_mutex, NULL);

		live_daemon_thid = 0;
		live_daemon_running = false;
		env = nullptr;
		reuseFirstSource = False;
		iFramesOnly = False;
		authDB = nullptr;
		rtspServer = nullptr;
		strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
	}
};

PipeLineElem* create_PL_RTSPServer()
{
	return new PL_RTSPServer;
}

PL_RTSPServer::PL_RTSPServer() : internal(new PL_RTSPServer_Internal)
{
}

PL_RTSPServer::~PL_RTSPServer()
{
	delete (PL_RTSPServer_Internal*)internal;
	internal = nullptr;
}

void* live_daemon_thd(void* arg)
{
	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)arg;

	// Begin by setting up our usage environment:
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	in->env = BasicUsageEnvironment::createNew(*scheduler);

#ifdef ACCESS_CONTROL
	// To implement client access control to the RTSP server, do the following:
	in->authDB = new UserAuthenticationDatabase;
	in->authDB->addUserRecord("username1", "password1"); // replace these with real strings
	// Repeat the above with each <username>, <password> that you wish to allow
	// access to the server.
#endif

	// Create the RTSP server:
	in->rtspServer = RTSPServer::createNew(*(in->env), 8554, in->authDB);
	if (in->rtspServer == NULL)
	{
		*(in->env) << "Failed to create RTSP server: " << in->env->getResultMsg() << "\n";
		return nullptr;
	}

	// Set up each of the possible streams that can be served by the
	// RTSP server.  Each such stream is implemented using a
	// "ServerMediaSession" object, plus one or more
	// "ServerMediaSubsession" objects for each audio/video substream.
	char const* streamName = "plH264Encoder";
	ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
	//#todo ServerMediaSession::addSubsession() expects a ServerMediaSubsession*,
	//      not a FramedSource*, so MyH264FramedSource must be wrapped in an
	//      OnDemandServerMediaSubsession subclass before this line can work
	//      (see the MyH264ServerMediaSubsession sketch at the end of this file):
	//sms->addSubsession(MyH264FramedSource::createNew(*(in->env), in));
	in->rtspServer->addServerMediaSession(sms);

	// announceStream
	char* url = in->rtspServer->rtspURL(sms);
	*(in->env) << "\n\"" << streamName << "\" stream\n";
	*(in->env) << "Play this stream using the URL \"" << url << "\"\n";
	delete[] url;

	// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
	// Try first with the default HTTP port (80), and then with the alternative HTTP
	// port numbers (8000 and 8080).
	if (in->rtspServer->setUpTunnelingOverHTTP(80) ||
		in->rtspServer->setUpTunnelingOverHTTP(8000) ||
		in->rtspServer->setUpTunnelingOverHTTP(8080))
		*(in->env) << "\n(We use port " << in->rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
	else
		*(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";

	in->live_daemon_running = true;
	in->env->taskScheduler().doEventLoop(); // does not return
	in->live_daemon_running = false;
	return nullptr;
}

bool PL_RTSPServer::init(void* args)
{
	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
	in->reset();

	int ret = pthread_mutex_lock(in->frame_mutex);
	if (ret != 0)
	{
		printf("pthread_mutex_lock frame_mutex: %s\n", strerror(ret));
		return false;
	}

	ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
	if (ret != 0)
	{
		printf("pthread_create: %s\n", strerror(ret));
		return false;
	}

	return true;
}

void PL_RTSPServer::finit()
{
	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;

	pthread_join(in->live_daemon_thid, NULL);
}

bool PL_RTSPServer::pay(const PipeMaterial& pm)
{
	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
	return true;
}

bool PL_RTSPServer::gain(PipeMaterial& pm)
{
	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;

	pm.buffer = nullptr;
	pm.buffSize = 0;
	pm.former = this;
	return true;
}
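// ---------------------------------------------------------------------------
// Sketch (not part of the original element): a minimal OnDemandServerMediaSubsession
// wrapper that would let live_daemon_thd() register MyH264FramedSource with the
// ServerMediaSession via addSubsession(). The class name, the 500 kbps bitrate
// estimate and the choice of H264VideoStreamDiscreteFramer are assumptions; the
// live555 calls themselves (createNewStreamSource / createNewRTPSink /
// H264VideoRTPSink) are the standard API for serving H.264 over RTP.
// ---------------------------------------------------------------------------
class MyH264ServerMediaSubsession : public OnDemandServerMediaSubsession
{
public:
	static MyH264ServerMediaSubsession* createNew(UsageEnvironment& env, Boolean reuseFirstSource)
	{
		return new MyH264ServerMediaSubsession(env, reuseFirstSource);
	}

protected:
	MyH264ServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource)
		: OnDemandServerMediaSubsession(env, reuseFirstSource)
	{
	}

	virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
	{
		estBitrate = 500; // kbps; rough placeholder estimate

		// Wrap the raw NAL-unit source in a discrete framer so the RTP sink
		// receives one complete NAL unit per delivery.
		return H264VideoStreamDiscreteFramer::createNew(envir(), MyH264FramedSource::createNew(envir()));
	}

	virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
	{
		return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
	}
};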