#include "PL_RTSPServer.h"
#include "MaterialBuffer.h"
#include "logger.h"

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>

#include "FFmpegRTSPServer/IEncoder.h"
#include "FFmpegRTSPServer/LiveRTSPServer.h"
#include "FFmpegRTSPServer/FFmpegH264Source.h"
#include "FFmpegRTSPServer/LiveServerMediaSubsession.h"

class MyEncoderStub;

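// Internal state shared between the pipeline element and the RTSP daemon thread.
// pay() copies one encoded H264 frame into `buffer`; frame_mutex implements the
// optional blocking handshake used when config.syncDeliverFrame is enabled.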
struct RTSPServer_Internal
{
    uint8_t buffer[1920*1080*3];
    size_t buffSize;
    size_t buffSizeMax;

    RTSPServerConfig config;

    bool payError;
    pthread_t live_daemon_thid;
    pthread_mutex_t* frame_mutex;
    bool live_daemon_running;

    MESAI::LiveRTSPServer* server;
    MyEncoderStub* encoderStub;

    RTSPServer_Internal() :
        buffSize(0), buffSizeMax(sizeof(buffer)), config(),
        payError(true), live_daemon_thid(0), frame_mutex(new pthread_mutex_t), live_daemon_running(false),
        server(nullptr), encoderStub(nullptr)
    {
        pthread_mutex_init(frame_mutex, NULL);
    }

    void reset()
    {
        buffSize = 0;

        RTSPServerConfig _config;
        config = _config;

        payError = true;

        live_daemon_thid = 0;
        live_daemon_running = false;

        server = nullptr;
        encoderStub = nullptr;
    }
};

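// MyEncoderStub adapts the pipeline to the MESAI FFmpegRTSPServer framework:
// LiveRTSPServer pulls encoded frames through the MESAI::IEncoder interface
// (GetFrame/ReleaseFrame), while pay() pushes new frames in via deliverFrame().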
class MyEncoderStub : public MESAI::IEncoder
{
public:
    MyEncoderStub(RTSPServer_Internal& _in) : in(_in)
    {
    }

    virtual ~MyEncoderStub()
    {
    }

    virtual void setCallbackFunctionFrameIsReady(std::function<void()> func)
    {
        onFrame = func;
    }

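    // Called by the RTSP source to fetch the current frame. Returns 1 and points
    // FrameBuffer/FrameSize at the shared buffer, or 0 (after ReleaseFrame) when
    // nothing is pending. With config.payWithAux the leading 4 bytes of the buffer
    // (presumably an auxiliary header or start code) are skipped.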
    virtual char GetFrame(u_int8_t** FrameBuffer, unsigned int *FrameSize)
    {
        if (in.buffSize <= 0)
        {
            ReleaseFrame();
            return 0;
        }

        //static size_t f = 0;
        //static FILE *pFile = fopen("/data/bb.264", "wb");
        //fwrite(in.buffer, sizeof(char), in.buffSize, pFile);
        //if (++f > 400){
        //    fclose(pFile);
        //    exit(0);
        //}

        uint8_t* pBuffer = in.buffer;
        size_t newBufferSize = in.buffSize;

        if (in.config.payWithAux)
        {
            if (newBufferSize <= 4)
            {
                ReleaseFrame();
                return 0;
            }
            pBuffer += 4;
            newBufferSize -= 4;
        }

        *FrameBuffer = pBuffer;
        *FrameSize = newBufferSize;

        LOG_DEBUG << "send frame size=" << in.buffSize << std::endl;
        return 1;
    }

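    // Marks the shared buffer as consumed. When config.syncDeliverFrame is set,
    // this also unlocks frame_mutex so that deliverFrame() (and thus pay()) can
    // return to the pipeline.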
    virtual char ReleaseFrame()
    {
        in.buffSize = 0;

        if (in.config.syncDeliverFrame)
        {
            int ret = pthread_mutex_unlock(in.frame_mutex);
            if(ret != 0)
            {
                LOG_WARN << "pthread_mutex_unlock frame_mutex: " << strerror(ret) << std::endl;
                return 0;
            }
        }

        return 1;
    }

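    // Called from PL_RTSPServer::pay() after a new frame has been copied into the
    // shared buffer: notifies LiveRTSPServer via the onFrame callback and, when
    // config.syncDeliverFrame is set, blocks on frame_mutex until ReleaseFrame()
    // unlocks it.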
    void deliverFrame()
    {
        // the caller has already written the frame into RTSPServer_Internal::buffer
        if (onFrame)
            onFrame();

        if (in.config.syncDeliverFrame)
        {
            int ret = pthread_mutex_lock(in.frame_mutex);
            if(ret != 0)
            {
                LOG_WARN << "pthread_mutex_lock frame_mutex: " << strerror(ret) << std::endl;
                return;
            }
        }
    }

private:
    RTSPServer_Internal& in;
    std::function<void()> onFrame;
};

PipeLineElem* create_PL_RTSPServer()
{
    return new PL_RTSPServer;
}

PL_RTSPServer::~PL_RTSPServer()
{
    delete (RTSPServer_Internal*)internal;
    internal = nullptr;
}

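// Daemon thread body: owns the LiveRTSPServer instance and runs its event loop.
// The constructor arguments below (8554 and 8080) appear to be the RTSP port and
// the RTSP-over-HTTP tunneling port, matching the ports used elsewhere in this code.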
static void* live_daemon_thd(void* arg)
{
    RTSPServer_Internal* in = (RTSPServer_Internal*)arg;

    MyEncoderStub encoder(*in);
    in->encoderStub = &encoder;
    in->server = new MESAI::LiveRTSPServer(&encoder, 8554, 8080);

    in->live_daemon_running = true;
    in->server->run(); // does not return
    in->encoderStub = nullptr;
    in->live_daemon_running = false;
    return nullptr;
}

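// Reset internal state, apply the caller-supplied RTSPServerConfig (if any) and
// start the RTSP daemon thread.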
bool PL_RTSPServer::init(void* args)
{
    RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
    in->reset();

    if (args)
    {
        RTSPServerConfig* config = (RTSPServerConfig*)args;
        in->config = *config;
    }

    int ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
    if(ret != 0)
    {
        LOG_ERROR << "pthread_create: " << strerror(ret) << std::endl;
        return false;
    }

    return true;
}

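// Accept one encoded frame (PMT_FRAME) from the pipeline: copy it into the shared
// buffer and hand it to the encoder stub, which wakes up the RTSP delivery path.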
bool PL_RTSPServer::pay(const PipeMaterial& pm)
{
    RTSPServer_Internal* in = (RTSPServer_Internal*)internal;

    if (pm.type != PipeMaterial::PMT_FRAME)
    {
        LOG_ERROR << "PL_RTSPServer::pay only support PMT_FRAME" << std::endl;
        return false;
    }

    if (in->buffSize > 0)
        LOG_WARN << "PL_RTSPServer::pay may lose data size=" << in->buffSize << std::endl;

    MB_Frame* frame = (MB_Frame*)pm.buffer;
    if (frame->buffer == nullptr)
        return false;

    if (frame->buffSize > in->buffSizeMax)
    {
        LOG_ERROR << "PL_RTSPServer::pay frame too large size=" << frame->buffSize << std::endl;
        return false;
    }

    memcpy(in->buffer, frame->buffer, frame->buffSize);
    in->buffSize = frame->buffSize;

    if (in->encoderStub == nullptr)
        return false;

    in->encoderStub->deliverFrame();
    return true;
}

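// This element is a sink: gain() hands back an empty material so downstream
// elements see that nothing is produced here.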
bool PL_RTSPServer::gain(PipeMaterial& pm)
{
    RTSPServer_Internal* in = (RTSPServer_Internal*)internal;

    pm.type = PipeMaterial::PMT_NONE;
    pm.buffer = nullptr;
    pm.buffSize = 0;
    pm.former = this;