From cc445067d1f61e12dbea4e6458f2c85ba58f01bf Mon Sep 17 00:00:00 2001 From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674> Date: Fri, 30 Dec 2016 14:28:14 +0800 Subject: [PATCH] fix config, fix some log and todo --- RtspFace/live555/testProgs/testRTSPClient.hpp | 702 +++++++++++++++++++++++++++++++-------------------- 1 files changed, 379 insertions(+), 323 deletions(-) diff --git a/RtspFace/live555/testProgs/testRTSPClient.hpp b/RtspFace/live555/testProgs/testRTSPClient.hpp index 8f33f1a..ff4a861 100644 --- a/RtspFace/live555/testProgs/testRTSPClient.hpp +++ b/RtspFace/live555/testProgs/testRTSPClient.hpp @@ -33,10 +33,11 @@ // Even though we're not going to be doing anything with the incoming data, we still need to receive it. // Define the size of the buffer that we'll use: -#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 1920*1080*3 +#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 1920*1080*3//#todo // If you don't want to see debugging output for each received frame, then comment out the following line: -#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1 +//#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1 +//#define DEBUG_PRINT_NPT 1 // Forward function definitions: @@ -49,10 +50,10 @@ void subsessionAfterPlaying(void* clientData); // called when a stream's subsession (e.g., audio or video substream) ends void subsessionByeHandler(void* clientData); // called when a RTCP "BYE" is received for a subsession void streamTimerHandler(void* clientData); - // called at the end of a stream's expected duration (if the stream has not already signaled its end using a RTCP "BYE") +// called at the end of a stream's expected duration (if the stream has not already signaled its end using a RTCP "BYE") // The main streaming routine (for each "rtsp://" URL): -void openURL(UsageEnvironment& env, const RTSPConfig& _rtspConfig); +void openURL(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig); // Used to iterate through each stream's 'subsessions', setting up each one: void setupNextSubsession(RTSPClient* rtspClient); @@ -61,75 +62,82 @@ void shutdownStream(RTSPClient* rtspClient, int exitCode = 1); // A function that outputs a string that identifies each stream (for debugging output). Modify this if you wish: -UsageEnvironment& operator<<(UsageEnvironment& env, const RTSPClient& rtspClient) { - return env << "[URL:\"" << rtspClient.url() << "\"]: "; +log4cpp::CategoryStream& operator<<(log4cpp::CategoryStream& logRoot, const RTSPClient& rtspClient) +{ + return logRoot << "[URL:\"" << rtspClient.url() << "\"]: "; } // A function that outputs a string that identifies each subsession (for debugging output). Modify this if you wish: -UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession) { - return env << subsession.mediumName() << "/" << subsession.codecName(); +log4cpp::CategoryStream& operator<<(log4cpp::CategoryStream& logRoot, const MediaSubsession& subsession) +{ + return logRoot << subsession.mediumName() << "/" << subsession.codecName(); } -void usage(UsageEnvironment& env, char const* progName) { - env << "Usage: " << progName << " <rtsp-url-1> ... <rtsp-url-N>\n"; - env << "\t(where each <rtsp-url-i> is a \"rtsp://\" URL)\n"; +void usage(UsageEnvironment& env, char const* progName) +{ + LOG_DEBUG << "Usage: " << progName << " <rtsp-url-1> ...
<rtsp-url-N>"; + LOG_DEBUG << "\t(where each <rtsp-url-i> is a \"rtsp://\" URL)"; } char eventLoopWatchVariable = 0; -int test_main(int argc, char** argv) { - // Begin by setting up our usage environment: - TaskScheduler* scheduler = BasicTaskScheduler::createNew(); - UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); +int test_main(int argc, char** argv) +{ + // Begin by setting up our usage environment: + TaskScheduler* scheduler = BasicTaskScheduler::createNew(); + UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); - // We need at least one "rtsp://" URL argument: - if (argc < 2) { - usage(*env, argv[0]); - return 1; - } + // We need at least one "rtsp://" URL argument: + if (argc < 2) + { + usage(*env, argv[0]); + return 1; + } - RTSPConfig rtspConfig; + PL_RTSPClient_Config rtspConfig; rtspConfig.progName = argv[0]; rtspConfig.rtspURL = ""; rtspConfig.aux = false; rtspConfig.verbosityLevel = RTSP_CLIENT_VERBOSITY_LEVEL; rtspConfig.tunnelOverHTTPPortNum = 0; rtspConfig.args = nullptr; - - // There are argc-1 URLs: argv[1] through argv[argc-1]. Open and start streaming each one: - for (int i = 1; i <= argc-1; ++i) { - rtspConfig.rtspURL = argv[i]; - openURL(*env, rtspConfig); - } - // All subsequent activity takes place within the event loop: - env->taskScheduler().doEventLoop(&eventLoopWatchVariable); - // This function call does not return, unless, at some point in time, "eventLoopWatchVariable" gets set to something non-zero. + // There are argc-1 URLs: argv[1] through argv[argc-1]. Open and start streaming each one: + for (int i = 1; i <= argc-1; ++i) + { + rtspConfig.rtspURL = argv[i]; + openURL(*env, rtspConfig); + } - return 0; + // All subsequent activity takes place within the event loop: + env->taskScheduler().doEventLoop(&eventLoopWatchVariable); + // This function call does not return, unless, at some point in time, "eventLoopWatchVariable" gets set to something non-zero. 
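doEventLoop() only returns once eventLoopWatchVariable becomes non-zero, so an application embedding this code needs some other piece of code to flip that flag. A minimal sketch of one way to do that from a second thread (the helper name stopEventLoopAfter is illustrative and not part of this patch):

#include <chrono>
#include <thread>

extern char eventLoopWatchVariable;   // defined in this file and polled by doEventLoop()

// Illustrative helper: request that doEventLoop() return after 'seconds' seconds.
void stopEventLoopAfter(unsigned seconds)
{
    std::thread([seconds]() {
        std::this_thread::sleep_for(std::chrono::seconds(seconds));
        eventLoopWatchVariable = 1;   // any non-zero value makes doEventLoop() return
    }).detach();
}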
- // If you choose to continue the application past this point (i.e., if you comment out the "return 0;" statement above), - // and if you don't intend to do anything more with the "TaskScheduler" and "UsageEnvironment" objects, - // then you can also reclaim the (small) memory used by these objects by uncommenting the following code: - /* - env->reclaim(); env = NULL; - delete scheduler; scheduler = NULL; - */ + return 0; + + // If you choose to continue the application past this point (i.e., if you comment out the "return 0;" statement above), + // and if you don't intend to do anything more with the "TaskScheduler" and "UsageEnvironment" objects, + // then you can also reclaim the (small) memory used by these objects by uncommenting the following code: + /* + env->reclaim(); env = NULL; + delete scheduler; scheduler = NULL; + */ } // Define a class to hold per-stream state that we maintain throughout each stream's lifetime: -class StreamClientState { +class StreamClientState +{ public: - StreamClientState(); - virtual ~StreamClientState(); + StreamClientState(); + virtual ~StreamClientState(); public: - MediaSubsessionIterator* iter; - MediaSession* session; - MediaSubsession* subsession; - TaskToken streamTimerTask; - double duration; + MediaSubsessionIterator* iter; + MediaSession* session; + MediaSubsession* subsession; + TaskToken streamTimerTask; + double duration; }; // If you're streaming just a single stream (i.e., just from a single URL, once), then you can define and use just a single @@ -137,18 +145,19 @@ // showing how to play multiple streams, concurrently, we can't do that. Instead, we have to have a separate "StreamClientState" // structure for each "RTSPClient". To do this, we subclass "RTSPClient", and add a "StreamClientState" field to the subclass: -class ourRTSPClient: public RTSPClient { +class ourRTSPClient: public RTSPClient +{ public: - static ourRTSPClient* createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig); + static ourRTSPClient* createNew(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig); protected: - ourRTSPClient(UsageEnvironment& env, const RTSPConfig& _rtspConfig); - // called only by createNew(); - virtual ~ourRTSPClient(); + ourRTSPClient(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig); + // called only by createNew(); + virtual ~ourRTSPClient(); public: - StreamClientState scs; - const RTSPConfig& rtspConfig; + StreamClientState scs; + const PL_RTSPClient_Config& rtspConfig; }; // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream'). 
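This patch threads a PL_RTSPClient_Config through every object in place of the old RTSPConfig. Its definition is not part of this file; the sketch below is inferred only from the fields the patch reads (progName, rtspURL, aux, verbosityLevel, tunnelOverHTTPPortNum, args) and may differ from the real declaration elsewhere in RtspFace:

#include <string>

struct PL_RTSPClient_Config
{
    std::string progName;                     // argv[0]; used as the RTSPClient application name
    std::string rtspURL;                      // the "rtsp://" URL to open
    bool aux = false;                         // if true, keep a 00 00 00 01 Annex-B start code in front of each frame for ffmpeg
    int verbosityLevel = 1;                   // forwarded to the RTSPClient constructor
    unsigned short tunnelOverHTTPPortNum = 0; // 0 = no RTSP-over-HTTP tunnelling (live555 declares this parameter as portNumBits)
    void* args = nullptr;                     // opaque handle passed back through the rtsp_client_*_callback functions
};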
@@ -159,25 +168,25 @@ class DummySink: public MediaSink { public: - static DummySink* createNew(UsageEnvironment& env, - const RTSPConfig& _rtspConfig, - MediaSubsession& subsession, // identifies the kind of data that's being received - char const* streamId = NULL); // identifies the stream itself (optional) + static DummySink* createNew(UsageEnvironment& env, + const PL_RTSPClient_Config& _rtspConfig, + MediaSubsession& subsession, // identifies the kind of data that's being received + char const* streamId = NULL); // identifies the stream itself (optional) private: - DummySink(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId); + DummySink(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig, MediaSubsession& subsession, char const* streamId); // called only by "createNew()" virtual ~DummySink(); static void afterGettingFrame(void* clientData, unsigned frameSize, - unsigned numTruncatedBytes, - struct timeval presentationTime, - unsigned durationInMicroseconds); + unsigned numTruncatedBytes, + struct timeval presentationTime, + unsigned durationInMicroseconds); void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, - struct timeval presentationTime, unsigned durationInMicroseconds); + struct timeval presentationTime, unsigned durationInMicroseconds); public: - const RTSPConfig& rtspConfig; + const PL_RTSPClient_Config& rtspConfig; private: // redefined virtual functions: @@ -191,23 +200,23 @@ static unsigned rtspClientCount = 0; // Counts how many streams (i.e., "RTSPClient"s) are currently in use. -void openURL(UsageEnvironment& env, const RTSPConfig& _rtspConfig) +void openURL(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig) { // Begin by creating a "RTSPClient" object. Note that there is a separate "RTSPClient" object for each stream that we wish // to receive (even if more than stream uses the same "rtsp://" URL). RTSPClient* rtspClient = ourRTSPClient::createNew(env, _rtspConfig); if (rtspClient == NULL) - { - env << "Failed to create a RTSP client for URL \"" << _rtspConfig.rtspURL.c_str() << "\": " << env.getResultMsg() << "\n"; - return; - } + { + LOG_ERROR << "Failed to create a RTSP client for URL \"" << _rtspConfig.rtspURL.c_str() << "\": " << env.getResultMsg(); + return; + } ++rtspClientCount; // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream. // Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response. 
// Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop: - rtspClient->sendDescribeCommand(continueAfterDESCRIBE); + rtspClient->sendDescribeCommand(continueAfterDESCRIBE); } @@ -216,41 +225,42 @@ void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) { do - { - UsageEnvironment& env = rtspClient->envir(); // alias - StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - - if (resultCode != 0) { - env << *rtspClient << "Failed to get a SDP description: " << resultString << "\n"; - delete[] resultString; - break; - } + UsageEnvironment& env = rtspClient->envir(); // alias + StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - char* const sdpDescription = resultString; - env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n"; + if (resultCode != 0) + { + LOG_WARN << *rtspClient << "Failed to get a SDP description: " << resultString; + delete[] resultString; + break; + } - // Create a media session object from this SDP description: - scs.session = MediaSession::createNew(env, sdpDescription); - delete[] sdpDescription; // because we don't need it anymore - if (scs.session == NULL) - { - env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n"; - break; - } - else if (!scs.session->hasSubsessions()) - { - env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n"; - break; - } + char* const sdpDescription = resultString; + LOG_INFO << *rtspClient << "Got a SDP description:\n" << sdpDescription; - // Then, create and set up our data source objects for the session. We do this by iterating over the session's 'subsessions', - // calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one. - // (Each 'subsession' will have its own data source.) - scs.iter = new MediaSubsessionIterator(*scs.session); - setupNextSubsession(rtspClient); - return; - } while (0); + // Create a media session object from this SDP description: + scs.session = MediaSession::createNew(env, sdpDescription); + delete[] sdpDescription; // because we don't need it anymore + if (scs.session == NULL) + { + LOG_ERROR << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg(); + break; + } + else if (!scs.session->hasSubsessions()) + { + LOG_WARN << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)"; + break; + } + + // Then, create and set up our data source objects for the session. We do this by iterating over the session's 'subsessions', + // calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one. + // (Each 'subsession' will have its own data source.) + scs.iter = new MediaSubsessionIterator(*scs.session); + setupNextSubsession(rtspClient); + return; + } + while (0); // An unrecoverable error occurred with this stream. 
shutdownStream(rtspClient); @@ -258,319 +268,365 @@ void setupNextSubsession(RTSPClient* rtspClient) { - UsageEnvironment& env = rtspClient->envir(); // alias - StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - - scs.subsession = scs.iter->next(); - if (scs.subsession != NULL) { - if (!scs.subsession->initiate()) { - env << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n"; - setupNextSubsession(rtspClient); // give up on this subsession; go to the next one - } else { - env << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession ("; - if (scs.subsession->rtcpIsMuxed()) { - env << "client port " << scs.subsession->clientPortNum(); - } else { - env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1; - } - env << ")\n"; + UsageEnvironment& env = rtspClient->envir(); // alias + StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - // Continue setting up this subsession, by sending a RTSP "SETUP" command: - rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP); - } - return; - } + scs.subsession = scs.iter->next(); + if (scs.subsession != NULL) + { + if (!scs.subsession->initiate()) + { + LOG_ERROR << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg(); + setupNextSubsession(rtspClient); // give up on this subsession; go to the next one + } + else + { + LOG_INFO << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession ("; + if (scs.subsession->rtcpIsMuxed()) + LOG_INFO << "client port " << scs.subsession->clientPortNum(); + else + LOG_INFO << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1; + LOG_INFO << ")"; - // We've finished setting up all of the subsessions. Now, send a RTSP "PLAY" command to start the streaming: - if (scs.session->absStartTime() != NULL) { - // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command: - rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime()); - } else { - scs.duration = scs.session->playEndTime() - scs.session->playStartTime(); - rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY); - } + // Continue setting up this subsession, by sending a RTSP "SETUP" command: + rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP); + } + return; + } + + // We've finished setting up all of the subsessions. 
Now, send a RTSP "PLAY" command to start the streaming: + if (scs.session->absStartTime() != NULL) + { + // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command: + rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime()); + } + else + { + scs.duration = scs.session->playEndTime() - scs.session->playStartTime(); + rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY); + } } -void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) { - do { - UsageEnvironment& env = rtspClient->envir(); // alias - StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias +void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) +{ + do + { + UsageEnvironment& env = rtspClient->envir(); // alias + StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - if (resultCode != 0) { - env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n"; - break; - } + if (resultCode != 0) + { + LOG_ERROR << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString; + break; + } - env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession ("; - if (scs.subsession->rtcpIsMuxed()) { - env << "client port " << scs.subsession->clientPortNum(); - } else { - env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1; - } - env << ")\n"; + LOG_INFO << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession ("; + if (scs.subsession->rtcpIsMuxed()) + { + LOG_INFO << "client port " << scs.subsession->clientPortNum(); + } + else + { + LOG_INFO << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1; + } + LOG_INFO << ")"; - // Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it. - // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later, - // after we've sent a RTSP "PLAY" command.) + // Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it. + // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later, + // after we've sent a RTSP "PLAY" command.) 
- scs.subsession->sink = DummySink::createNew(env, ((ourRTSPClient*)rtspClient)->rtspConfig, - *scs.subsession, rtspClient->url()); - // perhaps use your own custom "MediaSink" subclass instead - if (scs.subsession->sink == NULL) { - env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession - << "\" subsession: " << env.getResultMsg() << "\n"; - break; - } + scs.subsession->sink = DummySink::createNew(env, ((ourRTSPClient*)rtspClient)->rtspConfig, + *scs.subsession, rtspClient->url()); + // perhaps use your own custom "MediaSink" subclass instead + if (scs.subsession->sink == NULL) + { + LOG_ERROR << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession + << "\" subsession: " << env.getResultMsg(); + break; + } - env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n"; - scs.subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession - scs.subsession->sink->startPlaying(*(scs.subsession->readSource()), - subsessionAfterPlaying, scs.subsession); - // Also set a handler to be called if a RTCP "BYE" arrives for this subsession: - if (scs.subsession->rtcpInstance() != NULL) { - scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession); - } - } while (0); - delete[] resultString; + LOG_INFO << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession"; + scs.subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession + scs.subsession->sink->startPlaying(*(scs.subsession->readSource()), + subsessionAfterPlaying, scs.subsession); + // Also set a handler to be called if a RTCP "BYE" arrives for this subsession: + if (scs.subsession->rtcpInstance() != NULL) + { + scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession); + } + } + while (0); + delete[] resultString; - // Set up the next subsession, if any: - setupNextSubsession(rtspClient); + // Set up the next subsession, if any: + setupNextSubsession(rtspClient); } -void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString) { - Boolean success = False; +void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString) +{ + Boolean success = False; - do { - UsageEnvironment& env = rtspClient->envir(); // alias - StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias + do + { + UsageEnvironment& env = rtspClient->envir(); // alias + StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - if (resultCode != 0) { - env << *rtspClient << "Failed to start playing session: " << resultString << "\n"; - break; - } + if (resultCode != 0) + { + LOG_ERROR << *rtspClient << "Failed to start playing session: " << resultString; + break; + } - // Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end - // using a RTCP "BYE"). This is optional. If, instead, you want to keep the stream active - e.g., so you can later - // 'seek' back within it and do another RTSP "PLAY" - then you can omit this code. - // (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.) - if (scs.duration > 0) { - unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration. (This is optional.) 
- scs.duration += delaySlop; - unsigned uSecsToDelay = (unsigned)(scs.duration*1000000); - scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient); - } + // Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end + // using a RTCP "BYE"). This is optional. If, instead, you want to keep the stream active - e.g., so you can later + // 'seek' back within it and do another RTSP "PLAY" - then you can omit this code. + // (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.) + if (scs.duration > 0) + { + unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration. (This is optional.) + scs.duration += delaySlop; + unsigned uSecsToDelay = (unsigned)(scs.duration*1000000); + scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient); + } - env << *rtspClient << "Started playing session"; - if (scs.duration > 0) { - env << " (for up to " << scs.duration << " seconds)"; - } - env << "...\n"; + LOG_INFO << *rtspClient << "Started playing session"; + if (scs.duration > 0) + { + LOG_INFO << " (for up to " << scs.duration << " seconds)"; + } + LOG_INFO << "..."; - success = True; - } while (0); - delete[] resultString; + success = True; + } + while (0); + delete[] resultString; - if (!success) { - // An unrecoverable error occurred with this stream. - shutdownStream(rtspClient); - } + if (!success) + { + // An unrecoverable error occurred with this stream. + shutdownStream(rtspClient); + } } // Implementation of the other event handlers: -void subsessionAfterPlaying(void* clientData) { - MediaSubsession* subsession = (MediaSubsession*)clientData; - RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr); +void subsessionAfterPlaying(void* clientData) +{ + MediaSubsession* subsession = (MediaSubsession*)clientData; + RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr); - // Begin by closing this subsession's stream: - Medium::close(subsession->sink); - subsession->sink = NULL; - - // Next, check whether *all* subsessions' streams have now been closed: - MediaSession& session = subsession->parentSession(); - MediaSubsessionIterator iter(session); - while ((subsession = iter.next()) != NULL) { - if (subsession->sink != NULL) return; // this subsession is still active - } - - // All subsessions' streams have now been closed, so shutdown the client: - shutdownStream(rtspClient); -} - -void subsessionByeHandler(void* clientData) { - MediaSubsession* subsession = (MediaSubsession*)clientData; - RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr; - UsageEnvironment& env = rtspClient->envir(); // alias - - env << *rtspClient << "Received RTCP \"BYE\" on \"" << *subsession << "\" subsession\n"; - - // Now act as if the subsession had closed: - subsessionAfterPlaying(subsession); -} - -void streamTimerHandler(void* clientData) { - ourRTSPClient* rtspClient = (ourRTSPClient*)clientData; - StreamClientState& scs = rtspClient->scs; // alias - - scs.streamTimerTask = NULL; - - // Shut down the stream: - shutdownStream(rtspClient); -} - -void shutdownStream(RTSPClient* rtspClient, int exitCode) { - UsageEnvironment& env = rtspClient->envir(); // alias - StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias - - // First, check whether any subsessions have still to be closed: - if (scs.session != NULL) { 
- Boolean someSubsessionsWereActive = False; - MediaSubsessionIterator iter(*scs.session); - MediaSubsession* subsession; - - while ((subsession = iter.next()) != NULL) { - if (subsession->sink != NULL) { + // Begin by closing this subsession's stream: Medium::close(subsession->sink); subsession->sink = NULL; - if (subsession->rtcpInstance() != NULL) { - subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN" - } + // Next, check whether *all* subsessions' streams have now been closed: + MediaSession& session = subsession->parentSession(); + MediaSubsessionIterator iter(session); + while ((subsession = iter.next()) != NULL) + { + if (subsession->sink != NULL) return; // this subsession is still active + } - someSubsessionsWereActive = True; - } - } + // All subsessions' streams have now been closed, so shutdown the client: + shutdownStream(rtspClient); +} - if (someSubsessionsWereActive) { - // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream. - // Don't bother handling the response to the "TEARDOWN". - rtspClient->sendTeardownCommand(*scs.session, NULL); - } - } +void subsessionByeHandler(void* clientData) +{ + MediaSubsession* subsession = (MediaSubsession*)clientData; + RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr; + UsageEnvironment& env = rtspClient->envir(); // alias - env << *rtspClient << "Closing the stream.\n"; - Medium::close(rtspClient); - // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed. + LOG_INFO << *rtspClient << "Received RTCP \"BYE\" on \"" << *subsession << "\" subsession"; - if (--rtspClientCount == 0) { - // The final stream has ended, so exit the application now. - // (Of course, if you're embedding this code into your own application, you might want to comment this out, - // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".) - exit(exitCode); - } + // Now act as if the subsession had closed: + subsessionAfterPlaying(subsession); +} + +void streamTimerHandler(void* clientData) +{ + ourRTSPClient* rtspClient = (ourRTSPClient*)clientData; + StreamClientState& scs = rtspClient->scs; // alias + + scs.streamTimerTask = NULL; + + // Shut down the stream: + shutdownStream(rtspClient); +} + +void shutdownStream(RTSPClient* rtspClient, int exitCode) +{ + UsageEnvironment& env = rtspClient->envir(); // alias + StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias + + // First, check whether any subsessions have still to be closed: + if (scs.session != NULL) + { + Boolean someSubsessionsWereActive = False; + MediaSubsessionIterator iter(*scs.session); + MediaSubsession* subsession; + + while ((subsession = iter.next()) != NULL) + { + if (subsession->sink != NULL) + { + Medium::close(subsession->sink); + subsession->sink = NULL; + + if (subsession->rtcpInstance() != NULL) + { + subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN" + } + + someSubsessionsWereActive = True; + } + } + + if (someSubsessionsWereActive) + { + // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream. + // Don't bother handling the response to the "TEARDOWN". 
+ rtspClient->sendTeardownCommand(*scs.session, NULL); + } + } + + LOG_NOTICE << *rtspClient << "Closing the stream."; + Medium::close(rtspClient); + // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed. + + if (--rtspClientCount == 0) + { + // The final stream has ended, so exit the application now. + // (Of course, if you're embedding this code into your own application, you might want to comment this out, + // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".) + exit(exitCode); + } } // Implementation of "ourRTSPClient": -ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig) +ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig) { - return new ourRTSPClient(env, _rtspConfig); + return new ourRTSPClient(env, _rtspConfig); } -ourRTSPClient::ourRTSPClient(UsageEnvironment& env, const RTSPConfig& _rtspConfig) - : RTSPClient(env, _rtspConfig.rtspURL.c_str(), _rtspConfig.verbosityLevel, _rtspConfig.progName.c_str(), - _rtspConfig.tunnelOverHTTPPortNum, -1), rtspConfig(_rtspConfig) +ourRTSPClient::ourRTSPClient(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig) + : RTSPClient(env, _rtspConfig.rtspURL.c_str(), _rtspConfig.verbosityLevel, _rtspConfig.progName.c_str(), + _rtspConfig.tunnelOverHTTPPortNum, -1), rtspConfig(_rtspConfig) { } -ourRTSPClient::~ourRTSPClient() { +ourRTSPClient::~ourRTSPClient() +{ } // Implementation of "StreamClientState": StreamClientState::StreamClientState() - : iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL), duration(0.0) { + : iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL), duration(0.0) +{ } -StreamClientState::~StreamClientState() { - delete iter; - if (session != NULL) { - // We also need to delete "session", and unschedule "streamTimerTask" (if set) - UsageEnvironment& env = session->envir(); // alias +StreamClientState::~StreamClientState() +{ + delete iter; + if (session != NULL) + { + // We also need to delete "session", and unschedule "streamTimerTask" (if set) + UsageEnvironment& env = session->envir(); // alias - env.taskScheduler().unscheduleDelayedTask(streamTimerTask); - Medium::close(session); - } + env.taskScheduler().unscheduleDelayedTask(streamTimerTask); + Medium::close(session); + } } // Implementation of "DummySink": -DummySink* DummySink::createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId) +DummySink* DummySink::createNew(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig, MediaSubsession& subsession, char const* streamId) { - return new DummySink(env, _rtspConfig, subsession, streamId); + return new DummySink(env, _rtspConfig, subsession, streamId); } -DummySink::DummySink(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId) - : MediaSink(env), rtspConfig(_rtspConfig), fSubsession(subsession) +DummySink::DummySink(UsageEnvironment& env, const PL_RTSPClient_Config& _rtspConfig, MediaSubsession& subsession, char const* streamId) + : MediaSink(env), rtspConfig(_rtspConfig), fSubsession(subsession) { fStreamId = strDup(streamId); fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE]; // ffmpeg need AUX header if (rtspConfig.aux) - { - fReceiveBuffer[0]=0x00; fReceiveBuffer[1]=0x00; fReceiveBuffer[2]=0x00; fReceiveBuffer[3]=0x01; - } + { + 
fReceiveBuffer[0]=0x00; + fReceiveBuffer[1]=0x00; + fReceiveBuffer[2]=0x00; + fReceiveBuffer[3]=0x01; + } //parse sdp const char* strSDP = fSubsession.savedSDPLines(); rtsp_client_sdp_callback(rtspConfig.args, strSDP); - + const char* strFmtp = fSubsession.fmtp_spropparametersets(); rtsp_client_fmtp_callback(rtspConfig.args, strFmtp); //std::cout << strFmtp << std::endl; } -DummySink::~DummySink() { - delete[] fReceiveBuffer; - delete[] fStreamId; +DummySink::~DummySink() +{ + delete[] fReceiveBuffer; + delete[] fStreamId; } void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, - struct timeval presentationTime, unsigned durationInMicroseconds) + struct timeval presentationTime, unsigned durationInMicroseconds) { DummySink* sink = (DummySink*)clientData; if (frameSize > 0) - { - unsigned s = frameSize; - if (sink->rtspConfig.aux) - s += 4; - rtsp_client_frame_callback(sink->rtspConfig.args, sink->fReceiveBuffer, s, presentationTime); - } - + { + unsigned s = frameSize; + if (sink->rtspConfig.aux) + s += 4; + rtsp_client_frame_callback(sink->rtspConfig.args, sink->fReceiveBuffer, s, presentationTime); + } + sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); } void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, - struct timeval presentationTime, unsigned /*durationInMicroseconds*/) { - // We've just received a frame of data. (Optionally) print out information about it: + struct timeval presentationTime, unsigned /*durationInMicroseconds*/) +{ + // We've just received a frame of data. (Optionally) print out information about it: #ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME - if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; "; - envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes"; - if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)"; - char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time - sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); - envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr; - if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) { - envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized - } + if (fStreamId != NULL) + LOG_DEBUG << "Stream \"" << fStreamId << "\"; "; + LOG_DEBUG << "\t" << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes"; + if (numTruncatedBytes > 0) + LOG_DEBUG << " (with " << numTruncatedBytes << " bytes truncated)"; + + char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time + sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); + LOG_DEBUG << "\tPresentation time: " << (int)presentationTime.tv_sec << "." 
<< uSecsStr; + if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) + { + LOG_DEBUG << "\tPTS not RTCP-synchronized"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized + } #ifdef DEBUG_PRINT_NPT - envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime); + LOG_DEBUG << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime); #endif - envir() << "\n"; #endif - - // Then continue, to request the next frame of data: - continuePlaying(); + + // Then continue, to request the next frame of data: + continuePlaying(); } Boolean DummySink::continuePlaying() { - if (fSource == NULL) return False; // sanity check (should not happen) + if (fSource == NULL) + return False; // sanity check (should not happen) rtsp_client_continue_callback(rtspConfig.args); @@ -580,8 +636,8 @@ // Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives: fSource->getNextFrame(b, DUMMY_SINK_RECEIVE_BUFFER_SIZE, - afterGettingFrame, this, - onSourceClosure, this); + afterGettingFrame, this, + onSourceClosure, this); return True; } -- Gitblit v1.8.0
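DummySink forwards everything to the surrounding pipeline through four free functions whose prototypes live outside this file. The declarations below are inferred from the call sites in this patch; only the names and the void* args handle appear in the diff, so the parameter types are assumptions:

#include <sys/types.h>   // u_int8_t
#include <sys/time.h>    // struct timeval

void rtsp_client_sdp_callback(void* args, const char* sdp);        // full SDP for the session
void rtsp_client_fmtp_callback(void* args, const char* fmtp);      // sprop-parameter-sets string from the SDP
void rtsp_client_frame_callback(void* args, u_int8_t* buffer, unsigned buffSize,
                                struct timeval presentationTime);  // one received frame (plus 4 start-code bytes when aux is set)
void rtsp_client_continue_callback(void* args);                    // invoked from continuePlaying() before each getNextFrame() request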
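The fmtp string handed to rtsp_client_fmtp_callback() is the base64-encoded sprop-parameter-sets value from the SDP. A consumer that needs the raw SPS/PPS NAL units (for example, to prime an H.264 decoder before the first IDR frame arrives) could decode it with live555's own helper; handleFmtp below is an illustrative name, not part of this patch:

#include "H264VideoRTPSource.hh"   // parseSPropParameterSets(), SPropRecord

void handleFmtp(const char* strFmtp)
{
    unsigned numSPropRecords = 0;
    SPropRecord* records = parseSPropParameterSets(strFmtp, numSPropRecords);
    for (unsigned i = 0; i < numSPropRecords; ++i)
    {
        // records[i].sPropBytes / records[i].sPropLength hold one raw parameter set (SPS or PPS);
        // prepend 00 00 00 01 before feeding it to an Annex-B decoder such as ffmpeg's h264 parser.
    }
    delete[] records;   // parseSPropParameterSets() allocates the array with new[]
}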
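When rtspConfig.aux is set, the constructor writes a 00 00 00 01 start code into the first four bytes of fReceiveBuffer and the static afterGettingFrame() reports frameSize + 4 bytes, so rtsp_client_frame_callback() receives Annex-B framing that ffmpeg accepts directly. That only works if live555 writes each NAL unit after those four bytes; the snippet below is a sketch of how continuePlaying() is assumed to set up the request (the actual lines sit in unchanged context that this diff does not show):

// Inside DummySink::continuePlaying(), before handing the buffer to live555:
u_int8_t* payload = fReceiveBuffer + (rtspConfig.aux ? 4 : 0);                  // NAL unit lands after the start code
unsigned capacity = DUMMY_SINK_RECEIVE_BUFFER_SIZE - (rtspConfig.aux ? 4 : 0);  // keep room for the 4 prefix bytes

fSource->getNextFrame(payload, capacity,
                      afterGettingFrame, this,
                      onSourceClosure, this);
// afterGettingFrame() then reports fReceiveBuffer with frameSize + 4 bytes,
// i.e. "00 00 00 01" followed by the NAL unit.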