From e53992ed5cc1615ac99ac3ba2146de0175d3c770 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Mon, 26 Dec 2016 11:16:40 +0800
Subject: [PATCH] begin to remove MyH264FramedSource

---
 RtspFace/PL_RTSPServer.cpp |  192 ++++++++++++++++++++++++++++++-----------------
 1 file changed, 123 insertions(+), 69 deletions(-)

diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index 936b31d..5ea97c2 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -3,52 +3,12 @@
 #include <liveMedia.hh>
 #include <BasicUsageEnvironment.hh>
 
-class MyH264FramedSource : public FramedSource
+class MyH264FramedSource;
+
+struct RTSPServer_Internal
 {
-public:
-	static MyH264FramedSource* createNew(UsageEnvironment& env);
-
-protected:
-	MyH264FramedSource(UsageEnvironment& env)
-	virtual ~MyH264FramedSource()
-
-	// overide FramedSource
-	virtual void doGetNextFrame()
-	{
-		// deliverFrame
-		//if (fFrameSize > 0)
-		//{
-		//	// send Frame to the consumer
-		//	FramedSource::afterGetting(this);			
-		//}
-		
-		
-		// isCurrentlyAwaitingData
-			//if (frame->m_size > fMaxSize) 
-			//{
-			//	fFrameSize = fMaxSize;
-			//	fNumTruncatedBytes = frame->m_size - fMaxSize;
-			//} 
-			//else 
-			//{
-			//	fFrameSize = frame->m_size;
-			//}
-		//memcpy(fTo, frame->m_buffer, fFrameSize);
-		//if (fFrameSize > 0)
-		//	FramedSource::afterGetting(this);
-	}
-	
-	virtual void doStopGettingFrames()
-	{
-		FramedSource::doStopGettingFrames();
-	}
-};
-
-struct PL_RTSPServer_Internal
-{
-	//uint8_t buffer[1920*1080*4];
-	//size_t buffSize;
-	//size_t buffSizeMax;
+	uint8_t* buffer;
+	size_t buffSize;
 
 	bool payError;
 	pthread_t live_daemon_thid;
@@ -73,16 +33,18 @@
 	
 	char descriptionString[1024];
 	
-	PL_RTSPServer_Internal() : 
-		//buffSize(0), buffSizeMax(sizeof(buffer)), 
+	MyH264FramedSource* pMyH264FramedSource;
+	
+	RTSPServer_Internal() : 
+		buffer(nullptr), buffSize(0), 
 		payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false), 
 		env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr), 
-		rtspServer(nullptr);
+		rtspServer(nullptr)
 	{
 		pthread_mutex_init(frame_mutex, NULL);
 	}
 	
-	~PL_RTSPServer_Internal()
+	~RTSPServer_Internal()
 	{
 		if (frame_mutex != nullptr)
 		{
@@ -94,7 +56,8 @@
 	
 	void reset()
 	{
-		//buffSize = 0;
+		buffer = nullptr;
+		buffSize = 0;
 
 		payError = true;
 
@@ -118,7 +81,95 @@
 		rtspServer = nullptr;
 		
 		strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
+		
+		pMyH264FramedSource = nullptr;
 	}
+};
+
+
+class MyH264FramedSource : public FramedSource
+{
+public:
+	static MyH264FramedSource* createNew(UsageEnvironment& _env, RTSPServer_Internal& _in)
+	{
+		return new MyH264FramedSource(_env, _in);
+	}
+	
+	// deliver frame to the sink
+	bool deliverFrame()
+	{
+		bool ret = false;
+		if (isCurrentlyAwaitingData()) 
+		{
+			fDurationInMicroseconds = 0;
+			fFrameSize = 0;
+			
+			if (in.buffSize > fMaxSize) 
+			{
+				fFrameSize = fMaxSize;
+				fNumTruncatedBytes = in.buffSize - fMaxSize;
+			} 
+			else 
+			{
+				fFrameSize = in.buffSize;
+			}
+			
+			if (fFrameSize > 0)
+			{
+				memcpy(fTo, in.buffer, fFrameSize);
+				
+				int rc = pthread_mutex_unlock(in.frame_mutex);
+				if(rc != 0)
+				{
+					printf("pthread_mutex_unlock frame_mutex: %s\n", strerror(rc));
+					return false;
+				}
+				
+				ret = true;
+			}
+		}
+
+		return ret;
+	}
+
+protected:
+	MyH264FramedSource(UsageEnvironment& _env, RTSPServer_Internal& _in) : 
+		FramedSource(_env), env(_env), in(_in)
+	{
+	}
+	
+	virtual ~MyH264FramedSource()
+	{
+	}
+
+	// overide FramedSource
+	virtual void doGetNextFrame()
+	{
+		printf("MyH264FramedSource::doGetNextFrame\n");
+		
+		int ret = pthread_mutex_lock(in.frame_mutex);
+		if(ret != 0)
+		{
+			printf("pthread_mutex_lock frame_mutex: %s\n", strerror(ret));
+			return;
+		}
+
+		// deliverFrame
+		//if (fFrameSize > 0)
+		//{
+			// send Frame to the consumer
+			FramedSource::afterGetting(this);			
+		//}
+	}
+	
+	virtual void doStopGettingFrames()
+	{
+		FramedSource::doStopGettingFrames();
+	}
+	
+private:
+	UsageEnvironment& env;
+	RTSPServer_Internal& in;
 };
 
 PipeLineElem* create_PL_RTSPServer()
@@ -126,19 +177,19 @@
 	return new PL_RTSPServer;
 }
 
-PL_RTSPServer::PL_RTSPServer() : internal(new PL_RTSPServer_Internal)
+PL_RTSPServer::PL_RTSPServer() : internal(new RTSPServer_Internal)
 {
 }
 
 PL_RTSPServer::~PL_RTSPServer()
 {
-	delete (PL_RTSPServer_Internal*)internal;
+	delete (RTSPServer_Internal*)internal;
 	internal= nullptr;
 }
 
 void* live_daemon_thd(void* arg)
 {
-	RTSPClient_Internal* in = (RTSPClient_Internal*)arg;
+	RTSPServer_Internal* in = (RTSPServer_Internal*)arg;
 	
 	// Begin by setting up our usage environment:
 	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
@@ -153,11 +204,11 @@
 #endif
 
 	// Create the RTSP server:
-	in->rtspServer = RTSPServer::createNew(*env, 8554, authDB);
-	if (rtspServer == NULL)
+	in->rtspServer = RTSPServer::createNew(*(in->env), 8554, in->authDB);
+	if (in->rtspServer == NULL)
 	{
-		*(in->env) << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
-		return;
+		*(in->env) << "Failed to create RTSP server: " << in->env->getResultMsg() << "\n";
+		return nullptr;
 	}
 
 	// Set up each of the possible streams that can be served by the
@@ -167,32 +218,32 @@
 	
     char const* streamName = "plH264Encoder";
     ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
-    sms->addSubsession(MyH264FramedSource::createNew(*(in->env), in));
+	in->pMyH264FramedSource = MyH264FramedSource::createNew(*(in->env), *in);
+    sms->addSubsession(in->pMyH264FramedSource);
     in->rtspServer->addServerMediaSession(sms);
 	
 	// announceStream
-	char* url = rtspServer->rtspURL(sms);
-	*(in->env) << "\n\"" << streamName << "\" stream, from the file \"" << inputFileName << "\"\n";
-	*(in->env) << "Play this stream using the URL \"" << url << "\"\n";
+	char* url = in->rtspServer->rtspURL(sms);
+	*(in->env) << "Play this stream using the URL " << url << "\n";
 	delete[] url;
 	
 	// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
 	// Try first with the default HTTP port (80), and then with the alternative HTTP
 	// port numbers (8000 and 8080).
 
-	if (rtspServer->setUpTunnelingOverHTTP(80))
-		*(in->env) << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
+	if (in->rtspServer->setUpTunnelingOverHTTP(80))
+		*(in->env) << "\n(We use port " << in->rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
 	else
 		*(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";
 
 	in->live_daemon_running = true;
-	env->taskScheduler().doEventLoop(); // does not return
+	in->env->taskScheduler().doEventLoop(); // does not return
 	in->live_daemon_running = false;
 }
 
 bool PL_RTSPServer::init(void* args)
 {
-	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 	in->reset();
 	
 	int ret = pthread_mutex_lock(in->frame_mutex);
@@ -214,21 +265,24 @@
 
 void PL_RTSPServer::finit()
 {
-	RTSPClient_Internal* in = (RTSPClient_Internal*)internal;
+	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 
 	pthread_join(in->live_daemon_thid, NULL);
 }
 
 bool PL_RTSPServer::pay(const PipeMaterial& pm)
 {
-	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 
-	return true;
+	in->buffer = pm.buffer;
+	in->buffSize = pm.buffSize;
+	
+	return in->pMyH264FramedSource->deliverFrame();
 }
 
 bool PL_RTSPServer::gain(PipeMaterial& pm)
 {
-	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 
 	pm.buffer = nullptr;
 	pm.buffSize = 0;

--
Gitblit v1.8.0