From e53992ed5cc1615ac99ac3ba2146de0175d3c770 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: 星期一, 26 十二月 2016 11:16:40 +0800
Subject: [PATCH] begin to remove MyH264FramedSource
---
RtspFace/make.sh | 9 +
RtspFace/PL_RTSPServer.cpp | 192 ++++++++++++++++++++++++++++++-----------------
RtspFace/main.cpp | 9 +
3 files changed, 136 insertions(+), 74 deletions(-)
diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index 936b31d..5ea97c2 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -3,52 +3,12 @@
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
-class MyH264FramedSource : public FramedSource
+class MyH264FramedSource;
+
+struct RTSPServer_Internal
{
-public:
- static MyH264FramedSource* createNew(UsageEnvironment& env);
-
-protected:
- MyH264FramedSource(UsageEnvironment& env)
- virtual ~MyH264FramedSource()
-
- // overide FramedSource
- virtual void doGetNextFrame()
- {
- // deliverFrame
- //if (fFrameSize > 0)
- //{
- // // send Frame to the consumer
- // FramedSource::afterGetting(this);
- //}
-
-
- // isCurrentlyAwaitingData
- //if (frame->m_size > fMaxSize)
- //{
- // fFrameSize = fMaxSize;
- // fNumTruncatedBytes = frame->m_size - fMaxSize;
- //}
- //else
- //{
- // fFrameSize = frame->m_size;
- //}
- //memcpy(fTo, frame->m_buffer, fFrameSize);
- //if (fFrameSize > 0)
- // FramedSource::afterGetting(this);
- }
-
- virtual void doStopGettingFrames()
- {
- FramedSource::doStopGettingFrames();
- }
-};
-
-struct PL_RTSPServer_Internal
-{
- //uint8_t buffer[1920*1080*4];
- //size_t buffSize;
- //size_t buffSizeMax;
+ uint8_t* buffer;
+ size_t buffSize;
bool payError;
pthread_t live_daemon_thid;
@@ -73,16 +33,18 @@
char descriptionString[1024];
- PL_RTSPServer_Internal() :
- //buffSize(0), buffSizeMax(sizeof(buffer)),
+ MyH264FramedSource* pMyH264FramedSource;
+
+ RTSPServer_Internal() :
+ buffer(nullptr), buffSize(0),
payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false),
env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr),
- rtspServer(nullptr);
+ rtspServer(nullptr)
{
pthread_mutex_init(frame_mutex, NULL);
}
- ~PL_RTSPServer_Internal()
+ ~RTSPServer_Internal()
{
if (frame_mutex != nullptr)
{
@@ -94,7 +56,8 @@
void reset()
{
- //buffSize = 0;
+ buffer = nullptr;
+ buffSize = 0;
payError = true;
@@ -118,7 +81,95 @@
rtspServer = nullptr;
strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
+
+ pMyH264FramedSource = nullptr;
}
+};
+
+
+class MyH264FramedSource : public FramedSource
+{
+public:
+ static MyH264FramedSource* createNew(UsageEnvironment& _env, RTSPServer_Internal& _in)
+ {
+ return new MyH264FramedSource(_env, _in);
+ }
+
+ // deliver frame to the sink
+ bool deliverFrame()
+ {
+ int ret = false;
+ if (isCurrentlyAwaitingData())
+ {
+ fDurationInMicroseconds = 0;
+ fFrameSize = 0;
+
+ if (in.buffSize > fMaxSize)
+ {
+ fFrameSize = fMaxSize;
+ fNumTruncatedBytes = in.buffSize - fMaxSize;
+ }
+ else
+ {
+ fFrameSize = in.buffSize;
+ }
+
+ if (fFrameSize > 0)
+ {
+ memcpy(fTo, in.buffer, fFrameSize);
+
+ int err = pthread_mutex_unlock(in.frame_mutex);
+ if(err != 0)
+ {
+ printf("pthread_mutex_unlock frame_mutex: %s\n", strerror(err));
+ return false;
+ }
+
+ ret = true;
+ }
+ }
+
+ return ret;
+ }
+
+protected:
+ MyH264FramedSource(UsageEnvironment& _env, RTSPServer_Internal& _in) :
+ FramedSource(_env), env(_env), in(_in)
+ {
+ }
+
+ virtual ~MyH264FramedSource()
+ {
+ }
+
+ // overide FramedSource
+ virtual void doGetNextFrame()
+ {
+ printf("MyH264FramedSource::doGetNextFrame\n");
+
+ int ret = pthread_mutex_lock(in.frame_mutex);
+ if(ret != 0)
+ {
+ printf("pthread_mutex_lock frame_mutex: %s\n", strerror(ret));
+ return;
+ }
+
+ // deliverFrame
+ //if (fFrameSize > 0)
+ //{
+ // send Frame to the consumer
+ FramedSource::afterGetting(this);
+ //}
+ }
+
+ virtual void doStopGettingFrames()
+ {
+ FramedSource::doStopGettingFrames();
+ }
+
+private:
+ UsageEnvironment& env;
+ RTSPServer_Internal& in;
};
PipeLineElem* create_PL_RTSPServer()
@@ -126,19 +177,19 @@
return new PL_RTSPServer;
}
-PL_RTSPServer::PL_RTSPServer() : internal(new PL_RTSPServer_Internal)
+PL_RTSPServer::PL_RTSPServer() : internal(new RTSPServer_Internal)
{
}
PL_RTSPServer::~PL_RTSPServer()
{
- delete (PL_RTSPServer_Internal*)internal;
+ delete (RTSPServer_Internal*)internal;
internal= nullptr;
}
void* live_daemon_thd(void* arg)
{
- RTSPClient_Internal* in = (RTSPClient_Internal*)arg;
+ RTSPServer_Internal* in = (RTSPServer_Internal*)arg;
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
@@ -153,11 +204,11 @@
#endif
// Create the RTSP server:
- in->rtspServer = RTSPServer::createNew(*env, 8554, authDB);
- if (rtspServer == NULL)
+ in->rtspServer = RTSPServer::createNew(*(in->env), 8554, in->authDB);
+ if (in->rtspServer == NULL)
{
- *(in->env) << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
- return;
+ *(in->env) << "Failed to create RTSP server: " << in->env->getResultMsg() << "\n";
+ return nullptr;
}
// Set up each of the possible streams that can be served by the
@@ -167,32 +218,32 @@
char const* streamName = "plH264Encoder";
ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
- sms->addSubsession(MyH264FramedSource::createNew(*(in->env), in));
+ in->pMyH264FramedSource = MyH264FramedSource::createNew(*(in->env), *in);
+ sms->addSubsession(in->pMyH264FramedSource);
in->rtspServer->addServerMediaSession(sms);
// announceStream
- char* url = rtspServer->rtspURL(sms);
- *(in->env) << "\n\"" << streamName << "\" stream, from the file \"" << inputFileName << "\"\n";
- *(in->env) << "Play this stream using the URL \"" << url << "\"\n";
+ char* url = in->rtspServer->rtspURL(sms);
+ *(in->env) << "Play this stream using the URL " << url << "\n";
delete[] url;
// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
// Try first with the default HTTP port (80), and then with the alternative HTTP
// port numbers (8000 and 8080).
- if (rtspServer->setUpTunnelingOverHTTP(80))
- *(in->env) << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
+ if (in->rtspServer->setUpTunnelingOverHTTP(80))
+ *(in->env) << "\n(We use port " << in->rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
else
*(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";
in->live_daemon_running = true;
- env->taskScheduler().doEventLoop(); // does not return
+ in->env->taskScheduler().doEventLoop(); // does not return
in->live_daemon_running = false;
}
bool PL_RTSPServer::init(void* args)
{
- PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+ RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
in->reset();
int ret = pthread_mutex_lock(in->frame_mutex);
@@ -214,21 +265,24 @@
void PL_RTSPServer::finit()
{
- RTSPClient_Internal* in = (RTSPClient_Internal*)internal;
+ RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
pthread_join(in->live_daemon_thid, NULL);
}
bool PL_RTSPServer::pay(const PipeMaterial& pm)
{
- PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+ RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
- return true;
+ in->buffer = pm.buffer;
+ in->buffSize = pm.buffSize;
+
+ return in->pMyH264FramedSource->deliverFrame();
}
bool PL_RTSPServer::gain(PipeMaterial& pm)
{
- PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+ RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
pm.buffer = nullptr;
pm.buffSize = 0;
diff --git a/RtspFace/main.cpp b/RtspFace/main.cpp
index b31af5c..c7e3c2b 100644
--- a/RtspFace/main.cpp
+++ b/RtspFace/main.cpp
@@ -1,8 +1,10 @@
#include "PipeLine.h"
#include "PL_RTSPClient.h"
+#include "PL_RTSPServer.h"
#include "PL_H264Decoder.h"
#include "PL_H264Encoder.h"
#include "PL_AVFrameYUV420.h"
+#include "PL_AVFrameBGRA.h"
#include <iostream>
using namespace std;
@@ -33,8 +35,11 @@
//PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
//avFrameYUV420->init(nullptr);
- PL_H264Encoder* h264Encoder = (PL_H264Encoder*)pipeLine.push_elem("PL_H264Encoder");
- h264Encoder->init(nullptr);
+ //PL_H264Encoder* h264Encoder = (PL_H264Encoder*)pipeLine.push_elem("PL_H264Encoder");
+ //h264Encoder->init(nullptr);
+
+ PL_RTSPServer* rtspServer = (PL_RTSPServer*)pipeLine.push_elem("PL_RTSPServer");
+ rtspServer->init(nullptr);
while(true)
{
diff --git a/RtspFace/make.sh b/RtspFace/make.sh
index 4286377..bd54b93 100644
--- a/RtspFace/make.sh
+++ b/RtspFace/make.sh
@@ -28,12 +28,15 @@
g++ -g -c -std=c++11 main.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_RTSPClient.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_RTSPServer.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_H264Decoder.cpp $CFLAGS $CPPFLAGS
-g++ -g -c -std=c++11 PL_AVFrameBGRA.cpp $CFLAGS $CPPFLAGS
-g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_H264Encoder.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_AVFrameBGRA.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PipeLine.cpp $CFLAGS $CPPFLAGS
-g++ -g -std=c++11 main.o PL_RTSPClient.o PL_H264Decoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PL_H264Encoder.o PipeLine.o $LDFLAGS -o rtsp_face
+g++ -g -std=c++11 \
+ main.o PL_RTSPClient.o PL_RTSPServer.o PL_H264Decoder.o PL_H264Encoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PipeLine.o \
+ $LDFLAGS -o rtsp_face
#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib
#./rtsp_face rtsp://admin:admin12345@192.168.1.63:554/h264/ch1/main/av_stream
--
Gitblit v1.8.0