From 993a7850d6cffb341fefabb68fbb97168c4a461c Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Mon, 26 Dec 2016 16:27:16 +0800
Subject: [PATCH] RTSP server working

---
 RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.cpp |   29 +
 RtspFace/main.cpp                                       |    9 
 RtspFace/FFmpegRTSPServer/main.cpp                      |   78 +++
 RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.h           |  102 +++
 RtspFace/FFmpegRTSPServer/FFmpegH264Source.cpp          |   84 +++
 RtspFace/FFmpegRTSPServer/LiveRTSPServer.h              |   39 +
 RtspFace/FFmpegRTSPServer/FFmpegDecoder.cpp             |  148 +++++
 RtspFace/PL_RTSPClient.cpp                              |   15 
 RtspFace/FFmpegRTSPServer/LiveRTSPServer.cpp            |   79 +++
 RtspFace/make.sh                                        |   14 
 RtspFace/PL_RTSPServer.cpp                              |  219 ++-----
 RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.cpp         |  291 +++++++++++
 RtspFace/FFmpegRTSPServer/IEncoder.h                    |   19 
 RtspFace/FFmpegRTSPServer/FFmpegDecoder.h               |   77 ++
 RtspFace/live555/testProgs/testRTSPClient.hpp           |  229 ++++---
 RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.h   |   39 +
 RtspFace/PL_RTSPClient.h                                |    8 
 RtspFace/FFmpegRTSPServer/FFmpegH264Source.h            |   43 +
 18 files changed, 1,258 insertions(+), 264 deletions(-)

diff --git a/RtspFace/FFmpegRTSPServer/FFmpegDecoder.cpp b/RtspFace/FFmpegRTSPServer/FFmpegDecoder.cpp
new file mode 100644
index 0000000..dbe1feb
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegDecoder.cpp
@@ -0,0 +1,148 @@
+//
+//  FFmpegDecoder.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "FFmpegDecoder.h"
+
+namespace MESAI
+{
+    FFmpegDecoder::FFmpegDecoder(std::string path)
+	{
+        this->path = path;
+	}
+
+
+	void FFmpegDecoder::initialize()
+	{
+
+		// Initialize the FFmpeg environment
+	    av_register_all();
+	    avdevice_register_all();
+	    avcodec_register_all();
+	    avformat_network_init();
+	 
+	    const char  *filenameSrc = path.c_str();
+	 
+	    pFormatCtx = avformat_alloc_context();
+	 
+	    AVCodec * pCodec;
+	 
+	    if(avformat_open_input(&pFormatCtx,filenameSrc,NULL,NULL) != 0)
+	    {
+			//exception
+			return;
+	    } 
+	    
+	    if(avformat_find_stream_info(pFormatCtx, NULL) < 0)
+	    {
+	    	//exception
+	    	return;
+	    }
+	    
+	    av_dump_format(pFormatCtx, 0, filenameSrc, 0);
+	    
+	    videoStream = -1;
+	    
+	    for(int i=0; i < pFormatCtx->nb_streams; i++)
+	    {
+	        AVStream *st = pFormatCtx->streams[i];
+			enum AVMediaType type = st->codec->codec_type;
+			if (videoStream == -1)
+				if (avformat_match_stream_specifier(pFormatCtx, st, "vst") > 0)
+					videoStream = i;
+	    }
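+	    // the loop above seeds videoStream from the "vst" stream specifier;
+	    // av_find_best_stream() then validates it, or picks the best video
+	    // stream itself while videoStream is still -1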
+
+	   	videoStream = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO,videoStream, -1, NULL, 0);
+        
+	    if(videoStream == -1) 
+	    {
+	     	//exception
+	    	return;
+	    }
+
+	    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
+	 
+	    pCodec =avcodec_find_decoder(pCodecCtx->codec_id);
+	    if(pCodec==NULL)
+	    {
+	    	//exception
+	    	return;
+	    }
+
+	    pCodecCtx->codec_id = pCodec->id;
+	    pCodecCtx->workaround_bugs   = 1;
+	 
+	    if(avcodec_open2(pCodecCtx,pCodec,NULL) < 0)
+	    {
+	    	 //exception
+	    	return;
+	    }
+        
+        pFrameRGB = av_frame_alloc();
+        AVPixelFormat  pFormat = AV_PIX_FMT_BGR24;
+        uint8_t *fbuffer;
+        int numBytes;
+        numBytes = avpicture_get_size(pFormat,pCodecCtx->width,pCodecCtx->height) ; //AV_PIX_FMT_RGB24
+        fbuffer = (uint8_t *) av_malloc(numBytes*sizeof(uint8_t));
+        avpicture_fill((AVPicture *) pFrameRGB,fbuffer,pFormat,pCodecCtx->width,pCodecCtx->height);
+        
+        img_convert_ctx = sws_getCachedContext(NULL,pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,   pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL,NULL);
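+        // every decoded frame is converted to packed BGR24 before being handed
+        // to the onFrame callback; the encoder later converts it back to YUV420P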
+        
+        height = pCodecCtx->height;
+        width =  pCodecCtx->width;
+        bitrate =pCodecCtx->bit_rate;
+        GOP = pCodecCtx->gop_size;
+        AVRational fr = pFormatCtx->streams[videoStream]->avg_frame_rate;
+        frameRate = (fr.den != 0 && fr.num != 0) ? fr.num / fr.den : 25; // guard against a zero denominator; assume 25 fps if unknown
+        
+
+    }
+    
+    void FFmpegDecoder::setOnframeCallbackFunction(std::function<void(uint8_t *)> func)
+    {
+        onFrame = func;
+    }
+
+	
+	void FFmpegDecoder::playMedia()
+	{
+		AVPacket packet;
+        AVFrame * pFrame;
+		while((av_read_frame(pFormatCtx,&packet)>=0))
+    	{
+        	if(packet.buf != NULL && packet.stream_index == videoStream)
+        	{
+                pFrame = av_frame_alloc();
+                int frameFinished;
+                int decode_ret = avcodec_decode_video2(pCodecCtx,pFrame,&frameFinished,&packet);
+                if(decode_ret >= 0 && frameFinished)
+                {
+                    sws_scale(img_convert_ctx, ((AVPicture*)pFrame)->data, ((AVPicture*)pFrame)->linesize, 0, pCodecCtx->height, ((AVPicture *)pFrameRGB)->data, ((AVPicture *)pFrameRGB)->linesize);
+                    onFrame(((AVPicture *)pFrameRGB)->data[0]);
+                }
+                av_frame_unref(pFrame);
+                av_free(pFrame);
+        	}
+            av_free_packet(&packet); // free every packet, not only those on the video stream
+            usleep(((double)(1.0/frameRate))*1000000); // crude pacing at the source frame rate
+    	}
+
+
+	}
+	
+	void FFmpegDecoder::finalize()
+	{
+        sws_freeContext(img_convert_ctx);
+        av_freep(&(pFrameRGB->data[0]));
+        av_frame_unref(pFrameRGB);
+        av_free(pFrameRGB);
+        avcodec_close(pCodecCtx);
+        avformat_close_input(&pFormatCtx);
+    }
+
+}
diff --git a/RtspFace/FFmpegRTSPServer/FFmpegDecoder.h b/RtspFace/FFmpegRTSPServer/FFmpegDecoder.h
new file mode 100644
index 0000000..86dfc87
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegDecoder.h
@@ -0,0 +1,77 @@
+//
+//  FFmpegDecoder.h
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#ifndef MESAI_FFmpegDecoder_H
+#define MESAI_FFmpegDecoder_H
+
+#include <iostream>
+#include <string>
+#include <pthread.h>
+#include <functional>
+#include <unistd.h>
+
+extern "C" {
+#include <libavutil/mathematics.h>
+#include <libavutil/imgutils.h>
+#include <libswscale/swscale.h>
+#include <libavutil/pixdesc.h>
+#include <libavdevice/avdevice.h>
+#include <libavformat/avformat.h>
+#include <libavcodec/avcodec.h>
+}
+
+
+
+namespace MESAI
+{
+	class FFmpegDecoder
+	{
+		public:
+            FFmpegDecoder(std::string);
+			
+			~FFmpegDecoder() { } // no out-of-line definition exists
+        
+			void initialize();
+
+			void playMedia();
+
+			void finalize();
+        
+            void setOnframeCallbackFunction(std::function<void(uint8_t *)> func);
+        
+            int width;
+        
+            int height;
+        
+            int GOP;
+        
+            int frameRate;
+        
+            int bitrate;
+        
+            std::function<void(uint8_t *)> onFrame;
+
+		private:
+        
+            std::string path;
+        
+			AVCodecContext  *pCodecCtx;
+
+			AVFormatContext *pFormatCtx;
+        
+            AVFrame *pFrameRGB;
+        
+            struct SwsContext * img_convert_ctx;
+
+			int videoStream;
+        
+
+	};
+
+
+}
+
+#endif
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.cpp b/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.cpp
new file mode 100644
index 0000000..0b38140
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.cpp
@@ -0,0 +1,291 @@
+//
+//  FFmpegH264Encoder.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "FFmpegH264Encoder.h"
+
+namespace MESAI
+{
+	FFmpegH264Encoder::FFmpegH264Encoder()
+	{
+		pthread_mutex_init(&inqueue_mutex,NULL);
+		pthread_mutex_init(&outqueue_mutex,NULL);
+
+	}
+
+	void FFmpegH264Encoder::setCallbackFunctionFrameIsReady(std::function<void()> func)
+	{
+		onFrame = func;
+	}
+
+
+	void FFmpegH264Encoder::SendNewFrame(uint8_t * RGBFrame) {
+		pthread_mutex_lock(&inqueue_mutex);
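+		// cap the backlog at 30 frames; when the encoder falls behind, new frames are silently dropped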
+		if(inqueue.size()<30)
+		{
+			inqueue.push(RGBFrame);
+		}
+		pthread_mutex_unlock(&inqueue_mutex);
+	}
+
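+	// Frame flow: decoder thread -> inqueue -> run()/WriteFrame() on the encoder
+	// thread -> outqueue -> FFmpegH264Source::deliverFrame() on the live555 loop.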
+	void FFmpegH264Encoder::run()
+	{
+		while(true)
+		{
+			uint8_t * frame = NULL;
+			// check and pop under the lock; testing empty() unlocked races with SendNewFrame()
+			pthread_mutex_lock(&inqueue_mutex);
+			if(!inqueue.empty())
+			{
+				frame = inqueue.front();
+				inqueue.pop();
+			}
+			pthread_mutex_unlock(&inqueue_mutex);
+			if(frame != NULL)
+			{
+				WriteFrame(frame);
+			}
+			else
+			{
+				usleep(1000); // avoid busy-spinning while the queue is empty
+			}
+        }
+	}
+
+	void FFmpegH264Encoder::SetupCodec(const char *filename, int codec_id)
+	{
+		int ret;
+		m_sws_flags = SWS_BICUBIC;
+		m_frame_count=0;
+		
+		avcodec_register_all();
+		av_register_all();
+		
+		avformat_alloc_output_context2(&m_oc, NULL, NULL, filename);
+		
+		if (!m_oc) {
+			avformat_alloc_output_context2(&m_oc, NULL, "avi", filename);
+		}
+
+		if (!m_oc) {
+			return;
+		}
+
+		m_fmt = m_oc->oformat;
+		m_video_st = NULL;
+		m_fmt->video_codec = (AVCodecID)codec_id;
+		m_fmt->audio_codec = AV_CODEC_ID_NONE;
+
+		AVStream *st;
+
+		m_video_codec = avcodec_find_encoder(m_fmt->video_codec);
+		if (!(m_video_codec)) {
+				return;
+		}
+
+		st = avformat_new_stream(m_oc, m_video_codec);
+
+		if (!st) {
+				return;
+		}
+
+		st->id = m_oc->nb_streams-1;
+
+		m_c = st->codec;
+		
+		m_c->codec_id = m_fmt->video_codec;
+		m_c->bit_rate = m_AVIMOV_BPS;			//Bits Per Second 
+		m_c->width    = m_AVIMOV_WIDTH;			//Note Resolution must be a multiple of 2!!
+		m_c->height   = m_AVIMOV_HEIGHT;		//Note Resolution must be a multiple of 2!!
+		m_c->time_base.den = m_AVIMOV_FPS;		//Frames per second
+		m_c->time_base.num = 1;
+		m_c->gop_size      = m_AVIMOV_GOB;		// Intra frames per x P frames
+		m_c->pix_fmt       = AV_PIX_FMT_YUV420P;//Do not change this, H264 needs YUV format not RGB
+	
+
+		if (m_oc->oformat->flags & AVFMT_GLOBALHEADER)
+			m_c->flags |= CODEC_FLAG_GLOBAL_HEADER;
+
+		m_video_st=st;
+		
+
+		AVCodecContext *c = m_video_st->codec;
+
+		ret = avcodec_open2(c, m_video_codec, NULL);
+		if (ret < 0) {
+			return;
+		}
+
+		//ret = avpicture_alloc(&m_dst_picture, c->pix_fmt, c->width, c->height);
+		m_dst_picture = av_frame_alloc();
+		m_dst_picture->format = c->pix_fmt;
+		m_dst_picture->data[0] = NULL;
+        m_dst_picture->linesize[0] = -1;
+		m_dst_picture->pts = 0;
+        m_dst_picture->width = m_c->width;
+        m_dst_picture->height = m_c->height;
+
+		ret = av_image_alloc(m_dst_picture->data, m_dst_picture->linesize, c->width, c->height, (AVPixelFormat)m_dst_picture->format, 32);
+		if (ret < 0) {
+			return;
+		}
+
+		//ret = avpicture_alloc(&m_src_picture, AV_PIX_FMT_BGR24, c->width, c->height);
+		m_src_picture = av_frame_alloc();
+		m_src_picture->format = AV_PIX_FMT_BGR24; // source frames are packed BGR24, not the codec's YUV420P
+		// align=1 keeps the layout identical to the decoder's tightly packed
+		// buffer, so the straight memcpy of bufferSize bytes in WriteFrame is safe
+		ret = av_image_alloc(m_src_picture->data, m_src_picture->linesize, c->width, c->height, AV_PIX_FMT_BGR24, 1);
+
+		if (ret < 0) {
+			return;
+		}
+
+		bufferSize = ret;
+		
+		av_dump_format(m_oc, 0, filename, 1);
+
+		if (!(m_fmt->flags & AVFMT_NOFILE)) {
+			ret = avio_open(&m_oc->pb, filename, AVIO_FLAG_WRITE);
+			if (ret < 0) {
+				return;
+			}
+		}
+		
+		ret = avformat_write_header(m_oc, NULL);
+		
+		if (ret < 0) {
+			return;
+		}
+
+		sws_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_BGR24,
+								 c->width, c->height, AV_PIX_FMT_YUV420P,
+								 SWS_BICUBIC, NULL, NULL, NULL);
+		if (!sws_ctx) {
+			return;
+		}
+	}
+
+	void FFmpegH264Encoder::WriteFrame(uint8_t * RGBFrame )
+	{	
+
+		memcpy(m_src_picture->data[0], RGBFrame, bufferSize);
+
+		sws_scale(sws_ctx,
+					m_src_picture->data, m_src_picture->linesize,
+					0, m_c->height, m_dst_picture->data, m_dst_picture->linesize);
+		
+
+        AVPacket pkt = { 0 };
+		int got_packet;
+		av_init_packet(&pkt);
+
+		int ret = 0;
+
+		ret = avcodec_encode_video2(m_c, &pkt, m_dst_picture, &got_packet);
+		
+		if (ret < 0) {
+			return;
+		}
+
+		if (!ret && got_packet && pkt.size) 
+		{
+			pkt.stream_index = m_video_st->index;
+			FrameStructure * frame = new FrameStructure();
+			frame->dataPointer = new uint8_t[pkt.size-4];
+			frame->dataSize = pkt.size-4;
+			frame->frameID = m_frame_count;
+
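+			// skip the 4-byte Annex-B start code (00 00 00 01): the discrete
+			// framer / RTP sink expects raw NAL units without it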
+			memcpy(frame->dataPointer,pkt.data+4,pkt.size-4);
+
+			pthread_mutex_lock(&outqueue_mutex);
+
+			if(outqueue.size()<30)
+			{
+				outqueue.push(frame);
+			}
+			else
+			{
+				delete frame;
+			}
+
+			pthread_mutex_unlock(&outqueue_mutex);
+
+		}
+
+		av_free_packet(&pkt);
+
+		m_frame_count++;
+		m_dst_picture->pts += av_rescale_q(1, m_video_st->codec->time_base, m_video_st->time_base);
+		
+		onFrame();
+	}
+
+	void FFmpegH264Encoder::SetupVideo(std::string filename, int Width, int Height, int FPS, int GOB, int BitPerSecond)
+	{
+		m_filename = filename;
+		m_AVIMOV_WIDTH=Width;	//Movie width
+		m_AVIMOV_HEIGHT=Height;	//Movie height
+		m_AVIMOV_FPS=FPS;		//Movie frames per second
+		m_AVIMOV_GOB=GOB;		//I frames per no of P frames, see note below!
+		m_AVIMOV_BPS=BitPerSecond; //Bits per second, if this is too low then movie will become garbled
+		
+		SetupCodec(m_filename.c_str(),AV_CODEC_ID_H264);	
+	}
+
+	void FFmpegH264Encoder::CloseCodec()
+	{
+
+		av_write_trailer(m_oc);
+	    avcodec_close(m_video_st->codec);
+
+	    av_freep(&(m_dst_picture->data[0]));
+        av_frame_unref(m_dst_picture);
+        av_free(m_dst_picture);
+        av_freep(&(m_src_picture->data[0]));
+        av_frame_unref(m_src_picture);
+        av_free(m_src_picture);
+
+	    if (!(m_fmt->flags & AVFMT_NOFILE))
+	        avio_close(m_oc->pb);
+
+        m_oc->pb = NULL;
+
+	    avformat_free_context(m_oc);
+	    sws_freeContext(sws_ctx);
+
+	}
+
+	void FFmpegH264Encoder::CloseVideo()
+	{
+		CloseCodec();	
+	}
+
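+	// GetFrame()/ReleaseFrame() form a two-phase handoff: GetFrame() exposes the
+	// front packet without popping it, and ReleaseFrame() pops and frees it once
+	// the live555 source has copied the data out.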
+	char FFmpegH264Encoder::GetFrame(u_int8_t** FrameBuffer, unsigned int *FrameSize)
+	{	
+		pthread_mutex_lock(&outqueue_mutex);
+		if(!outqueue.empty())
+		{	
+			FrameStructure * frame = outqueue.front();
+			*FrameBuffer = (uint8_t*)frame->dataPointer;
+			*FrameSize = frame->dataSize;
+			pthread_mutex_unlock(&outqueue_mutex);
+			return 1;
+		}
+		else
+		{
+			*FrameBuffer = 0;
+			*FrameSize = 0;
+			pthread_mutex_unlock(&outqueue_mutex);
+			return 0;
+		}
+	}
+
+	char FFmpegH264Encoder::ReleaseFrame()
+	{
+		pthread_mutex_lock(&outqueue_mutex);
+		if(!outqueue.empty())
+		{
+			FrameStructure * frame = outqueue.front();
+			outqueue.pop();	
+			delete frame;
+		}
+		pthread_mutex_unlock(&outqueue_mutex);
+		return 1;
+	}
+}
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.h b/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.h
new file mode 100644
index 0000000..31e345d
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegH264Encoder.h
@@ -0,0 +1,102 @@
+//
+//  FFmpegH264Encoder.h
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#ifndef MESAI_FFMPEGH264_ENCODER_H
+#define MESAI_FFMPEGH264_ENCODER_H
+
+#include <string>
+#include <queue>
+#include <pthread.h>
+#include <unistd.h>	// usleep
+#include <functional>
+
+#include "IEncoder.h"
+
+extern "C" {
+	
+	#include <stdlib.h>
+	#include <stdio.h>
+	#include <string.h>
+	#include <math.h>
+	#include <libavutil/opt.h>
+	#include <libavutil/mathematics.h>
+	#include <libavformat/avformat.h>
+	#include <libswscale/swscale.h>
+	#include <libswresample/swresample.h>
+	#include <libavutil/imgutils.h>
+	#include <libavcodec/avcodec.h>
+
+}
+
+namespace MESAI
+{
+	class FrameStructure {
+		public:
+			uint8_t * dataPointer;
+			int dataSize;
+			int frameID;
+			~FrameStructure()
+			{
+				delete [] dataPointer; // matches the new uint8_t[...] allocation
+			}
+	};
+
+	class FFmpegH264Encoder : public IEncoder
+	{
+	public:
+		FFmpegH264Encoder();
+		~FFmpegH264Encoder() { } // no out-of-line definition exists
+		
+		virtual void setCallbackFunctionFrameIsReady(std::function<void()> func);
+		
+		void SetupVideo(std::string filename, int Width, int Height, int FPS, int GOB, int BitPerSecond);
+		void CloseVideo();
+		void SetupCodec(const char *filename, int codec_id);
+		void CloseCodec();
+		
+
+		void SendNewFrame(uint8_t * RGBFrame);		
+		void WriteFrame(uint8_t * RGBFrame);
+		virtual char ReleaseFrame();
+
+		void run();	
+		virtual char GetFrame(u_int8_t** FrameBuffer, unsigned int *FrameSize);
+
+	private:
+
+
+		std::queue<uint8_t*> inqueue;
+		pthread_mutex_t inqueue_mutex;
+		std::queue<FrameStructure *> outqueue;
+		pthread_mutex_t outqueue_mutex;
+
+
+		int m_sws_flags;
+		int	m_AVIMOV_FPS;
+		int	m_AVIMOV_GOB;
+		int	m_AVIMOV_BPS;
+		int m_frame_count;
+		int	m_AVIMOV_WIDTH;
+		int	m_AVIMOV_HEIGHT;
+		std::string m_filename;
+
+	    double m_video_time;
+
+	    AVCodecContext *m_c;
+		AVStream *m_video_st;
+		AVOutputFormat *m_fmt;
+		AVFormatContext *m_oc;
+		AVCodec *m_video_codec;
+	    AVFrame * m_src_picture, * m_dst_picture;
+	    SwsContext *sws_ctx;
+	    int bufferSize;
+
+		std::function<void()> onFrame;
+
+	};
+}
+#endif
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/FFmpegH264Source.cpp b/RtspFace/FFmpegRTSPServer/FFmpegH264Source.cpp
new file mode 100644
index 0000000..c2d5ab5
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegH264Source.cpp
@@ -0,0 +1,84 @@
+//
+//  FFmpegH264Source.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "FFmpegH264Source.h"
+
+namespace MESAI
+{
+    FFmpegH264Source * FFmpegH264Source::createNew(UsageEnvironment& env, IEncoder * E_Source) {
+		return new FFmpegH264Source(env, E_Source);
+	}
+
+	FFmpegH264Source::FFmpegH264Source(UsageEnvironment& env, IEncoder * E_Source) : FramedSource(env), Encoding_Source(E_Source)
+	{
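+		// live555's event loop is single-threaded: the encoder thread must not
+		// call deliverFrame() directly, so it fires this trigger instead and the
+		// scheduler runs deliverFrameStub() on the loop thread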
+		m_eventTriggerId = envir().taskScheduler().createEventTrigger(FFmpegH264Source::deliverFrameStub);
+		std::function<void()> callback1 = std::bind(&FFmpegH264Source::onFrame,this);
+		Encoding_Source -> setCallbackFunctionFrameIsReady(callback1);
+	}
+
+	FFmpegH264Source::~FFmpegH264Source()
+	{
+
+	}
+
+	void FFmpegH264Source::doStopGettingFrames()
+	{
+        FramedSource::doStopGettingFrames();
+	}
+
+	void FFmpegH264Source::onFrame()
+	{
+		envir().taskScheduler().triggerEvent(m_eventTriggerId, this);
+	}
+
+	void FFmpegH264Source::doGetNextFrame()
+	{
+		deliverFrame();
+	}
+
+	void FFmpegH264Source::deliverFrame()
+	{
+		if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet
+
+		// locals, not statics: statics would be shared across every replica of this source
+		u_int8_t* newFrameDataStart = NULL;
+		unsigned newFrameSize = 0;
+
+		/* get the data frame from the Encoding thread.. */
+		if (Encoding_Source->GetFrame(&newFrameDataStart, &newFrameSize)){
+			if (newFrameDataStart!=NULL) {
+				/* This should never happen, but check anyway.. */
+				if (newFrameSize > fMaxSize) {
+					fFrameSize = fMaxSize;
+					fNumTruncatedBytes = newFrameSize - fMaxSize;
+				} else {
+					fFrameSize = newFrameSize;
+				}
+
+				gettimeofday(&fPresentationTime, NULL);
+				memcpy(fTo, newFrameDataStart, fFrameSize);
+				
+				//delete newFrameDataStart;
+				//newFrameSize = 0;
+				
+				Encoding_Source->ReleaseFrame();
+			}
+			else {
+				fFrameSize=0;
+				fTo=NULL;
+				handleClosure(this);
+			}
+		}else
+		{
+			fFrameSize = 0;
+		}
+		
+		if(fFrameSize>0)
+			FramedSource::afterGetting(this);
+
+	}
+}
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/FFmpegH264Source.h b/RtspFace/FFmpegRTSPServer/FFmpegH264Source.h
new file mode 100644
index 0000000..8b7a660
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/FFmpegH264Source.h
@@ -0,0 +1,43 @@
+//
+//  FFmpegH264Source.h
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#ifndef MESAI_FFMPEGH264_SOURCE_HH
+#define MESAI_FFMPEGH264_SOURCE_HH
+
+
+#include <functional>
+#include <FramedSource.hh>
+#include <UsageEnvironment.hh>
+#include <Groupsock.hh>
+#include "IEncoder.h"
+
+namespace MESAI
+{
+    
+  class FFmpegH264Source : public FramedSource {
+  public:
+    static FFmpegH264Source* createNew(UsageEnvironment& env, IEncoder * E_Source);
+    FFmpegH264Source(UsageEnvironment& env, IEncoder *  E_Source);
+    ~FFmpegH264Source();
+
+  private:
+    static void deliverFrameStub(void* clientData) {((FFmpegH264Source*) clientData)->deliverFrame();};
+    virtual void doGetNextFrame();
+    void deliverFrame();
+    virtual void doStopGettingFrames();
+    void onFrame();
+
+    
+  private:
+    IEncoder * Encoding_Source;
+    EventTriggerId m_eventTriggerId;
+
+  };
+
+}
+#endif
diff --git a/RtspFace/FFmpegRTSPServer/IEncoder.h b/RtspFace/FFmpegRTSPServer/IEncoder.h
new file mode 100644
index 0000000..f71cb72
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/IEncoder.h
@@ -0,0 +1,19 @@
+#ifndef MESAI_I_ENCODER_H
+#define MESAI_I_ENCODER_H
+
+#include <functional>
+#include <sys/types.h>	// u_int8_t
+
+namespace MESAI
+{
+	class IEncoder
+	{
+	public:
+		IEncoder() { }
+		virtual ~IEncoder() { }
+		virtual void setCallbackFunctionFrameIsReady(std::function<void()> func) = 0;
+		virtual char GetFrame(u_int8_t** FrameBuffer, unsigned int *FrameSize) = 0;
+		virtual char ReleaseFrame() = 0;
+	};
+}
+
+#endif
diff --git a/RtspFace/FFmpegRTSPServer/LiveRTSPServer.cpp b/RtspFace/FFmpegRTSPServer/LiveRTSPServer.cpp
new file mode 100644
index 0000000..2f6790a
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/LiveRTSPServer.cpp
@@ -0,0 +1,79 @@
+//
+//  LiveRTSPServer.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "LiveRTSPServer.h"
+
+namespace MESAI
+{
+	LiveRTSPServer::LiveRTSPServer( IEncoder * a_Encoder, int port, int httpPort )
+		: m_Encoder (a_Encoder), portNumber(port), httpTunnelingPort(httpPort)
+	{
+		quit = 0;
+	}
+
+	LiveRTSPServer::~LiveRTSPServer()
+	{
+
+	}
+
+	void LiveRTSPServer::run()
+	{
+		TaskScheduler    *scheduler;
+		UsageEnvironment *env ;
+		char RTSP_Address[1024];
+		RTSP_Address[0]=0x00;
+
+        scheduler = BasicTaskScheduler::createNew();
+        env = BasicUsageEnvironment::createNew(*scheduler);
+        
+        UserAuthenticationDatabase* authDB = NULL;
+        
+        // if (m_Enable_Pass){
+        // 	authDB = new UserAuthenticationDatabase;
+        // 	authDB->addUserRecord(UserN, PassW);
+        // }
+        
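+        // enlarge live555's output packet buffer so large H.264 frames are not truncated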
+        OutPacketBuffer::maxSize = 2000000;
+        RTSPServer* rtspServer = RTSPServer::createNew(*env, portNumber, authDB);
+        
+        if (rtspServer == NULL)
+        {
+            *env << "LIVE555: Failed to create RTSP server: " << env->getResultMsg() << "\n";
+        }
+        else {
+            
+            
+            if(httpTunnelingPort)
+            {
+                rtspServer->setUpTunnelingOverHTTP(httpTunnelingPort);
+            }
+            
+            char const* descriptionString = "MESAI Streaming Session";
+            
+            FFmpegH264Source * source = FFmpegH264Source::createNew(*env,m_Encoder);
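+            // the replicator lets every client session share one replica of the
+            // encoder output instead of creating a new source per client;
+            // 'false' keeps it alive after the last replica closes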
+            StreamReplicator * inputDevice = StreamReplicator::createNew(*env, source, false);
+            
+            ServerMediaSession* sms = ServerMediaSession::createNew(*env, RTSP_Address, RTSP_Address, descriptionString);
+            sms->addSubsession(MESAI::LiveServerMediaSubsession::createNew(*env, inputDevice));
+            rtspServer->addServerMediaSession(sms);
+            
+            char* url = rtspServer->rtspURL(sms);
+            *env << "Play this stream using the URL \"" << url << "\"\n";
+            delete [] url;
+            
+            //signal(SIGINT, sighandler);
+            env->taskScheduler().doEventLoop(&quit); // returns only when quit becomes non-zero
+            
+            Medium::close(rtspServer);
+            Medium::close(inputDevice);
+        }
+        
+        env->reclaim();
+        delete scheduler;
+	}
+}
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/LiveRTSPServer.h b/RtspFace/FFmpegRTSPServer/LiveRTSPServer.h
new file mode 100644
index 0000000..e1f3eed
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/LiveRTSPServer.h
@@ -0,0 +1,39 @@
+//
+//  LiveRTSPServer.h
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#ifndef MESAI_LIVE_RTSP_SERVER_HH
+#define MESAI_LIVE_RTSP_SERVER_HH
+
+#include <UsageEnvironment.hh>
+#include <BasicUsageEnvironment.hh>
+#include <GroupsockHelper.hh>
+#include <liveMedia.hh>
+#include "LiveServerMediaSubsession.h"
+#include "FFmpegH264Source.h"
+#include "IEncoder.h"
+
+namespace MESAI {
+
+	class LiveRTSPServer
+	{
+	public:
+
+		LiveRTSPServer(IEncoder  * a_Encoder, int port, int httpPort );
+		~LiveRTSPServer();
+		void run();
+
+	private:
+		int portNumber;
+		int httpTunnelingPort;
+		IEncoder * m_Encoder;
+		char quit;
+
+	};
+}
+
+#endif
diff --git a/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.cpp b/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.cpp
new file mode 100644
index 0000000..6d16082
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.cpp
@@ -0,0 +1,29 @@
+//
+//  LiveServerMediaSubsession.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "LiveServerMediaSubsession.h"
+
+namespace MESAI
+{
+	LiveServerMediaSubsession * LiveServerMediaSubsession::createNew(UsageEnvironment& env, StreamReplicator* replicator)
+	{ 
+		return new LiveServerMediaSubsession(env,replicator);
+	}
+					
+	FramedSource* LiveServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
+	{
+		estBitrate = 500; // kbps, rough estimate used by live555 for buffer sizing
+		FramedSource* source = m_replicator->createStreamReplica();
+		return H264VideoStreamDiscreteFramer::createNew(envir(), source);
+	}
+		
+	RTPSink* LiveServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,  unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
+	{
+		return H264VideoRTPSink::createNew(envir(), rtpGroupsock,rtpPayloadTypeIfDynamic);
+	}
+
+}
diff --git a/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.h b/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.h
new file mode 100644
index 0000000..22bfbc5
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/LiveServerMediaSubsession.h
@@ -0,0 +1,39 @@
+//
+//  LiveServerMediaSubsession.h
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#ifndef MESAI_Live_SERVER_MEDIA_SUBSESSION_HH
+#define MESAI_Live_SERVER_MEDIA_SUBSESSION_HH
+
+#include <OnDemandServerMediaSubsession.hh>
+#include <StreamReplicator.hh>
+#include <H264VideoRTPSink.hh>
+#include <H264VideoStreamFramer.hh>
+#include <H264VideoStreamDiscreteFramer.hh>
+#include <UsageEnvironment.hh>
+#include <Groupsock.hh>
+
+namespace MESAI 
+{
+
+  class LiveServerMediaSubsession: public OnDemandServerMediaSubsession
+  {
+    public:
+      static LiveServerMediaSubsession* createNew(UsageEnvironment& env, StreamReplicator* replicator);
+    
+    protected:
+      LiveServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator)
+          : OnDemandServerMediaSubsession(env, False), m_replicator(replicator) {};
+      
+      virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
+      virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,  unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);    
+
+      StreamReplicator * m_replicator;
+  };
+
+}
+#endif
\ No newline at end of file
diff --git a/RtspFace/FFmpegRTSPServer/main.cpp b/RtspFace/FFmpegRTSPServer/main.cpp
new file mode 100644
index 0000000..38d22fc
--- /dev/null
+++ b/RtspFace/FFmpegRTSPServer/main.cpp
@@ -0,0 +1,78 @@
+//
+//  main.cpp
+//  FFmpegRTSPServer
+//
+//  Created by Mina Saad on 9/22/15.
+//  Copyright (c) 2015 Mina Saad. All rights reserved.
+//
+
+#include "LiveRTSPServer.h"
+#include "FFmpegH264Encoder.h"
+#include "FFmpegDecoder.h"
+
+MESAI::FFmpegH264Encoder * encoder;
+MESAI::LiveRTSPServer * server;
+MESAI::FFmpegDecoder * decoder;
+
+int UDPPort;
+int HTTPTunnelPort;
+pthread_t thread1;
+pthread_t thread2;
+
+
+void * runServer(void * server)
+{
+    ((MESAI::LiveRTSPServer * ) server)->run();
+    pthread_exit(NULL);
+}
+
+void * runEncoder(void * encoder)
+{
+    ((MESAI::FFmpegH264Encoder * ) encoder)->run();
+    pthread_exit(NULL);
+}
+
+void onFrame(uint8_t * data)
+{
+    encoder->SendNewFrame(data);
+}
+
+int test_main(int argc, const char * argv[])
+{
+    if(argc < 2)
+        return -1; // usage: <media-path> [udp-port] [http-tunnel-port]
+    decoder = new MESAI::FFmpegDecoder(argv[1]);
+    if(argc >= 3)
+        UDPPort = atoi(argv[2]);
+    if(argc >= 4)
+        HTTPTunnelPort = atoi(argv[3]);
+    decoder->initialize();
+    decoder->setOnframeCallbackFunction(onFrame);
+    encoder = new MESAI::FFmpegH264Encoder();
+    encoder->SetupVideo("dummy.avi",decoder->width,decoder->height,decoder->frameRate,decoder->GOP,decoder->bitrate);
+    server = new MESAI::LiveRTSPServer(encoder, UDPPort, HTTPTunnelPort);
+    
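+    // the RTSP server and the encoder each run on a detached thread; the
+    // decoder drives everything from the main thread via playMedia()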
+    pthread_attr_t attr1;
+    pthread_attr_init(&attr1);
+    pthread_attr_setdetachstate(&attr1, PTHREAD_CREATE_DETACHED);
+    int rc1 = pthread_create(&thread1, &attr1,  runServer, server);
+    
+    if (rc1){
+        //exception
+        return -1;
+    }
+    
+    pthread_attr_t attr2;
+    pthread_attr_init(&attr2);
+    pthread_attr_setdetachstate(&attr2, PTHREAD_CREATE_DETACHED);
+    int rc2 = pthread_create(&thread2, &attr2,  runEncoder, encoder);
+    
+    if (rc2){
+        //exception
+        return -1;
+    }
+    
+    // Play Media Here
+    decoder->playMedia();
+    decoder->finalize();
+    
+    return 0;
+}
diff --git a/RtspFace/PL_RTSPClient.cpp b/RtspFace/PL_RTSPClient.cpp
index b26b2c1..41dad6a 100644
--- a/RtspFace/PL_RTSPClient.cpp
+++ b/RtspFace/PL_RTSPClient.cpp
@@ -5,11 +5,11 @@
 void rtsp_client_fmtp_callback(void* arg, const char* val);
 void rtsp_client_frame_callback(void* arg, uint8_t* buffer, size_t buffSize);
 void rtsp_client_continue_callback(void* arg);
+//struct RTSPConfig;
 #include "live555/testProgs/testRTSPClient.hpp"
 
 struct RTSPClient_Internal
 {
-	PL_RTSPClient* client;
 	RTSPConfig rtspConfig;
 	pthread_t live_daemon_thid;
 	char eventLoopWatchVariable;
@@ -21,7 +21,7 @@
 	size_t lastBuffSize;
 	
 	RTSPClient_Internal() : 
-		client(nullptr), rtspConfig(), live_daemon_thid(0), 
+		rtspConfig(), live_daemon_thid(0), 
 		eventLoopWatchVariable(0), live_daemon_running(false), 
 		frame_mutex(new pthread_mutex_t), continue_mutex(new pthread_mutex_t), 
 		lastBuffer(nullptr), lastBuffSize(0)
@@ -49,9 +49,8 @@
 	
 	void reset()
 	{
-		client = nullptr;
-		rtspConfig.progName = "";
-		rtspConfig.rtspURL = "";
+		RTSPConfig _rtspConfig;
+		rtspConfig = _rtspConfig;
 		live_daemon_thid = 0;
 		eventLoopWatchVariable = 0;
 		live_daemon_running = false;
@@ -81,7 +80,7 @@
 	}
 };
 
-void* live_daemon_thd(void* arg)
+static void* live_daemon_thd(void* arg)
 {
 	RTSPClient_Internal* in = (RTSPClient_Internal*)arg;
 	
@@ -90,7 +89,7 @@
 
 	usage(*env, in->rtspConfig.progName.c_str());
 	
-	openURL(*env, in->client, in->rtspConfig.progName.c_str(), in->rtspConfig.rtspURL.c_str());
+	openURL(*env, in->rtspConfig);
 	
 	in->live_daemon_running = true;
 	env->taskScheduler().doEventLoop(&(in->eventLoopWatchVariable));
@@ -120,8 +119,8 @@
 	const RTSPConfig* config = reinterpret_cast<const RTSPConfig*>(args);
 	RTSPClient_Internal* in = (RTSPClient_Internal*)internal;
 	in->reset();
-	in->client = this;
 	in->rtspConfig = *config;
+	in->rtspConfig.args = this;
 	
 	int ret = pthread_mutex_lock(in->frame_mutex);
 	if(ret != 0)
diff --git a/RtspFace/PL_RTSPClient.h b/RtspFace/PL_RTSPClient.h
index b5367fb..e464586 100644
--- a/RtspFace/PL_RTSPClient.h
+++ b/RtspFace/PL_RTSPClient.h
@@ -8,8 +8,14 @@
 {
 	std::string progName;
 	std::string rtspURL;
+	bool aux; // frame data starts with the 0x00000001 start code
+	int verbosityLevel;
+	int tunnelOverHTTPPortNum; // portNumBits
+	void* args;
 	
-	RTSPConfig() : progName(), rtspURL() { }
+	RTSPConfig() : 
+		progName(), rtspURL(), aux(true), verbosityLevel(1), tunnelOverHTTPPortNum(0), args(nullptr)
+	{ }
 };
 
 class PL_RTSPClient : public PipeLineElem
diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index 5ea97c2..feb25ac 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -3,7 +3,12 @@
 #include <liveMedia.hh>
 #include <BasicUsageEnvironment.hh>
 
-class MyH264FramedSource;
+#include "FFmpegRTSPServer/IEncoder.h"
+#include "FFmpegRTSPServer/LiveRTSPServer.h"
+#include "FFmpegRTSPServer/FFmpegH264Source.h"
+#include "FFmpegRTSPServer/LiveServerMediaSubsession.h"
+
+class MyEncoderStub;
 
 struct RTSPServer_Internal
 {
@@ -15,31 +20,13 @@
 	pthread_mutex_t* frame_mutex;
 	bool live_daemon_running;
 	
-	UsageEnvironment* env;
+	MESAI::LiveRTSPServer * server;
+	MyEncoderStub * encoderStub;
 
-	// To make the second and subsequent client for each stream reuse the same
-	// input stream as the first client (rather than playing the file from the
-	// start for each client), change the following "False" to "True":
-	Boolean reuseFirstSource;
-
-	// To stream *only* MPEG-1 or 2 video "I" frames
-	// (e.g., to reduce network bandwidth),
-	// change the following "False" to "True":
-	Boolean iFramesOnly;
-	
-	UserAuthenticationDatabase* authDB;
-
-	RTSPServer* rtspServer;//#todo delete
-	
-	char descriptionString[1024];
-	
-	MyH264FramedSource* pMyH264FramedSource;
-	
 	RTSPServer_Internal() : 
 		buffer(nullptr), buffSize(0), 
-		payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false), 
-		env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr), 
-		rtspServer(nullptr)
+		payError(true), live_daemon_thid(0), frame_mutex(new pthread_mutex_t), live_daemon_running(false), 
+		server(nullptr), encoderStub(nullptr)
 	{
 		pthread_mutex_init(frame_mutex, NULL);
 	}
@@ -74,102 +61,76 @@
 		live_daemon_thid = 0;
 		live_daemon_running = false;
 		
-		env = nullptr;
-		reuseFirstSource = False;
-		iFramesOnly = False;
-		authDB = nullptr; 
-		rtspServer = nullptr;
-		
-		strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
-		
-		pMyH264FramedSource = nullptr;
+		server = nullptr;
+		encoderStub = nullptr;
 	}
 };
 
-
-class MyH264FramedSource : public FramedSource
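+// Adapts the pipeline's push model to MESAI::IEncoder's pull model: pay()
+// publishes one buffer, deliverFrame() wakes the live555 source via onFrame()
+// and then blocks on frame_mutex until ReleaseFrame() unlocks it.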
+class MyEncoderStub : public MESAI::IEncoder
 {
 public:
-	static MyH264FramedSource* createNew(UsageEnvironment& _env, RTSPServer_Internal& _in)
+	MyEncoderStub(RTSPServer_Internal& _in) : in(_in)
 	{
-		return new MyH264FramedSource(_env, _in);
 	}
 	
-	// deliver frame to the sink
-	bool deliverFrame()
+	virtual ~MyEncoderStub()
 	{
-		int ret = false;
-		if (isCurrentlyAwaitingData()) 
+	}
+	
+	virtual void setCallbackFunctionFrameIsReady(std::function<void()> func)
+	{
+		onFrame = func;
+	}
+	
+	virtual char GetFrame(u_int8_t** FrameBuffer, unsigned int *FrameSize)
+	{
+		if (in.buffer != nullptr && in.buffSize > 0)
 		{
-			fDurationInMicroseconds = 0;
-			fFrameSize = 0;
+			*FrameBuffer = in.buffer;
+			*FrameSize = in.buffSize;
+
+			printf("send frame size=%u\n", (unsigned int)in.buffSize);
 			
-			if (in.buffSize > fMaxSize) 
-			{
-				fFrameSize = fMaxSize;
-				fNumTruncatedBytes = in.buffSize - fMaxSize;
-			} 
-			else 
-			{
-				fFrameSize = in.buffSize;
-			}
+			in.buffer = nullptr;
+			in.buffSize = 0;
 			
-			if (fFrameSize > 0)
-			{
-				memcpy(fTo, in.buffer, fFrameSize);
-				
-				int ret = pthread_mutex_unlock(in.frame_mutex);
-				if(ret != 0)
-				{
-					printf("pthread_mutex_unlock frame_mutex: %s/n", strerror(ret));
-					return false;
-				}
-				
-				ret = true;
-			}
+			return 1;
 		}
-
-		return ret;
-	}
-
-protected:
-	MyH264FramedSource(UsageEnvironment& _env, RTSPServer_Internal& _in) : 
-		FramedSource(_env), env(_env), in(_in)
-	{
+		else
+		{
+			ReleaseFrame();
+			return 0;
+		}
 	}
 	
-	virtual ~MyH264FramedSource()
+	virtual char ReleaseFrame()
 	{
+		int ret = pthread_mutex_unlock(in.frame_mutex);
+		if(ret != 0)
+		{
+			printf("pthread_mutex_unlock frame_mutex: %s\n", strerror(ret));
+			return 0;
+		}
+		
+		return 1;
 	}
 
-	// overide FramedSource
-	virtual void doGetNextFrame()
+	void deliverFrame()
 	{
-		printf("MyH264FramedSource::doGetNextFrame\n");
-		
+		// write frame buffer of RTSPServer_Internal::buffer
+		onFrame();
+
 		int ret = pthread_mutex_lock(in.frame_mutex);
 		if(ret != 0)
 		{
 			printf("pthread_mutex_lock frame_mutex: %s/n", strerror(ret));
 			return;
 		}
+	}
 
-		// deliverFrame
-		//if (fFrameSize > 0)
-		//{
-			// send Frame to the consumer
-			FramedSource::afterGetting(this);			
-		//}
-	}
-	
-	virtual void doStopGettingFrames()
-	{
-		FramedSource::doStopGettingFrames();
-	}
-	
 private:
-	UsageEnvironment& env;
 	RTSPServer_Internal& in;
+	std::function<void()> onFrame;
 };
 
 PipeLineElem* create_PL_RTSPServer()
@@ -184,60 +145,20 @@
 PL_RTSPServer::~PL_RTSPServer()
 {
 	delete (RTSPServer_Internal*)internal;
-	internal= nullptr;
+	internal = nullptr;
 }
 
-void* live_daemon_thd(void* arg)
+static void* live_daemon_thd(void* arg)
 {
 	RTSPServer_Internal* in = (RTSPServer_Internal*)arg;
+
+	MyEncoderStub encoder(*in);
+	in->encoderStub = &encoder;
+	in->server = new MESAI::LiveRTSPServer(&encoder, 8554, 8080);
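+	// note: the stub lives on this thread's stack; run() blocks for the server's
+	// lifetime, so the pointer published in encoderStub stays valid while serving.
+	// RTSP port 8554 and HTTP tunneling port 8080 are hardcoded here.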
 	
-	// Begin by setting up our usage environment:
-	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
-	in->env = BasicUsageEnvironment::createNew(*scheduler);
-
-#ifdef ACCESS_CONTROL
-	// To implement client access control to the RTSP server, do the following:
-	in->authDB = new UserAuthenticationDatabase;
-	in->authDB->addUserRecord("username1", "password1"); // replace these with real strings
-	// Repeat the above with each <username>, <password> that you wish to allow
-	// access to the server.
-#endif
-
-	// Create the RTSP server:
-	in->rtspServer = RTSPServer::createNew(*(in->env), 8554, in->authDB);
-	if (in->rtspServer == NULL)
-	{
-		*(in->env) << "Failed to create RTSP server: " << in->env->getResultMsg() << "\n";
-		return nullptr;
-	}
-
-	// Set up each of the possible streams that can be served by the
-	// RTSP server.  Each such stream is implemented using a
-	// "ServerMediaSession" object, plus one or more
-	// "ServerMediaSubsession" objects for each audio/video substream.
-	
-    char const* streamName = "plH264Encoder";
-    ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
-	in->pMyH264FramedSource = MyH264FramedSource::createNew(*(in->env), *in);
-    sms->addSubsession(in->pMyH264FramedSource);
-    in->rtspServer->addServerMediaSession(sms);
-	
-	// announceStream
-	char* url = in->rtspServer->rtspURL(sms);
-	*(in->env) << "Play this stream using the URL " << url << "\n";
-	delete[] url;
-	
-	// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
-	// Try first with the default HTTP port (80), and then with the alternative HTTP
-	// port numbers (8000 and 8080).
-
-	if (in->rtspServer->setUpTunnelingOverHTTP(80))
-		*(in->env) << "\n(We use port " << in->rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
-	else
-		*(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";
-
 	in->live_daemon_running = true;
-	in->env->taskScheduler().doEventLoop(); // does not return
+	in->server->run(); // blocks until the server's quit flag is set
+	in->encoderStub = nullptr;
 	in->live_daemon_running = false;
+	return nullptr;
 }
 
@@ -245,15 +166,8 @@
 {
 	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 	in->reset();
-	
-	int ret = pthread_mutex_lock(in->frame_mutex);
-	if(ret != 0)
-	{
-		printf("pthread_mutex_lock frame_mutex: %s/n", strerror(ret));
-		return false;
-	}
-	
-	ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
+
+	int ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
 	if(ret != 0)
 	{
 		printf("pthread_create: %s/n", strerror(ret));
@@ -274,10 +188,17 @@
 {
 	RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
 
+	if (pm.buffer == nullptr || pm.buffSize <= 0)
+		return false;
+	
 	in->buffer = pm.buffer;
 	in->buffSize = pm.buffSize;
 	
-	return in->pMyH264FramedSource->deliverFrame();
+	if (in->encoderStub == nullptr)
+		return false;
+	
+	in->encoderStub->deliverFrame();
+	return true;
 }
 
 bool PL_RTSPServer::gain(PipeMaterial& pm)
diff --git a/RtspFace/live555/testProgs/testRTSPClient.hpp b/RtspFace/live555/testProgs/testRTSPClient.hpp
index bf37d55..f7f5c4f 100644
--- a/RtspFace/live555/testProgs/testRTSPClient.hpp
+++ b/RtspFace/live555/testProgs/testRTSPClient.hpp
@@ -25,6 +25,19 @@
 
 #include <iostream>
 
+#define RTSP_CLIENT_VERBOSITY_LEVEL 1 // by default, print verbose output from each "RTSPClient"
+
+// By default, we request that the server stream its data using RTP/UDP.
+// If, instead, you want to request that the server stream via RTP-over-TCP, change the following to True:
+#define REQUEST_STREAMING_OVER_TCP False
+
+// The incoming data is forwarded to the frame callbacks, so the receive buffer
+// must hold the largest expected encoded frame:
+#define DUMMY_SINK_RECEIVE_BUFFER_SIZE (1920*1080*3)
+
+// If you don't want to see debugging output for each received frame, then comment out the following line:
+#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1
+
 // Forward function definitions:
 
 // RTSP 'response handlers':
@@ -39,7 +52,7 @@
   // called at the end of a stream's expected duration (if the stream has not already signaled its end using a RTCP "BYE")
 
 // The main streaming routine (for each "rtsp://" URL):
-void openURL(UsageEnvironment& env, void* args, char const* progName, char const* rtspURL);
+void openURL(UsageEnvironment& env, const RTSPConfig& _rtspConfig);
 
 // Used to iterate through each stream's 'subsessions', setting up each one:
 void setupNextSubsession(RTSPClient* rtspClient);
@@ -75,9 +88,18 @@
     return 1;
   }
 
+	RTSPConfig rtspConfig;
+	rtspConfig.progName = argv[0];
+	rtspConfig.rtspURL = "";
+	rtspConfig.aux = false;
+	rtspConfig.verbosityLevel = RTSP_CLIENT_VERBOSITY_LEVEL;
+	rtspConfig.tunnelOverHTTPPortNum = 0;
+	rtspConfig.args = nullptr;
+  
   // There are argc-1 URLs: argv[1] through argv[argc-1].  Open and start streaming each one:
   for (int i = 1; i <= argc-1; ++i) {
-    openURL(*env, NULL, argv[0], argv[i]);
+	rtspConfig.rtspURL = argv[i];
+	openURL(*env, rtspConfig);
   }
 
   // All subsequent activity takes place within the event loop:
@@ -117,20 +139,16 @@
 
 class ourRTSPClient: public RTSPClient {
 public:
-  static ourRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
-				  int verbosityLevel = 0,
-				  char const* applicationName = NULL,
-				  portNumBits tunnelOverHTTPPortNum = 0);
+  static ourRTSPClient* createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig);
 
 protected:
-  ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
-		int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum);
+  ourRTSPClient(UsageEnvironment& env, const RTSPConfig& _rtspConfig);
     // called only by createNew();
   virtual ~ourRTSPClient();
 
 public:
   StreamClientState scs;
-  void* args;
+  const RTSPConfig& rtspConfig;
 };
 
 // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream').
@@ -142,12 +160,12 @@
 {
 public:
 	static DummySink* createNew(UsageEnvironment& env, 
-					void* _args, 
+				  const RTSPConfig& _rtspConfig,
 				  MediaSubsession& subsession, // identifies the kind of data that's being received
 				  char const* streamId = NULL); // identifies the stream itself (optional)
 
 private:
-	DummySink(UsageEnvironment& env, void* _args, MediaSubsession& subsession, char const* streamId);
+	DummySink(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId);
 	// called only by "createNew()"
 	virtual ~DummySink();
 
@@ -159,7 +177,7 @@
 			 struct timeval presentationTime, unsigned durationInMicroseconds);
 
 public:
-	void* args;
+	const RTSPConfig& rtspConfig;
 
 private:
 	// redefined virtual functions:
@@ -171,74 +189,75 @@
 	char* fStreamId;
 };
 
-#define RTSP_CLIENT_VERBOSITY_LEVEL 1 // by default, print verbose output from each "RTSPClient"
-
 static unsigned rtspClientCount = 0; // Counts how many streams (i.e., "RTSPClient"s) are currently in use.
 
-void openURL(UsageEnvironment& env, void* args, char const* progName, char const* rtspURL) {
-  // Begin by creating a "RTSPClient" object.  Note that there is a separate "RTSPClient" object for each stream that we wish
-  // to receive (even if more than stream uses the same "rtsp://" URL).
-  RTSPClient* rtspClient = ourRTSPClient::createNew(env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
-  if (rtspClient == NULL) {
-    env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
-    return;
-  }
-  
-  ((ourRTSPClient*)rtspClient)->args = args;
+void openURL(UsageEnvironment& env, const RTSPConfig& _rtspConfig)
+{
+	// Begin by creating a "RTSPClient" object.  Note that there is a separate "RTSPClient" object for each stream that we wish
+	// to receive (even if more than stream uses the same "rtsp://" URL).
+	RTSPClient* rtspClient = ourRTSPClient::createNew(env, _rtspConfig);
+	if (rtspClient == NULL)
+	{
+		env << "Failed to create a RTSP client for URL \"" << _rtspConfig.rtspURL.c_str() << "\": " << env.getResultMsg() << "\n";
+		return;
+	}
 
-  ++rtspClientCount;
+	++rtspClientCount;
 
-  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
-  // Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
-  // Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
-  rtspClient->sendDescribeCommand(continueAfterDESCRIBE); 
+	// Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
+	// Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
+	// Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
+	rtspClient->sendDescribeCommand(continueAfterDESCRIBE); 
 }
 
 
 // Implementation of the RTSP 'response handlers':
 
-void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) {
-  do {
-    UsageEnvironment& env = rtspClient->envir(); // alias
-    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
+void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString)
+{
+	do
+	{
+		UsageEnvironment& env = rtspClient->envir(); // alias
+		StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
 
-    if (resultCode != 0) {
-      env << *rtspClient << "Failed to get a SDP description: " << resultString << "\n";
-      delete[] resultString;
-      break;
-    }
+		if (resultCode != 0)
+		{
+			env << *rtspClient << "Failed to get a SDP description: " << resultString << "\n";
+			delete[] resultString;
+			break;
+		}
 
-    char* const sdpDescription = resultString;
-    env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";
+		char* const sdpDescription = resultString;
+		env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";
 
-    // Create a media session object from this SDP description:
-    scs.session = MediaSession::createNew(env, sdpDescription);
-    delete[] sdpDescription; // because we don't need it anymore
-    if (scs.session == NULL) {
-      env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n";
-      break;
-    } else if (!scs.session->hasSubsessions()) {
-      env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
-      break;
-    }
+		// Create a media session object from this SDP description:
+		scs.session = MediaSession::createNew(env, sdpDescription);
+		delete[] sdpDescription; // because we don't need it anymore
+		if (scs.session == NULL)
+		{
+			env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n";
+			break;
+		}
+		else if (!scs.session->hasSubsessions())
+		{
+			env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
+			break;
+		}
 
-    // Then, create and set up our data source objects for the session.  We do this by iterating over the session's 'subsessions',
-    // calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
-    // (Each 'subsession' will have its own data source.)
-    scs.iter = new MediaSubsessionIterator(*scs.session);
-    setupNextSubsession(rtspClient);
-    return;
-  } while (0);
+		// Then, create and set up our data source objects for the session.  We do this by iterating over the session's 'subsessions',
+		// calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
+		// (Each 'subsession' will have its own data source.)
+		scs.iter = new MediaSubsessionIterator(*scs.session);
+		setupNextSubsession(rtspClient);
+		return;
+	} while (0);
 
-  // An unrecoverable error occurred with this stream.
-  shutdownStream(rtspClient);
+	// An unrecoverable error occurred with this stream.
+	shutdownStream(rtspClient);
 }
 
-// By default, we request that the server stream its data using RTP/UDP.
-// If, instead, you want to request that the server stream via RTP-over-TCP, change the following to True:
-#define REQUEST_STREAMING_OVER_TCP False
-
-void setupNextSubsession(RTSPClient* rtspClient) {
+void setupNextSubsession(RTSPClient* rtspClient)
+{
   UsageEnvironment& env = rtspClient->envir(); // alias
   StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
   
@@ -294,9 +313,8 @@
     // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
     // after we've sent a RTSP "PLAY" command.)
 
-	DummySink* mySink;
-    scs.subsession->sink = mySink = DummySink::createNew(env, ((ourRTSPClient*)rtspClient)->args, 
-			*scs.subsession, rtspClient->url());
+    scs.subsession->sink = DummySink::createNew(env, ((ourRTSPClient*)rtspClient)->rtspConfig, 
+												*scs.subsession, rtspClient->url());
       // perhaps use your own custom "MediaSink" subclass instead
     if (scs.subsession->sink == NULL) {
       env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
@@ -446,15 +464,14 @@
 
 // Implementation of "ourRTSPClient":
 
-ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, char const* rtspURL,
-					int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum) {
-  return new ourRTSPClient(env, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum);
+ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig)
+{
+  return new ourRTSPClient(env, _rtspConfig);
 }
 
-ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
-			     int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
-  : RTSPClient(env,rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1),
-	args(nullptr)
+ourRTSPClient::ourRTSPClient(UsageEnvironment& env, const RTSPConfig& _rtspConfig)
+  : RTSPClient(env, _rtspConfig.rtspURL.c_str(), _rtspConfig.verbosityLevel, _rtspConfig.progName.c_str(), 
+				_rtspConfig.tunnelOverHTTPPortNum, -1), rtspConfig(_rtspConfig)
 {
 }
 
@@ -481,30 +498,29 @@
 
 // Implementation of "DummySink":
 
-// Even though we're not going to be doing anything with the incoming data, we still need to receive it.
-// Define the size of the buffer that we'll use:
-#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 1920*1080*3
-
-DummySink* DummySink::createNew(UsageEnvironment& env, void* _args, MediaSubsession& subsession, char const* streamId)
+DummySink* DummySink::createNew(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId)
 {
-  return new DummySink(env, _args, subsession, streamId);
+  return new DummySink(env, _rtspConfig, subsession, streamId);
 }
 
-DummySink::DummySink(UsageEnvironment& env, void* _args, MediaSubsession& subsession, char const* streamId)
-  : MediaSink(env), args(_args), fSubsession(subsession)
+DummySink::DummySink(UsageEnvironment& env, const RTSPConfig& _rtspConfig, MediaSubsession& subsession, char const* streamId)
+  : MediaSink(env), rtspConfig(_rtspConfig), fSubsession(subsession)
 {
 	fStreamId = strDup(streamId);
 	fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
 
 	// ffmpeg need AUX header
-	fReceiveBuffer[0]=0x00; fReceiveBuffer[1]=0x00; fReceiveBuffer[2]=0x00; fReceiveBuffer[3]=0x01;
+	if (rtspConfig.aux)
+	{
+		fReceiveBuffer[0]=0x00; fReceiveBuffer[1]=0x00; fReceiveBuffer[2]=0x00; fReceiveBuffer[3]=0x01;
+	}
 
 	//parse sdp
 	const char* strSDP = fSubsession.savedSDPLines();
-	rtsp_client_sdp_callback(args, strSDP);
+	rtsp_client_sdp_callback(rtspConfig.args, strSDP);
 	
 	const char* strFmtp = fSubsession.fmtp_spropparametersets();
-	rtsp_client_fmtp_callback(args, strFmtp);
+	rtsp_client_fmtp_callback(rtspConfig.args, strFmtp);
 	//std::cout << strFmtp << std::endl;
 }
 
@@ -514,17 +530,20 @@
 }
 
 void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
-				  struct timeval presentationTime, unsigned durationInMicroseconds) {
-  DummySink* sink = (DummySink*)clientData;
+				  struct timeval presentationTime, unsigned durationInMicroseconds)
+{
+	DummySink* sink = (DummySink*)clientData;
 
-  if (frameSize > 0)
-	rtsp_client_frame_callback(sink->args, sink->fReceiveBuffer, frameSize + 4);
+	if (frameSize > 0)
+	{
+		unsigned s = frameSize;
+		if (sink->rtspConfig.aux)
+			s += 4;
+		rtsp_client_frame_callback(sink->rtspConfig.args, sink->fReceiveBuffer, s);
+	}
   
-  sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
+	sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
 }
-
-// If you don't want to see debugging output for each received frame, then comment out the following line:
-#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1
 
 void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
 				  struct timeval presentationTime, unsigned /*durationInMicroseconds*/) {
@@ -549,14 +568,20 @@
   continuePlaying();
 }
 
-Boolean DummySink::continuePlaying() {
-  if (fSource == NULL) return False; // sanity check (should not happen)
+Boolean DummySink::continuePlaying()
+{
+	if (fSource == NULL) return False; // sanity check (should not happen)
 
-	  rtsp_client_continue_callback(args);
-	  
-  // Request the next frame of data from our input source.  "afterGettingFrame()" will get called later, when it arrives:
-  fSource->getNextFrame(fReceiveBuffer + 4, DUMMY_SINK_RECEIVE_BUFFER_SIZE,
-                        afterGettingFrame, this,
-                        onSourceClosure, this);
-  return True;
+	rtsp_client_continue_callback(rtspConfig.args);
+
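+	// with aux enabled, bytes 0-3 of fReceiveBuffer hold a fixed 00 00 00 01
+	// start code; new data is received just past it, so the callback can deliver
+	// start code + NAL unit as one contiguous buffer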
+	u_int8_t* b = fReceiveBuffer;
+	unsigned bufAvail = DUMMY_SINK_RECEIVE_BUFFER_SIZE;
+	if (rtspConfig.aux)
+	{
+		b += 4;
+		bufAvail -= 4; // the receive area shrinks by the 4 bytes the start code occupies
+	}
+
+	// Request the next frame of data from our input source.  "afterGettingFrame()" will get called later, when it arrives:
+	fSource->getNextFrame(b, bufAvail,
+							afterGettingFrame, this,
+							onSourceClosure, this);
+
+	return True;
 }
diff --git a/RtspFace/main.cpp b/RtspFace/main.cpp
index c7e3c2b..ea5505d 100644
--- a/RtspFace/main.cpp
+++ b/RtspFace/main.cpp
@@ -14,6 +14,7 @@
 	PipeLine pipeLine;
 	
 	pipeLine.register_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
+	pipeLine.register_elem_creator("PL_RTSPServer", create_PL_RTSPServer);
 	pipeLine.register_elem_creator("PL_H264Decoder", create_PL_H264Decoder);
 	pipeLine.register_elem_creator("PL_AVFrameYUV420", create_PL_AVFrameYUV420);
 	pipeLine.register_elem_creator("PL_H264Encoder", create_PL_H264Encoder);
@@ -22,6 +23,10 @@
 	RTSPConfig rtspConfig;
 	rtspConfig.progName = argv[0];
 	rtspConfig.rtspURL = argv[1];
+	rtspConfig.aux = false; // set true when feeding ffmpeg's decoder, which needs the 0x00000001 start code
+	rtspConfig.verbosityLevel = 1;
+	rtspConfig.tunnelOverHTTPPortNum = 0;
+	rtspConfig.args = nullptr;
 	bool ret = rtspClient->init(&rtspConfig);
 	if (!ret)
 	{
@@ -29,8 +34,8 @@
 		exit(EXIT_FAILURE);
 	}
 	
-	PL_H264Decoder* h264Decoder = (PL_H264Decoder*)pipeLine.push_elem("PL_H264Decoder");
-	h264Decoder->init(nullptr);
+	//PL_H264Decoder* h264Decoder = (PL_H264Decoder*)pipeLine.push_elem("PL_H264Decoder");
+	//h264Decoder->init(nullptr);
 	
 	//PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
 	//avFrameYUV420->init(nullptr);
diff --git a/RtspFace/make.sh b/RtspFace/make.sh
index bd54b93..6a122db 100644
--- a/RtspFace/make.sh
+++ b/RtspFace/make.sh
@@ -18,6 +18,9 @@
 LIBYUV_INC="-I$LIBYUV_BASE/include"
 LIBYUV_LIB="-L$LIBYUV_BASE -lyuv"
 
+FFMPEGRTSPSERVER_BASE=./FFmpegRTSPServer
+FFMPEGRTSPSERVER_OBJ="FFmpegH264Source.o LiveRTSPServer.o LiveServerMediaSubsession.o"
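+# Only the live555 glue classes are linked; PL_RTSPServer supplies its own
+# IEncoder implementation, so FFmpegH264Encoder.cpp/FFmpegDecoder.cpp are not built here.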
+
 CPPFLAGS+="-pthread $LIVEMEDIA_INC $FFMPEG_INC $LIBBASE64_INC $LIBYUV_INC"
 LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB $LIBX264_LIB"
 
@@ -34,9 +37,16 @@
 g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PL_AVFrameBGRA.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PipeLine.cpp $CFLAGS $CPPFLAGS
+
+g++ -g -c -std=c++11 $FFMPEGRTSPSERVER_BASE/LiveRTSPServer.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 $FFMPEGRTSPSERVER_BASE/FFmpegH264Source.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 $FFMPEGRTSPSERVER_BASE/LiveServerMediaSubsession.cpp $CFLAGS $CPPFLAGS
+
 g++ -g -std=c++11 \
-  main.o PL_RTSPClient.o PL_RTSPServer.o PL_H264Decoder.o PL_H264Encoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PipeLine.o \
+  main.o PipeLine.o \
+  PL_RTSPClient.o PL_H264Decoder.o PL_H264Encoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o \
+  $FFMPEGRTSPSERVER_OBJ PL_RTSPServer.o \
   $LDFLAGS -o rtsp_face
 
 #export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib
-#./rtsp_face rtsp://admin:admin12345@192.168.1.63:554/h264/ch1/main/av_stream
+#./rtsp_face rtsp://admin:admin12345@192.168.1.64:554/h264/ch1/main/av_stream

--
Gitblit v1.8.0