From 1abced42eb3997ef9ef675bfe990f7913ea73f2f Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Fri, 23 Dec 2016 18:34:10 +0800
Subject: [PATCH] add h264 encoder and rtsp server
---
RtspFace/make.sh | 13
RtspFace/PL_RTSPServer.cpp | 194 +++++++++++-
RtspFace/live555/testProgs/testH264VideoStreamer.hpp | 132 --------
RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp | 455 ------------------------------
RtspFace/PL_H264Decoder.cpp | 8
RtspFace/main.cpp | 9
RtspFace/PL_H264Encoder.cpp | 196 +++++++++++-
RtspFace/live555/testProgs/testRTSPClient.hpp | 4
RtspFace/PL_RTSPClient.cpp | 4
9 files changed, 369 insertions(+), 646 deletions(-)
diff --git a/RtspFace/PL_H264Decoder.cpp b/RtspFace/PL_H264Decoder.cpp
index 9f51b43..879fa6f 100644
--- a/RtspFace/PL_H264Decoder.cpp
+++ b/RtspFace/PL_H264Decoder.cpp
@@ -126,7 +126,7 @@
in->pAVCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
in->pAVCodecContext->bit_rate = 0;
in->pAVCodecContext->time_base.den = 25;
- in->pAVCodecContext->width = 1920;
+ in->pAVCodecContext->width = 1920;//#todo get from pm
in->pAVCodecContext->height = 1080;
if (in->pAVCodecContext->extradata == NULL)
@@ -159,7 +159,7 @@
bool decodeH264(H264Decoder_Internal* in, uint8_t* buffer, size_t buffSize)
{
AVPacket packet = {0};
- int frameFinished = buffSize;
+ int gotPicture = 0; // was frameFinished; set as an output flag by avcodec_decode_video2
if (av_packet_from_data(&packet, buffer, buffSize) != 0)
{
@@ -168,8 +168,8 @@
}
// decode
- avcodec_decode_video2(in->pAVCodecContext, in->pAVFrame, &frameFinished, &packet);
- if(frameFinished)
+ avcodec_decode_video2(in->pAVCodecContext, in->pAVFrame, &gotPicture, &packet);
+ if(gotPicture)
{
// decode ok
return true;
diff --git a/RtspFace/PL_H264Encoder.cpp b/RtspFace/PL_H264Encoder.cpp
index ebdfb7f..9fc0a0b 100644
--- a/RtspFace/PL_H264Encoder.cpp
+++ b/RtspFace/PL_H264Encoder.cpp
@@ -2,24 +2,33 @@
extern "C"
{
- #include <libyuv.h>
+ #include <libavcodec/avcodec.h>
+ #include <libavutil/frame.h>
+ #include <libavformat/avformat.h>
+ #include "libavutil/imgutils.h"
}
-struct PL_H264Encoder_Internal
+struct H264Encoder_Internal
{
- uint8_t buffer[1920*1080*4];
+ uint8_t buffer[1920*1080*3];
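+ // holds encoded H264 output; ~6 MB is ample (even a raw 1080p YUV420 frame is only w*h*3/2 ≈ 3 MB)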
size_t buffSize;
size_t buffSizeMax;
-
bool payError;
+ bool ffmpegInited;
+ size_t frameCount;
+
+ AVCodecContext* pAVCodecContext;
+ AVFrame* pAVFrame;//#todo delete
- PL_H264Encoder_Internal() :
+ H264Encoder_Internal() :
buffSize(0), buffSizeMax(sizeof(buffer)),
- payError(true)
+ payError(true), ffmpegInited(false), frameCount(0),
+ pAVCodecContext(nullptr), pAVFrame(nullptr)
{
}
- ~PL_H264Encoder_Internal()
+ ~H264Encoder_Internal()
{
}
@@ -27,6 +36,11 @@
{
buffSize = 0;
payError = true;
+ ffmpegInited = false;
+ frameCount = 0;
+
+ pAVCodecContext = nullptr;
+ pAVFrame = nullptr;
}
};
@@ -35,52 +49,180 @@
return new PL_H264Encoder;
}
-PL_H264Encoder::PL_H264Encoder() : internal(new PL_H264Encoder_Internal)
+PL_H264Encoder::PL_H264Encoder() : internal(new H264Encoder_Internal)
{
}
PL_H264Encoder::~PL_H264Encoder()
{
- delete (PL_H264Encoder_Internal*)internal;
+ delete (H264Encoder_Internal*)internal;
internal= nullptr;
}
bool PL_H264Encoder::init(void* args)
{
- PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+ H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
in->reset();
-
+
return true;
}
void PL_H264Encoder::finit()
{
- PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+ H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
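+ //#todo release ffmpeg objects here: av_freep(&in->pAVFrame->data[0]), av_frame_free(&in->pAVFrame), avcodec_free_context(&in->pAVCodecContext)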
+}
+
+bool initH264EncoderEnv(H264Encoder_Internal* in)
+{
+ av_register_all();
+
+ // find the video encoder
+ AVCodec* avCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
+
+ if (!avCodec)
+ {
+ printf("codec not found!\n");
+ return false;
+ }
+
+ in->pAVCodecContext = avcodec_alloc_context3(avCodec);
+
+ in->pAVCodecContext->bit_rate = 3*1024*1024*8; // 3 MB/s expressed in bits per second (~25 Mbps)
+ in->pAVCodecContext->width = 1920;
+ in->pAVCodecContext->height = 1080;//#todo from config
+ in->pAVCodecContext->time_base.num=1;
+ in->pAVCodecContext->time_base.den=25;
+ in->pAVCodecContext->gop_size = 20;
+ in->pAVCodecContext->max_b_frames = 0;
+ in->pAVCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
+
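+ // opens the codec with libx264 defaults; as a possible latency tweak (not in the original patch),
+ // a preset could be set via av_opt_set(in->pAVCodecContext->priv_data, "preset", "ultrafast", 0)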
+ if(avcodec_open2(in->pAVCodecContext, avCodec, NULL) >= 0)
+ {
+ in->pAVFrame = av_frame_alloc(); // Allocate video frame
+
+ in->pAVFrame->format = in->pAVCodecContext->pix_fmt;
+ in->pAVFrame->width = in->pAVCodecContext->width;
+ in->pAVFrame->height = in->pAVCodecContext->height;
+
+ int ret = av_image_alloc(in->pAVFrame->data, in->pAVFrame->linesize, in->pAVCodecContext->width, in->pAVCodecContext->height,
+ in->pAVCodecContext->pix_fmt, 16);
+ if (ret < 0)
+ {
+ printf("av_image_alloc error\n");
+ return false;
+ }
+ }
+ else
+ {
+ printf("avcodec_open2 error\n");
+ return false;
+ }
+
+ return true;
+}
+
+void copyAVFrame(AVFrame* dest, AVFrame* src)
+{
+ int height = dest->height;
+ int width = dest->width;
+
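+ // NOTE: assumes tightly packed planes (linesize == width); if the decoder pads rows, copy row by row using linesize[]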
+ memcpy(dest->data[0], src->data[0], height * width); // Y
+ memcpy(dest->data[1], src->data[1], height * width / 4); // U
+ memcpy(dest->data[2], src->data[2], height * width / 4); // V
+}
+
+bool encodeH264(H264Encoder_Internal* in, AVFrame* pAVFrame, size_t buffSize)
+{
+ in->buffSize = 0;
+ in->frameCount++;
+
+ copyAVFrame(in->pAVFrame, pAVFrame);
+ in->pAVFrame->pts = in->frameCount;
+
+ AVPacket pAVPacket = {0};
+ av_init_packet(&pAVPacket);
+
+ // encode the image
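+ // (avcodec_encode_video2 is the FFmpeg 3.x API; FFmpeg 4+ replaces it with avcodec_send_frame/avcodec_receive_packet)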
+ int gotPacket = 0;
+ int ret = avcodec_encode_video2(in->pAVCodecContext, &pAVPacket, in->pAVFrame, &gotPacket);
+ if (ret < 0)
+ {
+ printf("avcodec_encode_video2 (1) error=%d\n", ret);
+ return false;
+ }
+
+ if (gotPacket > 0)
+ {
+ printf("Succeed to encode (1) frame=%d, size=%d\n", in->pAVFrame->pts, pAVPacket.size);
+ memcpy(in->buffer + in->buffSize, pAVPacket.data, pAVPacket.size);
+ in->buffSize += pAVPacket.size;
+ av_free_packet(&pAVPacket);
+ }
+
+ //#todo finit
+ //Flush Encoder
+ //while (gotPacket > 0)
+ //{
+ // ret = avcodec_encode_video2(in->pAVCodecContext, &pAVPacket, NULL, &gotPacket);
+ // if (ret < 0)
+ // {
+ // printf("avcodec_encode_video2 (2) error=%d\n", ret);
+ // return false;
+ // }
+ // if (gotPacket > 0)
+ // {
+ // printf("Succeed to encode (2) frame=%d, size=%d\n", in->pAVFrame->pts, pAVPacket.size);
+ // memcpy(in->buffer + in->buffSize, pAVPacket.data, pAVPacket.size);
+ // in->buffSize += pAVPacket.size;
+ // av_free_packet(&pAVPacket);
+ // }
+ //}
+
+ //#test
+ if (in->buffSize > 0)
+ {
+ static FILE * pFile = fopen("out.h264", "wb"); // opened once (static); note "wba+" is not a valid fopen mode
+ fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
+ fflush(pFile);
+ }
+
+ in->payError = (in->buffSize == 0);
+ return !(in->payError);
}
bool PL_H264Encoder::pay(const PipeMaterial& pm)
{
- PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+ H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
- //in->buffer readly
-
- //static size_t f=0;
- //char fname[50];
- //sprintf(fname, "%u.bgra", ++f);
- //FILE * pFile = fopen (fname,"wb");
- //fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
- //fclose(pFile);
-
- return true;
+ in->payError = true;
+
+ if (!in->ffmpegInited)
+ {
+ bool ret = initH264EncoderEnv(in);
+ if (!ret)
+ {
+ printf("initH264EncoderEnv error");
+ return false;
+ }
+ else
+ in->ffmpegInited = true;
+ }
+
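+ // pm.buffer carries the AVFrame* produced upstream by PL_H264Decoder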
+ bool ret = encodeH264(in, (AVFrame*)pm.buffer, pm.buffSize);
+ in->payError = !ret;
+ return ret;
}
bool PL_H264Encoder::gain(PipeMaterial& pm)
{
- PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+ H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
- pm.buffer = in->buffer;
- pm.buffSize = in->buffSize;
+ if (!in->payError)
+ {
+ pm.buffer = in->buffer;
+ pm.buffSize = in->buffSize;
+ }
pm.former = this;
- return true;
+ return !in->payError;
}
diff --git a/RtspFace/PL_RTSPClient.cpp b/RtspFace/PL_RTSPClient.cpp
index b83ec88..b26b2c1 100644
--- a/RtspFace/PL_RTSPClient.cpp
+++ b/RtspFace/PL_RTSPClient.cpp
@@ -36,12 +36,14 @@
{
pthread_mutex_destroy(frame_mutex);
delete frame_mutex;
+ frame_mutex = nullptr;
}
if (continue_mutex != nullptr)
{
pthread_mutex_destroy(continue_mutex);
delete continue_mutex;
+ continue_mutex = nullptr;
}
}
@@ -58,6 +60,7 @@
{
pthread_mutex_destroy(frame_mutex);
delete frame_mutex;
+ frame_mutex = nullptr;
}
frame_mutex = new pthread_mutex_t;
@@ -67,6 +70,7 @@
{
pthread_mutex_destroy(continue_mutex);
delete continue_mutex;
+ continue_mutex = nullptr;
}
continue_mutex = new pthread_mutex_t;
diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index ad27c00..936b31d 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -1,29 +1,123 @@
#include "PL_RTSPServer.h"
-#include "testOnDemandRTSPServer.hpp"
+#include <liveMedia.hh>
+#include <BasicUsageEnvironment.hh>
+
+class MyH264FramedSource : public FramedSource
+{
+public:
+ static MyH264FramedSource* createNew(UsageEnvironment& env);
+
+protected:
+ MyH264FramedSource(UsageEnvironment& env)
+ virtual ~MyH264FramedSource()
+
+ // override FramedSource
+ virtual void doGetNextFrame()
+ {
+ // deliverFrame
+ //if (fFrameSize > 0)
+ //{
+ // // send Frame to the consumer
+ // FramedSource::afterGetting(this);
+ //}
+
+
+ // isCurrentlyAwaitingData
+ //if (frame->m_size > fMaxSize)
+ //{
+ // fFrameSize = fMaxSize;
+ // fNumTruncatedBytes = frame->m_size - fMaxSize;
+ //}
+ //else
+ //{
+ // fFrameSize = frame->m_size;
+ //}
+ //memcpy(fTo, frame->m_buffer, fFrameSize);
+ //if (fFrameSize > 0)
+ // FramedSource::afterGetting(this);
+ }
+
+ virtual void doStopGettingFrames()
+ {
+ FramedSource::doStopGettingFrames();
+ }
+};
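+
+// A minimal sketch (an assumption, not part of this patch's original code) of the
+// ServerMediaSubsession wrapper that live555 requires: addSubsession() accepts a
+// ServerMediaSubsession*, not a bare FramedSource*, so MyH264FramedSource must be
+// wrapped before it can be served.
+class MyH264Subsession : public OnDemandServerMediaSubsession
+{
+public:
+ static MyH264Subsession* createNew(UsageEnvironment& env)
+ {
+ return new MyH264Subsession(env);
+ }
+
+protected:
+ MyH264Subsession(UsageEnvironment& env)
+ : OnDemandServerMediaSubsession(env, True /*reuseFirstSource*/) {}
+
+ // called by live555 when a client sets up this stream
+ virtual FramedSource* createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)
+ {
+ estBitrate = 500; // kbps, rough estimate
+ // the discrete framer splits the encoder output into individual H264 NAL units
+ return H264VideoStreamDiscreteFramer::createNew(envir(), MyH264FramedSource::createNew(envir()));
+ }
+
+ virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/)
+ {
+ return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
+ }
+};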
struct PL_RTSPServer_Internal
{
- uint8_t buffer[1920*1080*4];
- size_t buffSize;
- size_t buffSizeMax;
+ //uint8_t buffer[1920*1080*4];
+ //size_t buffSize;
+ //size_t buffSizeMax;
bool payError;
+ pthread_t live_daemon_thid;
+ pthread_mutex_t* frame_mutex;
+ bool live_daemon_running;
+
+ UsageEnvironment* env;
+
+ // To make the second and subsequent client for each stream reuse the same
+ // input stream as the first client (rather than playing the file from the
+ // start for each client), change the following "False" to "True":
+ Boolean reuseFirstSource;
+
+ // To stream *only* MPEG-1 or 2 video "I" frames
+ // (e.g., to reduce network bandwidth),
+ // change the following "False" to "True":
+ Boolean iFramesOnly;
+
+ UserAuthenticationDatabase* authDB;
+
+ RTSPServer* rtspServer;//#todo delete
+
+ char descriptionString[1024];
PL_RTSPServer_Internal() :
- buffSize(0), buffSizeMax(sizeof(buffer)),
- payError(true)
+ //buffSize(0), buffSizeMax(sizeof(buffer)),
+ payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false),
+ env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr),
+ rtspServer(nullptr)
{
+ frame_mutex = new pthread_mutex_t;
+ pthread_mutex_init(frame_mutex, NULL);
}
~PL_RTSPServer_Internal()
{
+ if (frame_mutex != nullptr)
+ {
+ pthread_mutex_destroy(frame_mutex);
+ delete frame_mutex;
+ frame_mutex = nullptr;
+ }
}
void reset()
{
- buffSize = 0;
+ //buffSize = 0;
+
payError = true;
+
+ if (frame_mutex != nullptr)
+ {
+ pthread_mutex_destroy(frame_mutex);
+ delete frame_mutex;
+ frame_mutex = nullptr;
+ }
+
+ frame_mutex = new pthread_mutex_t;
+ pthread_mutex_init(frame_mutex, NULL);
+
+ live_daemon_thid = 0;
+ live_daemon_running = false;
+
+ env = nullptr;
+ reuseFirstSource = False;
+ iFramesOnly = False;
+ authDB = nullptr;
+ rtspServer = nullptr;
+
+ strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
}
};
@@ -42,32 +136,92 @@
internal= nullptr;
}
+void* live_daemon_thd(void* arg)
+{
+ PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)arg;
+
+ // Begin by setting up our usage environment:
+ TaskScheduler* scheduler = BasicTaskScheduler::createNew();
+ in->env = BasicUsageEnvironment::createNew(*scheduler);
+
+#ifdef ACCESS_CONTROL
+ // To implement client access control to the RTSP server, do the following:
+ in->authDB = new UserAuthenticationDatabase;
+ in->authDB->addUserRecord("username1", "password1"); // replace these with real strings
+ // Repeat the above with each <username>, <password> that you wish to allow
+ // access to the server.
+#endif
+
+ // Create the RTSP server:
+ in->rtspServer = RTSPServer::createNew(*(in->env), 8554, in->authDB);
+ if (in->rtspServer == NULL)
+ {
+ *(in->env) << "Failed to create RTSP server: " << in->env->getResultMsg() << "\n";
+ return nullptr;
+ }
+
+ // Set up each of the possible streams that can be served by the
+ // RTSP server. Each such stream is implemented using a
+ // "ServerMediaSession" object, plus one or more
+ // "ServerMediaSubsession" objects for each audio/video substream.
+
+ char const* streamName = "plH264Encoder";
+ ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
+ sms->addSubsession(MyH264Subsession::createNew(*(in->env))); // addSubsession needs a ServerMediaSubsession, not a bare FramedSource; see the MyH264Subsession sketch above
+ in->rtspServer->addServerMediaSession(sms);
+
+ // announceStream
+ char* url = in->rtspServer->rtspURL(sms);
+ *(in->env) << "\n\"" << streamName << "\" stream, from the live H264 encoder\n";
+ *(in->env) << "Play this stream using the URL \"" << url << "\"\n";
+ delete[] url;
+
+ // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
+ // Try first with the default HTTP port (80), and then with the alternative HTTP
+ // port numbers (8000 and 8080).
+
+ if (in->rtspServer->setUpTunnelingOverHTTP(80))
+ *(in->env) << "\n(We use port " << in->rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
+ else
+ *(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";
+
+ in->live_daemon_running = true;
+ in->env->taskScheduler().doEventLoop(); // does not return
+ in->live_daemon_running = false;
+ return nullptr;
+}
+
bool PL_RTSPServer::init(void* args)
{
PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
in->reset();
+
+ int ret = pthread_mutex_lock(in->frame_mutex);
+ if(ret != 0)
+ {
+ printf("pthread_mutex_lock frame_mutex: %s/n", strerror(ret));
+ return false;
+ }
+
+ ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
+ if(ret != 0)
+ {
+ printf("pthread_create: %s/n", strerror(ret));
+ return false;
+ }
return true;
}
void PL_RTSPServer::finit()
{
- PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
-
+ PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+
+ pthread_join(in->live_daemon_thid, NULL);
}
bool PL_RTSPServer::pay(const PipeMaterial& pm)
{
PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
-
- //in->buffer readly
-
- //static size_t f=0;
- //char fname[50];
- //sprintf(fname, "%u.bgra", ++f);
- //FILE * pFile = fopen (fname,"wb");
- //fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
- //fclose(pFile);
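+ //#todo copy pm.buffer (encoded H264 NALs) into a queue guarded by frame_mutex, for MyH264FramedSource::doGetNextFrame to consume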
return true;
}
@@ -76,8 +230,8 @@
{
PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
- pm.buffer = in->buffer;
- pm.buffSize = in->buffSize;
+ pm.buffer = nullptr;
+ pm.buffSize = 0;
pm.former = this;
return true;
}
diff --git a/RtspFace/live555/testProgs/testH264VideoStreamer.hpp b/RtspFace/live555/testProgs/testH264VideoStreamer.hpp
deleted file mode 100644
index 74fe461..0000000
--- a/RtspFace/live555/testProgs/testH264VideoStreamer.hpp
+++ /dev/null
@@ -1,132 +0,0 @@
-/**********
-This library is free software; you can redistribute it and/or modify it under
-the terms of the GNU Lesser General Public License as published by the
-Free Software Foundation; either version 3 of the License, or (at your
-option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
-
-This library is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
-more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this library; if not, write to the Free Software Foundation, Inc.,
-51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-**********/
-// Copyright (c) 1996-2017, Live Networks, Inc. All rights reserved
-// A test program that reads a H.264 Elementary Stream video file
-// and streams it using RTP
-// main program
-//
-// NOTE: For this application to work, the H.264 Elementary Stream video file *must* contain SPS and PPS NAL units,
-// ideally at or near the start of the file. These SPS and PPS NAL units are used to specify 'configuration' information
-// that is set in the output stream's SDP description (by the RTSP server that is built in to this application).
-// Note also that - unlike some other "*Streamer" demo applications - the resulting stream can be received only using a
-// RTSP client (such as "openRTSP")
-
-#include <liveMedia.hh>
-#include <BasicUsageEnvironment.hh>
-#include <GroupsockHelper.hh>
-
-UsageEnvironment* env;
-char const* inputFileName = "test.264";
-H264VideoStreamFramer* videoSource;
-RTPSink* videoSink;
-
-void play(); // forward
-
-int main(int argc, char** argv) {
- // Begin by setting up our usage environment:
- TaskScheduler* scheduler = BasicTaskScheduler::createNew();
- env = BasicUsageEnvironment::createNew(*scheduler);
-
- // Create 'groupsocks' for RTP and RTCP:
- struct in_addr destinationAddress;
- destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
- // Note: This is a multicast address. If you wish instead to stream
- // using unicast, then you should use the "testOnDemandRTSPServer"
- // test program - not this test program - as a model.
-
- const unsigned short rtpPortNum = 18888;
- const unsigned short rtcpPortNum = rtpPortNum+1;
- const unsigned char ttl = 255;
-
- const Port rtpPort(rtpPortNum);
- const Port rtcpPort(rtcpPortNum);
-
- Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
- rtpGroupsock.multicastSendOnly(); // we're a SSM source
- Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
- rtcpGroupsock.multicastSendOnly(); // we're a SSM source
-
- // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
- OutPacketBuffer::maxSize = 100000;
- videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);
-
- // Create (and start) a 'RTCP instance' for this RTP sink:
- const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
- const unsigned maxCNAMElen = 100;
- unsigned char CNAME[maxCNAMElen+1];
- gethostname((char*)CNAME, maxCNAMElen);
- CNAME[maxCNAMElen] = '\0'; // just in case
- RTCPInstance* rtcp
- = RTCPInstance::createNew(*env, &rtcpGroupsock,
- estimatedSessionBandwidth, CNAME,
- videoSink, NULL /* we're a server */,
- True /* we're a SSM source */);
- // Note: This starts RTCP running automatically
-
- RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
- if (rtspServer == NULL) {
- *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
- exit(1);
- }
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, "testStream", inputFileName,
- "Session streamed by \"testH264VideoStreamer\"",
- True /*SSM*/);
- sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
- rtspServer->addServerMediaSession(sms);
-
- char* url = rtspServer->rtspURL(sms);
- *env << "Play this stream using the URL \"" << url << "\"\n";
- delete[] url;
-
- // Start the streaming:
- *env << "Beginning streaming...\n";
- play();
-
- env->taskScheduler().doEventLoop(); // does not return
-
- return 0; // only to prevent compiler warning
-}
-
-void afterPlaying(void* /*clientData*/) {
- *env << "...done reading from file\n";
- videoSink->stopPlaying();
- Medium::close(videoSource);
- // Note that this also closes the input file that this source read from.
-
- // Start playing once again:
- play();
-}
-
-void play() {
- // Open the input file as a 'byte-stream file source':
- ByteStreamFileSource* fileSource
- = ByteStreamFileSource::createNew(*env, inputFileName);
- if (fileSource == NULL) {
- *env << "Unable to open file \"" << inputFileName
- << "\" as a byte-stream file source\n";
- exit(1);
- }
-
- FramedSource* videoES = fileSource;
-
- // Create a framer for the Video Elementary Stream:
- videoSource = H264VideoStreamFramer::createNew(*env, videoES);
-
- // Finally, start playing:
- *env << "Beginning to read from file...\n";
- videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
-}
diff --git a/RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp b/RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp
deleted file mode 100644
index 2608308..0000000
--- a/RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp
+++ /dev/null
@@ -1,455 +0,0 @@
-/**********
-This library is free software; you can redistribute it and/or modify it under
-the terms of the GNU Lesser General Public License as published by the
-Free Software Foundation; either version 3 of the License, or (at your
-option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
-
-This library is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
-more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this library; if not, write to the Free Software Foundation, Inc.,
-51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-**********/
-// Copyright (c) 1996-2017, Live Networks, Inc. All rights reserved
-// A test program that demonstrates how to stream - via unicast RTP
-// - various kinds of file on demand, using a built-in RTSP server.
-// main program
-
-#include "liveMedia.hh"
-#include "BasicUsageEnvironment.hh"
-
-UsageEnvironment* env;
-
-// To make the second and subsequent client for each stream reuse the same
-// input stream as the first client (rather than playing the file from the
-// start for each client), change the following "False" to "True":
-Boolean reuseFirstSource = False;
-
-// To stream *only* MPEG-1 or 2 video "I" frames
-// (e.g., to reduce network bandwidth),
-// change the following "False" to "True":
-Boolean iFramesOnly = False;
-
-static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
- char const* streamName, char const* inputFileName); // fwd
-
-static char newDemuxWatchVariable;
-
-static MatroskaFileServerDemux* matroskaDemux;
-static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
- matroskaDemux = newDemux;
- newDemuxWatchVariable = 1;
-}
-
-static OggFileServerDemux* oggDemux;
-static void onOggDemuxCreation(OggFileServerDemux* newDemux, void* /*clientData*/) {
- oggDemux = newDemux;
- newDemuxWatchVariable = 1;
-}
-
-int main(int argc, char** argv) {
- // Begin by setting up our usage environment:
- TaskScheduler* scheduler = BasicTaskScheduler::createNew();
- env = BasicUsageEnvironment::createNew(*scheduler);
-
- UserAuthenticationDatabase* authDB = NULL;
-#ifdef ACCESS_CONTROL
- // To implement client access control to the RTSP server, do the following:
- authDB = new UserAuthenticationDatabase;
- authDB->addUserRecord("username1", "password1"); // replace these with real strings
- // Repeat the above with each <username>, <password> that you wish to allow
- // access to the server.
-#endif
-
- // Create the RTSP server:
- RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
- if (rtspServer == NULL) {
- *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
- exit(1);
- }
-
- char const* descriptionString
- = "Session streamed by \"testOnDemandRTSPServer\"";
-
- // Set up each of the possible streams that can be served by the
- // RTSP server. Each such stream is implemented using a
- // "ServerMediaSession" object, plus one or more
- // "ServerMediaSubsession" objects for each audio/video substream.
-
- // A MPEG-4 video elementary stream:
- {
- char const* streamName = "mpeg4ESVideoTest";
- char const* inputFileName = "test.m4e";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(MPEG4VideoFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A H.264 video elementary stream:
- {
- char const* streamName = "h264ESVideoTest";
- char const* inputFileName = "test.264";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(H264VideoFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A H.265 video elementary stream:
- {
- char const* streamName = "h265ESVideoTest";
- char const* inputFileName = "test.265";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(H265VideoFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A MPEG-1 or 2 audio+video program stream:
- {
- char const* streamName = "mpeg1or2AudioVideoTest";
- char const* inputFileName = "test.mpg";
- // NOTE: This *must* be a Program Stream; not an Elementary Stream
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- MPEG1or2FileServerDemux* demux
- = MPEG1or2FileServerDemux::createNew(*env, inputFileName, reuseFirstSource);
- sms->addSubsession(demux->newVideoServerMediaSubsession(iFramesOnly));
- sms->addSubsession(demux->newAudioServerMediaSubsession());
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A MPEG-1 or 2 video elementary stream:
- {
- char const* streamName = "mpeg1or2ESVideoTest";
- char const* inputFileName = "testv.mpg";
- // NOTE: This *must* be a Video Elementary Stream; not a Program Stream
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(MPEG1or2VideoFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource, iFramesOnly));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A MP3 audio stream (actually, any MPEG-1 or 2 audio file will work):
- // To stream using 'ADUs' rather than raw MP3 frames, uncomment the following:
-//#define STREAM_USING_ADUS 1
- // To also reorder ADUs before streaming, uncomment the following:
-//#define INTERLEAVE_ADUS 1
- // (For more information about ADUs and interleaving,
- // see <http://www.live555.com/rtp-mp3/>)
- {
- char const* streamName = "mp3AudioTest";
- char const* inputFileName = "test.mp3";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- Boolean useADUs = False;
- Interleaving* interleaving = NULL;
-#ifdef STREAM_USING_ADUS
- useADUs = True;
-#ifdef INTERLEAVE_ADUS
- unsigned char interleaveCycle[] = {0,2,1,3}; // or choose your own...
- unsigned const interleaveCycleSize
- = (sizeof interleaveCycle)/(sizeof (unsigned char));
- interleaving = new Interleaving(interleaveCycleSize, interleaveCycle);
-#endif
-#endif
- sms->addSubsession(MP3AudioFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource,
- useADUs, interleaving));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A WAV audio stream:
- {
- char const* streamName = "wavAudioTest";
- char const* inputFileName = "test.wav";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- // To convert 16-bit PCM data to 8-bit u-law, prior to streaming,
- // change the following to True:
- Boolean convertToULaw = False;
- sms->addSubsession(WAVAudioFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource, convertToULaw));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // An AMR audio stream:
- {
- char const* streamName = "amrAudioTest";
- char const* inputFileName = "test.amr";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(AMRAudioFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A 'VOB' file (e.g., from an unencrypted DVD):
- {
- char const* streamName = "vobTest";
- char const* inputFileName = "test.vob";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- // Note: VOB files are MPEG-2 Program Stream files, but using AC-3 audio
- MPEG1or2FileServerDemux* demux
- = MPEG1or2FileServerDemux::createNew(*env, inputFileName, reuseFirstSource);
- sms->addSubsession(demux->newVideoServerMediaSubsession(iFramesOnly));
- sms->addSubsession(demux->newAC3AudioServerMediaSubsession());
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A MPEG-2 Transport Stream:
- {
- char const* streamName = "mpeg2TransportStreamTest";
- char const* inputFileName = "test.ts";
- char const* indexFileName = "test.tsx";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(MPEG2TransportFileServerMediaSubsession
- ::createNew(*env, inputFileName, indexFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // An AAC audio stream (ADTS-format file):
- {
- char const* streamName = "aacAudioTest";
- char const* inputFileName = "test.aac";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(ADTSAudioFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A DV video stream:
- {
- // First, make sure that the RTPSinks' buffers will be large enough to handle the huge size of DV frames (as big as 288000).
- OutPacketBuffer::maxSize = 300000;
-
- char const* streamName = "dvVideoTest";
- char const* inputFileName = "test.dv";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(DVVideoFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A AC3 video elementary stream:
- {
- char const* streamName = "ac3AudioTest";
- char const* inputFileName = "test.ac3";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
-
- sms->addSubsession(AC3AudioFileServerMediaSubsession
- ::createNew(*env, inputFileName, reuseFirstSource));
-
- rtspServer->addServerMediaSession(sms);
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A Matroska ('.mkv') file, with video+audio+subtitle streams:
- {
- char const* streamName = "matroskaFileTest";
- char const* inputFileName = "test.mkv";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
-
- newDemuxWatchVariable = 0;
- MatroskaFileServerDemux::createNew(*env, inputFileName, onMatroskaDemuxCreation, NULL);
- env->taskScheduler().doEventLoop(&newDemuxWatchVariable);
-
- Boolean sessionHasTracks = False;
- ServerMediaSubsession* smss;
- while ((smss = matroskaDemux->newServerMediaSubsession()) != NULL) {
- sms->addSubsession(smss);
- sessionHasTracks = True;
- }
- if (sessionHasTracks) {
- rtspServer->addServerMediaSession(sms);
- }
- // otherwise, because the stream has no tracks, we don't add a ServerMediaSession to the server.
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A WebM ('.webm') file, with video(VP8)+audio(Vorbis) streams:
- // (Note: ".webm' files are special types of Matroska files, so we use the same code as the Matroska ('.mkv') file code above.)
- {
- char const* streamName = "webmFileTest";
- char const* inputFileName = "test.webm";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
-
- newDemuxWatchVariable = 0;
- MatroskaFileServerDemux::createNew(*env, inputFileName, onMatroskaDemuxCreation, NULL);
- env->taskScheduler().doEventLoop(&newDemuxWatchVariable);
-
- Boolean sessionHasTracks = False;
- ServerMediaSubsession* smss;
- while ((smss = matroskaDemux->newServerMediaSubsession()) != NULL) {
- sms->addSubsession(smss);
- sessionHasTracks = True;
- }
- if (sessionHasTracks) {
- rtspServer->addServerMediaSession(sms);
- }
- // otherwise, because the stream has no tracks, we don't add a ServerMediaSession to the server.
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // An Ogg ('.ogg') file, with video and/or audio streams:
- {
- char const* streamName = "oggFileTest";
- char const* inputFileName = "test.ogg";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
-
- newDemuxWatchVariable = 0;
- OggFileServerDemux::createNew(*env, inputFileName, onOggDemuxCreation, NULL);
- env->taskScheduler().doEventLoop(&newDemuxWatchVariable);
-
- Boolean sessionHasTracks = False;
- ServerMediaSubsession* smss;
- while ((smss = oggDemux->newServerMediaSubsession()) != NULL) {
- sms->addSubsession(smss);
- sessionHasTracks = True;
- }
- if (sessionHasTracks) {
- rtspServer->addServerMediaSession(sms);
- }
- // otherwise, because the stream has no tracks, we don't add a ServerMediaSession to the server.
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // An Opus ('.opus') audio file:
- // (Note: ".opus' files are special types of Ogg files, so we use the same code as the Ogg ('.ogg') file code above.)
- {
- char const* streamName = "opusFileTest";
- char const* inputFileName = "test.opus";
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
-
- newDemuxWatchVariable = 0;
- OggFileServerDemux::createNew(*env, inputFileName, onOggDemuxCreation, NULL);
- env->taskScheduler().doEventLoop(&newDemuxWatchVariable);
-
- Boolean sessionHasTracks = False;
- ServerMediaSubsession* smss;
- while ((smss = oggDemux->newServerMediaSubsession()) != NULL) {
- sms->addSubsession(smss);
- sessionHasTracks = True;
- }
- if (sessionHasTracks) {
- rtspServer->addServerMediaSession(sms);
- }
- // otherwise, because the stream has no tracks, we don't add a ServerMediaSession to the server.
-
- announceStream(rtspServer, sms, streamName, inputFileName);
- }
-
- // A MPEG-2 Transport Stream, coming from a live UDP (raw-UDP or RTP/UDP) source:
- {
- char const* streamName = "mpeg2TransportStreamFromUDPSourceTest";
- char const* inputAddressStr = "239.255.42.42";
- // This causes the server to take its input from the stream sent by the "testMPEG2TransportStreamer" demo application.
- // (Note: If the input UDP source is unicast rather than multicast, then change this to NULL.)
- portNumBits const inputPortNum = 1234;
- // This causes the server to take its input from the stream sent by the "testMPEG2TransportStreamer" demo application.
- Boolean const inputStreamIsRawUDP = False;
- ServerMediaSession* sms
- = ServerMediaSession::createNew(*env, streamName, streamName,
- descriptionString);
- sms->addSubsession(MPEG2TransportUDPServerMediaSubsession
- ::createNew(*env, inputAddressStr, inputPortNum, inputStreamIsRawUDP));
- rtspServer->addServerMediaSession(sms);
-
- char* url = rtspServer->rtspURL(sms);
- *env << "\n\"" << streamName << "\" stream, from a UDP Transport Stream input source \n\t(";
- if (inputAddressStr != NULL) {
- *env << "IP multicast address " << inputAddressStr << ",";
- } else {
- *env << "unicast;";
- }
- *env << " port " << inputPortNum << ")\n";
- *env << "Play this stream using the URL \"" << url << "\"\n";
- delete[] url;
- }
-
- // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
- // Try first with the default HTTP port (80), and then with the alternative HTTP
- // port numbers (8000 and 8080).
-
- if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
- *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
- } else {
- *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
- }
-
- env->taskScheduler().doEventLoop(); // does not return
-
- return 0; // only to prevent compiler warning
-}
-
-static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
- char const* streamName, char const* inputFileName) {
- char* url = rtspServer->rtspURL(sms);
- UsageEnvironment& env = rtspServer->envir();
- env << "\n\"" << streamName << "\" stream, from the file \""
- << inputFileName << "\"\n";
- env << "Play this stream using the URL \"" << url << "\"\n";
- delete[] url;
-}
diff --git a/RtspFace/live555/testProgs/testRTSPClient.hpp b/RtspFace/live555/testProgs/testRTSPClient.hpp
index 78088ef..bf37d55 100644
--- a/RtspFace/live555/testProgs/testRTSPClient.hpp
+++ b/RtspFace/live555/testProgs/testRTSPClient.hpp
@@ -20,8 +20,8 @@
// client application. For a full-featured RTSP client application - with much more functionality, and many options - see
// "openRTSP": http://www.live555.com/openRTSP/
-#include "liveMedia.hh"
-#include "BasicUsageEnvironment.hh"
+#include <liveMedia.hh>
+#include <BasicUsageEnvironment.hh>
#include <iostream>
diff --git a/RtspFace/main.cpp b/RtspFace/main.cpp
index d25eddb..b31af5c 100644
--- a/RtspFace/main.cpp
+++ b/RtspFace/main.cpp
@@ -1,6 +1,7 @@
#include "PipeLine.h"
#include "PL_RTSPClient.h"
#include "PL_H264Decoder.h"
+#include "PL_H264Encoder.h"
#include "PL_AVFrameYUV420.h"
#include <iostream>
@@ -13,6 +14,7 @@
pipeLine.register_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
pipeLine.register_elem_creator("PL_H264Decoder", create_PL_H264Decoder);
pipeLine.register_elem_creator("PL_AVFrameYUV420", create_PL_AVFrameYUV420);
+ pipeLine.register_elem_creator("PL_H264Encoder", create_PL_H264Encoder);
PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLine.push_elem("PL_RTSPClient");
RTSPConfig rtspConfig;
@@ -28,8 +30,11 @@
PL_H264Decoder* h264Decoder = (PL_H264Decoder*)pipeLine.push_elem("PL_H264Decoder");
h264Decoder->init(nullptr);
- PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
- avFrameYUV420->init(nullptr);
+ //PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
+ //avFrameYUV420->init(nullptr);
+
+ PL_H264Encoder* h264Encoder = (PL_H264Encoder*)pipeLine.push_elem("PL_H264Encoder");
+ h264Encoder->init(nullptr);
while(true)
{
diff --git a/RtspFace/make.sh b/RtspFace/make.sh
index 4749d56..4286377 100644
--- a/RtspFace/make.sh
+++ b/RtspFace/make.sh
@@ -2,6 +2,10 @@
LIVEMEDIA_INC="-I$LIVEMEDIA_BASE/liveMedia/include -I$LIVEMEDIA_BASE/groupsock/include -I$LIVEMEDIA_BASE/UsageEnvironment/include -I$LIVEMEDIA_BASE/BasicUsageEnvironment/include"
LIVEMEDIA_LIB="-L$LIVEMEDIA_BASE/liveMedia -L$LIVEMEDIA_BASE/groupsock -L$LIVEMEDIA_BASE/UsageEnvironment -L$LIVEMEDIA_BASE/BasicUsageEnvironment -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment"
+LIBX264_BASE=/opt/x264/inst
+LIBX264_INC="-I$LIBX264_BASE/include"
+LIBX264_LIB="-L$LIBX264_BASE/lib -lx264"
+
FFMPEG_BASE=/opt/ffmpeg-3.2.2/inst
FFMPEG_INC="-I$FFMPEG_BASE/include"
FFMPEG_LIB="-L$FFMPEG_BASE/lib -lavutil -lavformat -lswresample -lavcodec"
@@ -15,7 +19,7 @@
LIBYUV_LIB="-L$LIBYUV_BASE -lyuv"
CPPFLAGS+="-pthread $LIVEMEDIA_INC $FFMPEG_INC $LIBBASE64_INC $LIBYUV_INC"
-LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB"
+LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB $LIBX264_LIB"
CFLAGS+="-D__STDC_CONSTANT_MACROS"
@@ -25,10 +29,11 @@
g++ -g -c -std=c++11 main.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_RTSPClient.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_H264Decoder.cpp $CFLAGS $CPPFLAGS
-g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PL_AVFrameBGRA.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_H264Encoder.cpp $CFLAGS $CPPFLAGS
g++ -g -c -std=c++11 PipeLine.cpp $CFLAGS $CPPFLAGS
-g++ -g -std=c++11 main.o PL_RTSPClient.o PL_H264Decoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PipeLine.o $LDFLAGS -o rtsp_face
+g++ -g -std=c++11 main.o PL_RTSPClient.o PL_H264Decoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PL_H264Encoder.o PipeLine.o $LDFLAGS -o rtsp_face
-#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$FFMPEG_BASE/lib
+#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib
#./rtsp_face rtsp://admin:admin12345@192.168.1.63:554/h264/ch1/main/av_stream
--
Gitblit v1.8.0