Changed files:
  RtspFace/PL_H264Decoder.cpp
  RtspFace/PL_H264Encoder.cpp
  RtspFace/PL_RTSPClient.cpp
  RtspFace/PL_RTSPServer.cpp
  RtspFace/live555/testProgs/testH264VideoStreamer.hpp (deleted)
  RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp (deleted)
  RtspFace/live555/testProgs/testRTSPClient.hpp
  RtspFace/main.cpp
  RtspFace/make.sh
RtspFace/PL_H264Decoder.cpp
@@ -126,7 +126,7 @@
 	in->pAVCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
 	in->pAVCodecContext->bit_rate = 0;
 	in->pAVCodecContext->time_base.den = 25;
-	in->pAVCodecContext->width = 1920;
+	in->pAVCodecContext->width = 1920;//#todo get from pm
 	in->pAVCodecContext->height = 1080;
 	if (in->pAVCodecContext->extradata == NULL)
@@ -159,7 +159,7 @@
 bool decodeH264(H264Decoder_Internal* in, uint8_t* buffer, size_t buffSize)
 {
 	AVPacket packet = {0};
-	int frameFinished = buffSize;
+	int gotPicture = buffSize; // frameFinished
 	if (av_packet_from_data(&packet, buffer, buffSize) != 0)
 	{
@@ -168,8 +168,8 @@
 	}
 	// decode
-	avcodec_decode_video2(in->pAVCodecContext, in->pAVFrame, &frameFinished, &packet);
-	if(frameFinished)
+	avcodec_decode_video2(in->pAVCodecContext, in->pAVFrame, &gotPicture, &packet);
+	if(gotPicture)
 	{
 		// decode ok
 		return true;
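The rename above matches what avcodec_decode_video2() actually reports through its third argument: a got-picture flag that is non-zero once a complete frame is available, not a count of finished bytes. A minimal sketch against the FFmpeg 3.x API this tree builds with (illustrative names, error handling trimmed):

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <cstdint>

    // Decode one H.264 access unit; returns true only when 'frame' now
    // holds a complete picture. gotPicture is a flag, not a byte count.
    bool decode_one(AVCodecContext* ctx, AVFrame* frame,
                    uint8_t* data, int size)
    {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = data; // caller keeps ownership of the buffer
        pkt.size = size;

        int gotPicture = 0;
        int ret = avcodec_decode_video2(ctx, frame, &gotPicture, &pkt);
        return ret >= 0 && gotPicture != 0;
    }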
RtspFace/PL_H264Encoder.cpp
@@ -2,24 +2,33 @@
 extern "C"
 {
 	#include <libyuv.h>
 	#include <libavcodec/avcodec.h>
+	#include <libavutil/frame.h>
+	#include <libavformat/avformat.h>
+	#include "libavutil/imgutils.h"
 }
 
-struct PL_H264Encoder_Internal
+struct H264Encoder_Internal
 {
-	uint8_t buffer[1920*1080*4];
+	uint8_t buffer[1920*1080*3];
 	size_t buffSize;
 	size_t buffSizeMax;
 	bool payError;
+	bool ffmpegInited;
+	size_t frameCount;
+	AVCodecContext* pAVCodecContext;
+	AVFrame* pAVFrame;//#todo delete
 
-	PL_H264Encoder_Internal() :
+	H264Encoder_Internal() :
 		buffSize(0), buffSizeMax(sizeof(buffer)),
-		payError(true)
+		payError(true), ffmpegInited(false), frameCount(0),
+		pAVCodecContext(nullptr), pAVFrame(nullptr)
 	{
 	}
 
-	~PL_H264Encoder_Internal()
+	~H264Encoder_Internal()
 	{
 	}
@@ -27,6 +36,11 @@
 	{
 		buffSize = 0;
 		payError = true;
+		ffmpegInited = false;
+		frameCount = 0;
+		pAVCodecContext = nullptr;
+		pAVFrame = nullptr;
 	}
 };
@@ -35,52 +49,180 @@
 	return new PL_H264Encoder;
 }
 
-PL_H264Encoder::PL_H264Encoder() : internal(new PL_H264Encoder_Internal)
+PL_H264Encoder::PL_H264Encoder() : internal(new H264Encoder_Internal)
 {
 }
 
 PL_H264Encoder::~PL_H264Encoder()
 {
-	delete (PL_H264Encoder_Internal*)internal;
+	delete (H264Encoder_Internal*)internal;
 	internal= nullptr;
 }
 
 bool PL_H264Encoder::init(void* args)
 {
-	PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+	H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
 	in->reset();
 	return true;
 }
 
 void PL_H264Encoder::finit()
 {
-	PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+	H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
 }
 
+bool initH264EncoderEnv(H264Encoder_Internal* in)
+{
+	av_register_all();
+
+	// find the video encoder
+	AVCodec* avCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
+	if (!avCodec)
+	{
+		printf("codec not found!\n");
+		return false;
+	}
+
+	in->pAVCodecContext = avcodec_alloc_context3(avCodec);
+	in->pAVCodecContext->bit_rate = 3*1024*1024*8; // 3MB
+	in->pAVCodecContext->width = 1920;
+	in->pAVCodecContext->height = 1080;//#todo from config
+	in->pAVCodecContext->time_base.num=1;
+	in->pAVCodecContext->time_base.den=25;
+	in->pAVCodecContext->gop_size = 20;
+	in->pAVCodecContext->max_b_frames = 0;
+	in->pAVCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
+
+	if(avcodec_open2(in->pAVCodecContext, avCodec, NULL) >= 0)
+	{
+		in->pAVFrame = av_frame_alloc(); // Allocate video frame
+
+		in->pAVFrame->format = in->pAVCodecContext->pix_fmt;
+		in->pAVFrame->width = in->pAVCodecContext->width;
+		in->pAVFrame->height = in->pAVCodecContext->height;
+
+		int ret = av_image_alloc(in->pAVFrame->data, in->pAVFrame->linesize,
+				in->pAVCodecContext->width, in->pAVCodecContext->height,
+				in->pAVCodecContext->pix_fmt, 16);
+		if (ret < 0)
+		{
+			printf("av_image_alloc error\n");
+			return false;
+		}
+	}
+	else
+	{
+		printf("avcodec_open2 error\n");
+		return false;
+	}
+
+	return true;
+}
+
+void copyAVFrame(AVFrame* dest, AVFrame* src)
+{
+	int height = dest->height;
+	int width = dest->width;
+	memcpy(dest->data[0], src->data[0], height * width); // Y
+	memcpy(dest->data[1], src->data[1], height * width / 4); // U
+	memcpy(dest->data[2], src->data[2], height * width / 4); // V
+}
+
+bool encodeH264(H264Encoder_Internal* in, AVFrame* pAVFrame, size_t buffSize)
+{
+	in->buffSize = 0;
+	in->frameCount++;
+	copyAVFrame(in->pAVFrame, pAVFrame);
+	in->pAVFrame->pts = in->frameCount;
+
+	AVPacket pAVPacket = {0};
+	av_init_packet(&pAVPacket);
+
+	// encode the image
+	int gotPacket = 0;
+	int ret = avcodec_encode_video2(in->pAVCodecContext, &pAVPacket, in->pAVFrame, &gotPacket);
+	if (ret < 0)
+	{
+		printf("avcodec_encode_video2 (1) error=%d\n", ret);
+		return false;
+	}
+
+	if (gotPacket > 0)
+	{
+		printf("Succeed to encode (1) frame=%d, size=%d\n", in->pAVFrame->pts, pAVPacket.size);
+		memcpy(in->buffer + in->buffSize, pAVPacket.data, pAVPacket.size);
+		in->buffSize += pAVPacket.size;
+		av_free_packet(&pAVPacket);
+	}
+
+	//#todo finit
+	//Flush Encoder
+	//while (gotPacket > 0)
+	//{
+	//	ret = avcodec_encode_video2(in->pAVCodecContext, &pAVPacket, NULL, &gotPacket);
+	//	if (ret < 0)
+	//	{
+	//		printf("avcodec_encode_video2 (2) error=%d\n", ret);
+	//		return false;
+	//	}
+	//	if (gotPacket > 0)
+	//	{
+	//		printf("Succeed to encode (2) frame=%d, size=%d\n", in->pAVFrame->pts, pAVPacket.size);
+	//		memcpy(in->buffer + in->buffSize, pAVPacket.data, pAVPacket.size);
+	//		in->buffSize += pAVPacket.size;
+	//		av_free_packet(&pAVPacket);
+	//	}
+	//}
+
+	//#test
+	if (in->buffSize > 0)
+	{
+		static FILE * pFile = fopen("out.h264","wba+");
+		fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
+		fflush(pFile);
+	}
+
+	in->payError = (in->buffSize == 0);
+	return !(in->payError);
+}
+
 bool PL_H264Encoder::pay(const PipeMaterial& pm)
 {
-	PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+	H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
 
-	//in->buffer readly
-	//static size_t f=0;
-	//char fname[50];
-	//sprintf(fname, "%u.bgra", ++f);
-	//FILE * pFile = fopen (fname,"wb");
-	//fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
-	//fclose(pFile);
-	return true;
+	in->payError = true;
+
+	if (!in->ffmpegInited)
+	{
+		bool ret = initH264EncoderEnv(in);
+		if (!ret)
+		{
+			printf("initH264EncoderEnv error");
+			return false;
+		}
+		else
+			in->ffmpegInited = true;
+	}
+
+	bool ret = encodeH264(in, (AVFrame*)pm.buffer, pm.buffSize);
+	in->payError = !ret;
+	return ret;
 }
 
 bool PL_H264Encoder::gain(PipeMaterial& pm)
 {
-	PL_H264Encoder_Internal* in = (PL_H264Encoder_Internal*)internal;
+	H264Encoder_Internal* in = (H264Encoder_Internal*)internal;
 
-	pm.buffer = in->buffer;
-	pm.buffSize = in->buffSize;
+	if (!in->payError)
+	{
+		pm.buffer = in->buffer;
+		pm.buffSize = in->buffSize;
+	}
 	pm.former = this;
-	return true;
+	return !in->payError;
 }
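encodeH264() still leaves the drain step as commented-out #todo code. The usual pattern with this API is to keep calling avcodec_encode_video2() with a NULL frame at end of stream until the got-packet flag goes to zero, so frames buffered by the encoder (B-frame lookahead, rate control) are not dropped. A sketch of that flush under the same FFmpeg 3.x API used above (av_packet_unref() is the current spelling of the deprecated av_free_packet()):

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <cstdio>

    // Drain delayed packets once no more input frames will be fed.
    void flush_encoder(AVCodecContext* ctx, FILE* out)
    {
        int gotPacket = 1;
        while (gotPacket)
        {
            AVPacket pkt = {0};
            av_init_packet(&pkt);
            if (avcodec_encode_video2(ctx, &pkt, NULL, &gotPacket) < 0)
                break; // encoder error; nothing more to drain
            if (gotPacket)
            {
                fwrite(pkt.data, 1, pkt.size, out);
                av_packet_unref(&pkt);
            }
        }
    }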
RtspFace/PL_RTSPClient.cpp
@@ -36,12 +36,14 @@
 	{
 		pthread_mutex_destroy(frame_mutex);
 		delete frame_mutex;
+		frame_mutex = nullptr;
 	}
 	if (continue_mutex != nullptr)
 	{
 		pthread_mutex_destroy(continue_mutex);
 		delete continue_mutex;
+		continue_mutex = nullptr;
 	}
 }
@@ -58,6 +60,7 @@
 	{
 		pthread_mutex_destroy(frame_mutex);
 		delete frame_mutex;
+		frame_mutex = nullptr;
 	}
 	frame_mutex = new pthread_mutex_t;
@@ -67,6 +70,7 @@
 	{
 		pthread_mutex_destroy(continue_mutex);
 		delete continue_mutex;
+		continue_mutex = nullptr;
 	}
 	continue_mutex = new pthread_mutex_t;
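Each hunk here completes the same idiom: destroy, delete, then null the pointer, so a later cleanup pass (for example the destructor running after an explicit reset()) sees nullptr and does nothing. Factored out, the pattern looks like this (a sketch; the project keeps it inline):

    #include <pthread.h>

    // Destroy a heap-allocated mutex and null the caller's pointer so a
    // repeated call is a harmless no-op instead of a double destroy/free.
    static void destroy_mutex(pthread_mutex_t*& m)
    {
        if (m != nullptr)
        {
            pthread_mutex_destroy(m);
            delete m;
            m = nullptr;
        }
    }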
RtspFace/PL_RTSPServer.cpp
@@ -1,29 +1,123 @@
 #include "PL_RTSPServer.h"
-#include "testOnDemandRTSPServer.hpp"
+#include <liveMedia.hh>
+#include <BasicUsageEnvironment.hh>
+
+class MyH264FramedSource : public FramedSource
+{
+public:
+	static MyH264FramedSource* createNew(UsageEnvironment& env);
+
+protected:
+	MyH264FramedSource(UsageEnvironment& env)
+	virtual ~MyH264FramedSource()
+
+	// overide FramedSource
+	virtual void doGetNextFrame()
+	{
+		// deliverFrame
+		//if (fFrameSize > 0)
+		//{
+		//	// send Frame to the consumer
+		//	FramedSource::afterGetting(this);
+		//}
+
+		// isCurrentlyAwaitingData
+		//if (frame->m_size > fMaxSize)
+		//{
+		//	fFrameSize = fMaxSize;
+		//	fNumTruncatedBytes = frame->m_size - fMaxSize;
+		//}
+		//else
+		//{
+		//	fFrameSize = frame->m_size;
+		//}
+		//memcpy(fTo, frame->m_buffer, fFrameSize);
+		//if (fFrameSize > 0)
+		//	FramedSource::afterGetting(this);
+	}
+
+	virtual void doStopGettingFrames()
+	{
+		FramedSource::doStopGettingFrames();
+	}
+};
 
 struct PL_RTSPServer_Internal
 {
-	uint8_t buffer[1920*1080*4];
-	size_t buffSize;
-	size_t buffSizeMax;
+	//uint8_t buffer[1920*1080*4];
+	//size_t buffSize;
+	//size_t buffSizeMax;
 	bool payError;
+	pthread_t live_daemon_thid;
+	pthread_mutex_t* frame_mutex;
+	bool live_daemon_running;
+
+	UsageEnvironment* env;
+
+	// To make the second and subsequent client for each stream reuse the same
+	// input stream as the first client (rather than playing the file from the
+	// start for each client), change the following "False" to "True":
+	Boolean reuseFirstSource;
+
+	// To stream *only* MPEG-1 or 2 video "I" frames
+	// (e.g., to reduce network bandwidth),
+	// change the following "False" to "True":
+	Boolean iFramesOnly;
+
+	UserAuthenticationDatabase* authDB;
+	RTSPServer* rtspServer;//#todo delete
+	char descriptionString[1024];
 
 	PL_RTSPServer_Internal() :
-		buffSize(0), buffSizeMax(sizeof(buffer)), payError(true)
+		//buffSize(0), buffSizeMax(sizeof(buffer)),
+		payError(true), live_daemon_thid(0), frame_mutex(nullptr), live_daemon_running(false),
+		env(nullptr), reuseFirstSource(False), iFramesOnly(False), authDB(nullptr), rtspServer(nullptr);
 	{
+		pthread_mutex_init(frame_mutex, NULL);
 	}
 
+	~PL_RTSPServer_Internal()
+	{
+		if (frame_mutex != nullptr)
+		{
+			pthread_mutex_destroy(frame_mutex);
+			delete frame_mutex;
+			frame_mutex = nullptr;
+		}
+	}
+
 	void reset()
 	{
-		buffSize = 0;
+		//buffSize = 0;
 		payError = true;
+
+		if (frame_mutex != nullptr)
+		{
+			pthread_mutex_destroy(frame_mutex);
+			delete frame_mutex;
+			frame_mutex = nullptr;
+		}
+		frame_mutex = new pthread_mutex_t;
+		pthread_mutex_init(frame_mutex, NULL);
+
+		live_daemon_thid = 0;
+		live_daemon_running = false;
+		env = nullptr;
+		reuseFirstSource = False;
+		iFramesOnly = False;
+		authDB = nullptr;
+		rtspServer = nullptr;
+		strcpy(descriptionString, "Session streamed by \"testOnDemandRTSPServer\"");
 	}
 };
@@ -42,32 +136,92 @@
 	internal= nullptr;
 }
 
+void* live_daemon_thd(void* arg)
+{
+	RTSPClient_Internal* in = (RTSPClient_Internal*)arg;
+
+	// Begin by setting up our usage environment:
+	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
+	in->env = BasicUsageEnvironment::createNew(*scheduler);
+
+#ifdef ACCESS_CONTROL
+	// To implement client access control to the RTSP server, do the following:
+	in->authDB = new UserAuthenticationDatabase;
+	in->authDB->addUserRecord("username1", "password1"); // replace these with real strings
+	// Repeat the above with each <username>, <password> that you wish to allow
+	// access to the server.
+#endif
+
+	// Create the RTSP server:
+	in->rtspServer = RTSPServer::createNew(*env, 8554, authDB);
+	if (rtspServer == NULL)
+	{
+		*(in->env) << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
+		return;
+	}
+
+	// Set up each of the possible streams that can be served by the
+	// RTSP server. Each such stream is implemented using a
+	// "ServerMediaSession" object, plus one or more
+	// "ServerMediaSubsession" objects for each audio/video substream.
+	char const* streamName = "plH264Encoder";
+	ServerMediaSession* sms = ServerMediaSession::createNew(*(in->env), streamName, streamName, in->descriptionString);
+	sms->addSubsession(MyH264FramedSource::createNew(*(in->env), in));
+	in->rtspServer->addServerMediaSession(sms);
+
+	// announceStream
+	char* url = rtspServer->rtspURL(sms);
+	*(in->env) << "\n\"" << streamName << "\" stream, from the file \"" << inputFileName << "\"\n";
+	*(in->env) << "Play this stream using the URL \"" << url << "\"\n";
+	delete[] url;
+
+	// Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
+	// Try first with the default HTTP port (80), and then with the alternative HTTP
+	// port numbers (8000 and 8080).
+	if (rtspServer->setUpTunnelingOverHTTP(80))
+		*(in->env) << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
+	else
+		*(in->env) << "\n(RTSP-over-HTTP tunneling is not available.)\n";
+
+	in->live_daemon_running = true;
+	env->taskScheduler().doEventLoop(); // does not return
+	in->live_daemon_running = false;
+}
+
 bool PL_RTSPServer::init(void* args)
 {
 	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
 	in->reset();
+
+	int ret = pthread_mutex_lock(in->frame_mutex);
+	if(ret != 0)
+	{
+		printf("pthread_mutex_lock frame_mutex: %s/n", strerror(ret));
+		return false;
+	}
+
+	ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
+	if(ret != 0)
+	{
+		printf("pthread_create: %s/n", strerror(ret));
+		return false;
+	}
 
 	return true;
 }
 
 void PL_RTSPServer::finit()
 {
-	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
+	RTSPClient_Internal* in = (RTSPClient_Internal*)internal;
+	pthread_join(in->live_daemon_thid, NULL);
 }
 
 bool PL_RTSPServer::pay(const PipeMaterial& pm)
 {
 	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
 
 	//in->buffer readly
 	//static size_t f=0;
 	//char fname[50];
 	//sprintf(fname, "%u.bgra", ++f);
 	//FILE * pFile = fopen (fname,"wb");
 	//fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
 	//fclose(pFile);
 	return true;
 }
@@ -76,8 +230,8 @@
 {
 	PL_RTSPServer_Internal* in = (PL_RTSPServer_Internal*)internal;
 
-	pm.buffer = in->buffer;
-	pm.buffSize = in->buffSize;
+	pm.buffer = nullptr;
+	pm.buffSize = 0;
 	pm.former = this;
 	return true;
 }
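MyH264FramedSource above is still a stub: the constructor and destructor have no bodies yet and the delivery logic is commented out. For reference, a compilable skeleton of what that delivery path usually looks like in live555, modeled on the DeviceSource pattern; Frame and nextFrame() are hypothetical stand-ins for however the pipeline will hand over an encoded access unit:

    #include <liveMedia.hh>
    #include <cstring>

    struct Frame { unsigned char* m_buffer; unsigned m_size; }; // hypothetical
    Frame* nextFrame(); // hypothetical: next encoded frame, or NULL if none yet

    class H264FramedSourceSketch : public FramedSource
    {
    public:
        static H264FramedSourceSketch* createNew(UsageEnvironment& env)
        { return new H264FramedSourceSketch(env); }

    protected:
        H264FramedSourceSketch(UsageEnvironment& env) : FramedSource(env) {}
        virtual ~H264FramedSourceSketch() {}

        virtual void doGetNextFrame()
        {
            Frame* frame = nextFrame();
            if (frame == NULL) return; // deliver later, when data arrives

            // live555 gives us at most fMaxSize bytes; report any overflow
            // through fNumTruncatedBytes instead of overrunning fTo.
            if (frame->m_size > fMaxSize)
            {
                fFrameSize = fMaxSize;
                fNumTruncatedBytes = frame->m_size - fMaxSize;
            }
            else
                fFrameSize = frame->m_size;

            memcpy(fTo, frame->m_buffer, fFrameSize);
            FramedSource::afterGetting(this); // schedule delivery to the sink
        }
    };

One caveat the commented code already hints at: afterGetting() may only be invoked while isCurrentlyAwaitingData() holds, i.e. inside a pending doGetNextFrame() cycle.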
RtspFace/live555/testProgs/testH264VideoStreamer.hpp
File was deleted

RtspFace/live555/testProgs/testOnDemandRTSPServer.hpp
File was deleted
RtspFace/live555/testProgs/testRTSPClient.hpp
@@ -20,8 +20,8 @@
 // client application. For a full-featured RTSP client application - with much more functionality, and many options - see
 // "openRTSP": http://www.live555.com/openRTSP/
 
-#include "liveMedia.hh"
-#include "BasicUsageEnvironment.hh"
+#include <liveMedia.hh>
+#include <BasicUsageEnvironment.hh>
 
 #include <iostream>
RtspFace/main.cpp
@@ -1,6 +1,7 @@
 #include "PipeLine.h"
 #include "PL_RTSPClient.h"
 #include "PL_H264Decoder.h"
+#include "PL_H264Encoder.h"
 #include "PL_AVFrameYUV420.h"
 
 #include <iostream>
@@ -13,6 +14,7 @@
 	pipeLine.register_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
 	pipeLine.register_elem_creator("PL_H264Decoder", create_PL_H264Decoder);
 	pipeLine.register_elem_creator("PL_AVFrameYUV420", create_PL_AVFrameYUV420);
+	pipeLine.register_elem_creator("PL_H264Encoder", create_PL_H264Encoder);
 
 	PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLine.push_elem("PL_RTSPClient");
 	RTSPConfig rtspConfig;
@@ -28,8 +30,11 @@
 	PL_H264Decoder* h264Decoder = (PL_H264Decoder*)pipeLine.push_elem("PL_H264Decoder");
 	h264Decoder->init(nullptr);
 
-	PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
-	avFrameYUV420->init(nullptr);
+	//PL_AVFrameYUV420* avFrameYUV420 = (PL_AVFrameYUV420*)pipeLine.push_elem("PL_AVFrameYUV420");
+	//avFrameYUV420->init(nullptr);
+
+	PL_H264Encoder* h264Encoder = (PL_H264Encoder*)pipeLine.push_elem("PL_H264Encoder");
+	h264Encoder->init(nullptr);
 
 	while(true)
 	{
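main() now wires RTSP client -> H.264 decoder -> H.264 encoder, with the YUV420 tap commented out. The PipeLine internals are not part of this change, but the pay()/gain() signatures visible in the elements suggest the driver moves a PipeMaterial (buffer, size, former) from each element's gain() into the next element's pay(). A hypothetical reading of one pump step; PL_Elem and the exact field layout are assumptions, not the project's actual declarations:

    #include <cstddef>

    struct PipeMaterial { unsigned char* buffer; size_t buffSize; void* former; };

    struct PL_Elem // assumed minimal interface of a pipeline element
    {
        virtual bool pay(const PipeMaterial& pm) = 0;
        virtual bool gain(PipeMaterial& pm) = 0;
    };

    // One hand-off between adjacent elements: the upstream element exposes
    // its last output via gain(), the downstream consumes it via pay().
    bool pump_once(PL_Elem* upstream, PL_Elem* downstream)
    {
        PipeMaterial pm = {nullptr, 0, nullptr};
        if (!upstream->gain(pm))    // e.g. decoder exposes an AVFrame
            return false;
        return downstream->pay(pm); // e.g. encoder consumes and encodes it
    }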
RtspFace/make.sh
@@ -2,6 +2,10 @@
 LIVEMEDIA_INC="-I$LIVEMEDIA_BASE/liveMedia/include -I$LIVEMEDIA_BASE/groupsock/include -I$LIVEMEDIA_BASE/UsageEnvironment/include -I$LIVEMEDIA_BASE/BasicUsageEnvironment/include"
 LIVEMEDIA_LIB="-L$LIVEMEDIA_BASE/liveMedia -L$LIVEMEDIA_BASE/groupsock -L$LIVEMEDIA_BASE/UsageEnvironment -L$LIVEMEDIA_BASE/BasicUsageEnvironment -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment"
 
+LIBX264_BASE=/opt/x264/inst
+LIBX264_INC="-I$LIBX264_BASE/include"
+LIBX264_LIB="-L$LIBX264_BASE/lib -lx264"
+
 FFMPEG_BASE=/opt/ffmpeg-3.2.2/inst
 FFMPEG_INC="-I$FFMPEG_BASE/include"
 FFMPEG_LIB="-L$FFMPEG_BASE/lib -lavutil -lavformat -lswresample -lavcodec"
@@ -15,7 +19,7 @@
 LIBYUV_LIB="-L$LIBYUV_BASE -lyuv"
 
 CPPFLAGS+="-pthread $LIVEMEDIA_INC $FFMPEG_INC $LIBBASE64_INC $LIBYUV_INC"
-LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB"
+LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB $LIBX264_LIB"
 CFLAGS+="-D__STDC_CONSTANT_MACROS"
@@ -25,10 +29,11 @@
 g++ -g -c -std=c++11 main.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PL_RTSPClient.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PL_H264Decoder.cpp $CFLAGS $CPPFLAGS
-g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PL_AVFrameBGRA.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_AVFrameYUV420.cpp $CFLAGS $CPPFLAGS
+g++ -g -c -std=c++11 PL_H264Encoder.cpp $CFLAGS $CPPFLAGS
 g++ -g -c -std=c++11 PipeLine.cpp $CFLAGS $CPPFLAGS
 
-g++ -g -std=c++11 main.o PL_RTSPClient.o PL_H264Decoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PipeLine.o $LDFLAGS -o rtsp_face
+g++ -g -std=c++11 main.o PL_RTSPClient.o PL_H264Decoder.o PL_AVFrameYUV420.o PL_AVFrameBGRA.o PL_H264Encoder.o PipeLine.o $LDFLAGS -o rtsp_face
 
-#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$FFMPEG_BASE/lib
+#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib
 #./rtsp_face rtsp://admin:admin12345@192.168.1.63:554/h264/ch1/main/av_stream