From 633e76c1d533c3d9c257b92df7ebdfd36c9fd8a0 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Thu, 29 Dec 2016 18:42:50 +0800
Subject: [PATCH] Unify logging with log4cpp
---
RtspFace/main.cpp                   | 15 +-
RtspFace/PL_RTSPClient.cpp          |  7
RtspFace/PL_SensetimeFaceDetect.cpp | 29 ++--
RtspFace/PipeLine.cpp               | 42 ++++++-
RtspFace/make.sh                    | 10 +
RtspFace/PL_RTSPServer.cpp          | 18 +-
RtspFace/PipeLine.h                 | 15 +
RtspFace/PL_H264Decoder.cpp         | 28 +++-
RtspFace/PL_H264Encoder.cpp         | 11 +
RtspFace/logger.h                   | 49 ++++++++
RtspFace/MaterialBuffer.h           | 34 +++--
RtspFace/PL_AVFrameBGRA.cpp         | 57 +++++++--
RtspFace/PL_AVFrameYUV420.cpp       | 15 +-
13 files changed, 243 insertions(+), 87 deletions(-)
diff --git a/RtspFace/MaterialBuffer.h b/RtspFace/MaterialBuffer.h
index 399b637..b26da14 100644
--- a/RtspFace/MaterialBuffer.h
+++ b/RtspFace/MaterialBuffer.h
@@ -1,11 +1,9 @@
#ifndef _MATERIAL_BUFFER_H_
#define _MATERIAL_BUFFER_H_
+// timeval
#if defined(WIN32) || defined(_MSC_VER)
- struct timeval {
- time_t tv_sec; /* seconds */
- suseconds_t tv_usec; /* microseconds */
- };
+ #include <Winsock2.h>
#else
#include <sys/time.h>
#endif
@@ -15,21 +13,33 @@
enum MBFType
{
MBFT__FIRST,
- MBFT_JPEG,
- MBFT_YUV420,
- MBFT_BGRA,
- MBFT_H264_NALU,
- MBFT_H264_NALU_WITH_AUX,
- MBFT_PTR_AVFRAME,
+
+ MBFT_SDP, // buffer = char[N], buffSize = N
+ MBFT_FMTP, // buffer = char[N], buffSize = N
+
+ MBFT_JPEG, // buffer = uint8_t[N], buffSize = N
+ MBFT_YUV420, // buffer = uint8_t[N], buffSize = N
+ MBFT_BGRA, // buffer = uint8_t[N], buffSize = N
+
+ MBFT_H264_NALU, // buffer = uint8_t[N], buffSize = N
+ MBFT_H264_NALU_AUX, // buffer = uint8_t[N], buffSize = N//#todo support!
+
+ MBFT_PTR_AVFRAME, // buffer = AVFrame*, buffSize = 0
+
MBFT__LAST
};
MBFType type;
- uint8_t* buffer;//#todo void*
+ void* buffer;
size_t buffSize;
+ int width;
+ int height;
timeval pts;
- MB_Frame() : type(MBFT__FIRST), buffer(nullptr), buffSize(0), pts() { }
+ MB_Frame() :
+ type(MBFT__FIRST), buffer(nullptr), buffSize(0),
+ width(0), height(0), pts()
+ { }
};
#endif
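
Note on the buffer conventions documented above: a producer fills MB_Frame so that buffer/buffSize follow the comment for its MBFType. A minimal illustrative sketch (not part of the patch hunks; the wrapAVFrame helper is hypothetical) for the MBFT_PTR_AVFRAME case:

    #include "MaterialBuffer.h"
    extern "C" {
        #include <libavcodec/avcodec.h>
    }

    // MBFT_PTR_AVFRAME: buffer carries the AVFrame pointer itself, buffSize stays 0;
    // width/height/pts describe the picture for downstream elements.
    static MB_Frame wrapAVFrame(AVFrame* avFrame, const timeval& pts)
    {
        MB_Frame mb;
        mb.type = MB_Frame::MBFT_PTR_AVFRAME;
        mb.buffer = avFrame;   // pointer payload, not a byte array
        mb.buffSize = 0;       // 0 by convention for pointer payloads
        mb.width = avFrame->width;
        mb.height = avFrame->height;
        mb.pts = pts;
        return mb;
    }
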
diff --git a/RtspFace/PL_AVFrameBGRA.cpp b/RtspFace/PL_AVFrameBGRA.cpp
index 9329f14..4b0ba9e 100644
--- a/RtspFace/PL_AVFrameBGRA.cpp
+++ b/RtspFace/PL_AVFrameBGRA.cpp
@@ -1,4 +1,6 @@
#include "PL_AVFrameBGRA.h"
+#include "MaterialBuffer.h"
+#include "logger.h"
extern "C"
{
@@ -11,14 +13,15 @@
struct PL_AVFrameBGRA_Internal
{
- uint8_t buffer[1920*1080*4];
+ uint8_t buffer[1920*1080*4];//#todo
size_t buffSize;
size_t buffSizeMax;
+ MB_Frame lastFrame;
bool payError;
PL_AVFrameBGRA_Internal() :
- buffSize(0), buffSizeMax(sizeof(buffer)),
+ buffSize(0), buffSizeMax(sizeof(buffer)), lastFrame(),
payError(true)
{
}
@@ -31,6 +34,9 @@
{
buffSize = 0;
payError = true;
+
+ MB_Frame _lastFrame;
+ lastFrame = _lastFrame;
}
};
@@ -69,12 +75,28 @@
{
PL_AVFrameBGRA_Internal* in = (PL_AVFrameBGRA_Internal*)internal;
- AVFrame* pAVFrame = (AVFrame*)pm.buffer;
+ if (pm.type != PipeMaterial::PMT_FRAME)
+ {
+ LOG(ERROR) << "PL_AVFrameBGRA::pay only support PMT_FRAME";
+ return false;
+ }
+
+ if (pm.buffer == nullptr)
+ return false;
+
+ MB_Frame* frame = (MB_Frame*)pm.buffer;
+ if (frame->type != MB_Frame::MBFT_PTR_AVFRAME)
+ {
+ LOG(ERROR) << "PL_AVFrameBGRA::pay only support MBFT_PTR_AVFRAME";
+ return false;
+ }
+
+ AVFrame* pAVFrame = (AVFrame*)frame->buffer;
if (pAVFrame == nullptr)
return false;
- int height = pAVFrame->height;
- int width = pAVFrame->width;
+ const int height = pAVFrame->height;
+ const int width = pAVFrame->width;
//int I420ToBGRA(const uint8* src_y, int src_stride_y,
// const uint8* src_u, int src_stride_u,
@@ -90,13 +112,21 @@
in->buffSize = in->buffSizeMax;
//in->buffer readly
+
+ in->lastFrame.type = MB_Frame::MBFT_BGRA;
+ in->lastFrame.buffer = in->buffer;
+ in->lastFrame.buffSize = in->buffSize;
+ in->lastFrame.width = width;
+ in->lastFrame.height = height;
+ in->lastFrame.pts = frame->pts;
- static size_t f=0;
- char fname[50];
- sprintf(fname, "%u.bgra", ++f);
- FILE * pFile = fopen (fname,"wb");
- fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
- fclose(pFile);
+ //#test
+ //static size_t f=0;
+ //char fname[50];
+ //sprintf(fname, "%u.bgra", ++f);
+ //FILE * pFile = fopen (fname,"wb");
+ //fwrite (in->buffer , sizeof(char), in->buffSize, pFile);
+ //fclose(pFile);
return true;
}
@@ -105,8 +135,9 @@
{
PL_AVFrameBGRA_Internal* in = (PL_AVFrameBGRA_Internal*)internal;
- pm.buffer = in->buffer;
- pm.buffSize = in->buffSize;
+ pm.type = PipeMaterial::PMT_FRAME;
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
pm.former = this;
return true;
}
diff --git a/RtspFace/PL_AVFrameYUV420.cpp b/RtspFace/PL_AVFrameYUV420.cpp
index 495a852..93e0507 100644
--- a/RtspFace/PL_AVFrameYUV420.cpp
+++ b/RtspFace/PL_AVFrameYUV420.cpp
@@ -1,5 +1,6 @@
#include "PL_AVFrameYUV420.h"
#include "MaterialBuffer.h"
+#include "logger.h"
extern "C"
{
@@ -68,7 +69,7 @@
if (pm.type != PipeMaterial::PMT_FRAME)
{
- printf("PL_H264Encoder::pay only support PMT_FRAME\n");
+ LOG(ERROR) << "PL_AVFrameYUV420::pay only support PMT_FRAME";
return false;
}
@@ -78,7 +79,7 @@
MB_Frame* frame = (MB_Frame*)pm.buffer;
if (frame->type != MB_Frame::MBFT_PTR_AVFRAME)
{
- printf("PL_H264Encoder::pay only support MBFT_PTR_AVFRAME\n");
+ LOG(ERROR) << "PL_AVFrameYUV420::pay only support MBFT_PTR_AVFRAME";
return false;
}
@@ -89,8 +90,8 @@
int picSize = pAVFrame->height * pAVFrame->width;
in->buffSize = picSize * 1.5;
- int height = pAVFrame->height;
- int width = pAVFrame->width;
+ const int height = pAVFrame->height;
+ const int width = pAVFrame->width;
uint8_t* pBuff = in->buffer;
@@ -110,6 +111,8 @@
in->lastFrame.type = MB_Frame::MBFT_YUV420;
in->lastFrame.buffer = in->buffer;
in->lastFrame.buffSize = in->buffSize;
+ in->lastFrame.width = width;
+ in->lastFrame.height = height;
in->lastFrame.pts = frame->pts;
//#test
@@ -128,8 +131,8 @@
AVFrameYUV420_Internal* in = (AVFrameYUV420_Internal*)internal;
pm.type = PipeMaterial::PMT_FRAME;
- pm.buffer = (uint8_t*)(&(in->lastFrame));
- pm.buffSize = sizeof(in->lastFrame);
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
pm.former = this;
return true;
}
diff --git a/RtspFace/PL_H264Decoder.cpp b/RtspFace/PL_H264Decoder.cpp
index 11ae388..3f61cce 100644
--- a/RtspFace/PL_H264Decoder.cpp
+++ b/RtspFace/PL_H264Decoder.cpp
@@ -1,5 +1,6 @@
#include "PL_H264Decoder.h"
#include "MaterialBuffer.h"
+#include "logger.h"
#include <H264VideoRTPSource.hh> // for SPropRecord
#include <libbase64.h>
@@ -121,7 +122,7 @@
if (!avCodec)
{
- printf("codec not found!\n");
+ LOG(WARN) << "codec not found!";
return false;
}
@@ -169,7 +170,7 @@
if (av_packet_from_data(&packet, buffer, buffSize) != 0)
{
- printf("av_packet_from_data error\n");
+ LOG(WARN) << "av_packet_from_data error";
return false;
}
@@ -184,7 +185,7 @@
}
else
{
- printf("incomplete frame\n");
+ LOG(WARN) << "incomplete frame";
return false;
}
}
@@ -214,30 +215,41 @@
bool ret = initH264DecoderEnv(in, sps.sPropBytes, sps.sPropLength, pps.sPropBytes, pps.sPropLength);
if (!ret)
+ {
+ LOG(ERROR) << "PL_H264Decoder::pay initH264DecoderEnv error";
return false; // #todo log
+ }
else
in->fmtp_set_to_context = true;
}
- if (pm.buffer == nullptr || pm.buffSize <= 0)
+ if (pm.buffer == nullptr)
return false;
bool ret = false;
if (pm.type == PipeMaterial::PMT_BYTES)
{
+ if (pm.buffSize <= 0)
+ return false;
+
timeval pts = {0};
- ret = decodeH264(in, pm.buffer, pm.buffSize, pts);
+ ret = decodeH264(in, (uint8_t*)pm.buffer, pm.buffSize, pts);
}
else if (pm.type == PipeMaterial::PMT_FRAME)
{
MB_Frame* frame = (MB_Frame*)pm.buffer;
- ret = decodeH264(in, frame->buffer, frame->buffSize, frame->pts);
+ if (frame->buffSize <= 0)
+ return false;
+
+ ret = decodeH264(in, (uint8_t*)frame->buffer, frame->buffSize, frame->pts);
if (ret)
{
in->lastFrame.type = MB_Frame::MBFT_PTR_AVFRAME;
in->lastFrame.buffer = (uint8_t*)(in->pAVFrame);
in->lastFrame.buffSize = sizeof(in->pAVFrame);
+ in->lastFrame.width = in->pAVFrame->width;
+ in->lastFrame.height = in->pAVFrame->height;
//in->lastFrame.pts = frame->pts;//#todo
gettimeofday(&(in->lastFrame.pts),NULL);
}
@@ -254,8 +266,8 @@
if (!in->payError)
{
pm.type = PipeMaterial::PMT_FRAME;
- pm.buffer = (uint8_t*)(&(in->lastFrame));
- pm.buffSize = sizeof(uint8_t*);
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
}
pm.former = this;
return !in->payError;
diff --git a/RtspFace/PL_H264Encoder.cpp b/RtspFace/PL_H264Encoder.cpp
index 33ffc40..7068b79 100644
--- a/RtspFace/PL_H264Encoder.cpp
+++ b/RtspFace/PL_H264Encoder.cpp
@@ -105,13 +105,13 @@
in->pAVCodecContext->height = 600;//#todo from config
in->pAVCodecContext->time_base.num=1;
in->pAVCodecContext->time_base.den=25;
- in->pAVCodecContext->gop_size = 2;
+ in->pAVCodecContext->gop_size = 25;
in->pAVCodecContext->max_b_frames = 0;
//in->pAVCodecContext->profile = FF_PROFILE_H264_MAIN;
in->pAVCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
av_opt_set(in->pAVCodecContext->priv_data, "preset", "superfast", 0);
- //av_opt_set(c->priv_data, "tune", "zerolatency", 0);
+ //av_opt_set(in->pAVCodecContext->priv_data, "tune", "zerolatency", 0);
if(avcodec_open2(in->pAVCodecContext, avCodec, NULL) >= 0)
{
@@ -323,6 +323,8 @@
in->lastFrame.type = MB_Frame::MBFT_H264_NALU;
in->lastFrame.buffer = in->buffer;
in->lastFrame.buffSize = in->buffSize;
+ in->lastFrame.width = frame->width;
+ in->lastFrame.height = frame->height;
in->lastFrame.pts = frame->pts;
}
@@ -336,9 +338,8 @@
if (!in->payError)
{
pm.type = PipeMaterial::PMT_FRAME;
- pm.buffer = (uint8_t*)(&(in->lastFrame));
- pm.buffSize = sizeof(in->lastFrame);
- pm.former = this;
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
}
pm.former = this;
return !in->payError;
diff --git a/RtspFace/PL_RTSPClient.cpp b/RtspFace/PL_RTSPClient.cpp
index 71869db..ae885d7 100644
--- a/RtspFace/PL_RTSPClient.cpp
+++ b/RtspFace/PL_RTSPClient.cpp
@@ -1,5 +1,6 @@
#include "PL_RTSPClient.h"
#include "MaterialBuffer.h"
+#include "logger.h"
#include <pthread.h>
void rtsp_client_sdp_callback(void* arg, const char* val);
@@ -179,8 +180,8 @@
}
pm.type = PipeMaterial::PMT_FRAME;
- pm.buffer = (uint8_t*)(&(in->lastFrame));
- pm.buffSize = sizeof(in->lastFrame);
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
pm.former = this;
return true;
@@ -223,6 +224,8 @@
in->lastFrame.type = MB_Frame::MBFT_H264_NALU;
in->lastFrame.buffer = buffer;
in->lastFrame.buffSize = buffSize;
+ in->lastFrame.width = 0;
+ in->lastFrame.height = 0;
in->lastFrame.pts = presentationTime;
int ret = pthread_mutex_unlock(in->frame_mutex);
diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index 6c6394b..79b78fd 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -1,5 +1,6 @@
#include "PL_RTSPServer.h"
#include "MaterialBuffer.h"
+#include "logger.h"
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
@@ -112,7 +113,7 @@
*FrameBuffer = pBuffer;
*FrameSize = newBufferSize;
- printf("send frame size=%u\n", in.buffSize);
+ LOG(DEBUG) << "send frame size=" << in.buffSize;
}
virtual char ReleaseFrame()
@@ -124,7 +125,7 @@
int ret = pthread_mutex_unlock(in.frame_mutex);
if(ret != 0)
{
- printf("pthread_mutex_unlock frame_mutex: %s/n", strerror(ret));
+ LOG(WARN) << "pthread_mutex_unlock frame_mutex: " << strerror(ret);
return 0;
}
}
@@ -142,7 +143,7 @@
int ret = pthread_mutex_lock(in.frame_mutex);
if(ret != 0)
{
- printf("pthread_mutex_lock frame_mutex: %s/n", strerror(ret));
+ LOG(WARN) << "pthread_mutex_lock frame_mutex: " << strerror(ret);
return;
}
}
@@ -196,7 +197,7 @@
int ret = pthread_create(&(in->live_daemon_thid), NULL, live_daemon_thd, in);
if(ret != 0)
{
- printf("pthread_create: %s/n", strerror(ret));
+ LOG(ERROR) << "pthread_create: " << strerror(ret);
return false;
}
@@ -214,19 +215,21 @@
{
RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
- if (pm.buffer == nullptr || pm.buffSize <= 0)
+ if (pm.buffer == nullptr)
return false;
if (pm.type != PipeMaterial::PMT_FRAME)
{
- printf("PL_RTSPServer::pay only support PMT_FRAME\n");
+ LOG(ERROR) << "PL_RTSPServer::pay only support PMT_FRAME";
return false;
}
if (in->buffSize > 0)
- printf("PL_RTSPServer::pay may lost data size=%u\n", in->buffSize);
+ LOG(WARN) << "PL_RTSPServer::pay may lose data size=" << in->buffSize;
MB_Frame* frame = (MB_Frame*)pm.buffer;
+ if (frame->buffer == nullptr)
+ return false;
memcpy(in->buffer, frame->buffer, frame->buffSize);
in->buffSize = frame->buffSize;
@@ -241,6 +244,7 @@
{
RTSPServer_Internal* in = (RTSPServer_Internal*)internal;
+ pm.type = PipeMaterial::PMT_NONE;
pm.buffer = nullptr;
pm.buffSize = 0;
pm.former = this;
diff --git a/RtspFace/PL_SensetimeFaceDetect.cpp b/RtspFace/PL_SensetimeFaceDetect.cpp
index f6b22c1..85245fb 100644
--- a/RtspFace/PL_SensetimeFaceDetect.cpp
+++ b/RtspFace/PL_SensetimeFaceDetect.cpp
@@ -1,5 +1,6 @@
#include "PL_SensetimeFaceDetect.h"
#include "MaterialBuffer.h"
+#include "logger.h"
#include <opencv2/opencv.hpp>
#include <cv_face.h>
@@ -69,7 +70,7 @@
in->config.point_size_config = CV_DETECT_ENABLE_ALIGN_106;
else
{
- printf("alignment point size must be 21 or 106\n");
+ LOG(ERROR) << "alignment point size must be 21 or 106";
return false;
}
@@ -78,7 +79,7 @@
in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD);
if (cv_result != CV_OK)
{
- printf("cv_face_create_tracker failed, error code %d\n", cv_result);
+ LOG(ERROR) << "cv_face_create_tracker failed, error code " << cv_result;
return false;
}
@@ -86,11 +87,11 @@
cv_result = cv_face_track_set_detect_face_cnt_limit(in->handle_track, in->config.detect_face_cnt_limit, &val);
if (cv_result != CV_OK)
{
- printf("cv_face_track_set_detect_face_cnt_limit failed, error : %d\n", cv_result);
+ LOG(ERROR) << "cv_face_track_set_detect_face_cnt_limit failed, error : " << cv_result;
return false;
}
else
- printf("detect face count limit : %d\n", val);
+ LOG(INFO) << "detect face count limit : " << val;
return true;
}
@@ -119,21 +120,21 @@
CV_FACE_UP, &p_face, &face_count);
if (cv_result != CV_OK)
{
- printf("cv_face_track failed, error : %d\n", cv_result);
+ LOG(ERROR) << "cv_face_track failed, error : " << cv_result;
cv_face_release_tracker_result(p_face, face_count);
return -1;
}
// draw the video
- cv::Mat yuvMat(cv::Size(1920,1080), CV_8UC3, buffer);
+ cv::Mat yuvMat(cv::Size(1920,1080), CV_8UC3, buffer);//#todo
cv::Mat yMat(cv::Size(1920,1080), CV_8UC1, buffer);
for (int i = 0; i < face_count; i++)
{
- printf("face: %d-----[%d, %d, %d, %d]-----id: %d\n", i,
+ LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
p_face[i].rect.left, p_face[i].rect.top,
p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
- printf("face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]\n",
+ LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
p_face[i].yaw,
p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
@@ -179,7 +180,7 @@
if (pm.type != PipeMaterial::PMT_FRAME)
{
- printf("PL_H264Encoder::pay only support PMT_FRAME\n");
+ LOG(ERROR) << "PL_SensetimeFaceDetect::pay only support PMT_FRAME";
return false;
}
@@ -189,11 +190,11 @@
MB_Frame* frame = (MB_Frame*)pm.buffer;
if (frame->type != MB_Frame::MBFT_YUV420)
{
- printf("PL_H264Encoder::pay only support MBFT_YUV420\n");
+ LOG(ERROR) << "PL_SensetimeFaceDetect::pay only support MBFT_YUV420";
return false;
}
- int face_count = doFaceDetect(in, frame->buffer, 1920, 1080, 1920, CV_PIX_FMT_YUV420P);
+ int face_count = doFaceDetect(in, (uint8_t*)frame->buffer, 1920, 1080, 1920, CV_PIX_FMT_YUV420P);//#todo
if (face_count < 0)
{
in->payError = true;
@@ -207,6 +208,8 @@
in->lastFrame.type = MB_Frame::MBFT_YUV420;
in->lastFrame.buffer = frame->buffer;//#todo should copy
in->lastFrame.buffSize = frame->buffSize;
+ in->lastFrame.width = frame->width;
+ in->lastFrame.height = frame->height;
in->lastFrame.pts = frame->pts;
return true;
@@ -219,8 +222,8 @@
if (!in->payError)
{
pm.type = PipeMaterial::PMT_FRAME;
- pm.buffer = (uint8_t*)(&(in->lastFrame));
- pm.buffSize = sizeof(in->lastFrame);
+ pm.buffer = &(in->lastFrame);
+ pm.buffSize = 0;
pm.former = this;
}
pm.former = this;
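
Note: with width and height now carried on MB_Frame, the hard-coded 1920x1080 values flagged with #todo above could later come from the frame itself. An illustrative sketch (not part of the patch; assumes the stride equals the width, i.e. no row padding):

    int face_count = doFaceDetect(in, (uint8_t*)frame->buffer,
                                  frame->width, frame->height,
                                  frame->width /* stride, assuming no padding */,
                                  CV_PIX_FMT_YUV420P);
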
diff --git a/RtspFace/PipeLine.cpp b/RtspFace/PipeLine.cpp
index 8b1ffea..2491199 100644
--- a/RtspFace/PipeLine.cpp
+++ b/RtspFace/PipeLine.cpp
@@ -1,4 +1,5 @@
#include "PipeLine.h"
+#include "logger.h"
PipeMaterial::PipeMaterial() :
type(PMT__FIRST), buffer(nullptr), buffSize(0),
@@ -77,8 +78,35 @@
return elem;
}
+class PipeDebugger
+{
+private:
+ PipeLine* pipeLine;
+
+public:
+ PipeLineElem* retElem;
+ PipeMaterial* pm;
+
+ PipeDebugger(PipeLine* _pipeLine) :
+ pipeLine(_pipeLine), retElem(nullptr), pm(nullptr)
+ {
+ LOG(DEBUG) << "pipe line begin";
+ }
+
+ ~PipeDebugger()
+ {
+ bool retOK = (*(pipeLine->elems).rbegin() == retElem);
+ if (retOK)
+ LOG(DEBUG) << "pipe line end, ret OK";
+ else
+ LOG(WARN) << "pipe line end, ret ERROR";
+ }
+};
+
PipeLineElem* PipeLine::pipe(PipeMaterial* pm /*= nullptr*/)
{
+ PipeDebugger debugger(this);
+
PipeLineElem* elem_begin = *elems.begin();
PipeLineElem* elem_last = *elems.rbegin();
@@ -88,12 +116,14 @@
uint8_t pmPlacement[sizeof(PipeMaterial)];
if (pm == nullptr)
pm = new (pmPlacement) PipeMaterial;
+
+ debugger.pm = pm;
if (elems.size() == 1)
{
elem_begin->gain(*pm);
pm->exec_deleter();
- return elem_begin;
+ return debugger.retElem = elem_begin;
}
else if (elems.size() == 2)
{
@@ -103,13 +133,13 @@
pm->exec_deleter();
}
else
- return elem_begin;
- return elem_last;
+ return debugger.retElem = elem_begin;
+ return debugger.retElem = elem_last;
}
else
{
if (!elem_begin->gain(*pm))
- return elem_begin;
+ return debugger.retElem = elem_begin;
bool lastRet = true;
elem_vec_t::iterator iter = elems.begin();
@@ -123,7 +153,7 @@
lastRet = elem_begin->gain(*pm);
}
else
- return elem_begin;//#todo this may memory leakage in pm
+ return debugger.retElem = elem_begin;
++iter;
elem_begin = *iter;
@@ -134,7 +164,7 @@
elem_last->pay(*pm);
pm->exec_deleter();
}
- return elem_last;
+ return debugger.retElem = elem_last;
}
return nullptr;
diff --git a/RtspFace/PipeLine.h b/RtspFace/PipeLine.h
index e690be3..79e1984 100644
--- a/RtspFace/PipeLine.h
+++ b/RtspFace/PipeLine.h
@@ -21,14 +21,16 @@
enum PipeMaterialBufferType
{
PMT__FIRST,
- PMT_BYTES, // uint8_t[]
- PMT_FRAME, // MB_Frame*
- PMT_PM_LIST,
+ PMT_NONE, // buffer = nullptr, buffSize = 0
+ PMT_BYTES, // buffer = uint8_t[N], buffSize = N
+ PMT_FRAME, // buffer = MB_Frame*, buffSize = 0
+ PMT_PM_LIST, // buffer = PipeMaterial*[N], buffSize = N
+ PMT_FRAME_LIST, // buffer = MB_Frame*[N], buffSize = N
PMT__LAST
};
- PipeMaterialBufferType type; // #todo MaterialBuffer merge into there
- uint8_t* buffer;//#todo void*
+ PipeMaterialBufferType type;
+ void* buffer;
size_t buffSize;
PipeLineElem* former;
pm_deleter_func deleter;
@@ -65,6 +67,8 @@
// gain --> [pay --> pm.deleter --> gain -->] [pay --> pm.deleter --> gain -->] ... --> pay --> pm.deleter
class PipeLine
{
+ friend class PipeDebugger;
+
public:
PipeLine();
@@ -76,6 +80,7 @@
PipeLineElem* push_elem(const std::string& type);
// do pipe sync. returns the element who returns false, or the last one.
+ // if false return, the element should deal with pm, clean up.
PipeLineElem* pipe(PipeMaterial* pm = nullptr);
// do pipe async
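
Note on the conventions above: with PMT_FRAME the material's buffer is an MB_Frame* and buffSize is 0, so a typical element's gain() hands out a pointer to its own lastFrame. An illustrative sketch (element and internal struct names are hypothetical):

    // Sketch only; SomeElem / SomeElem_Internal are placeholders.
    bool SomeElem::gain(PipeMaterial& pm)
    {
        SomeElem_Internal* in = (SomeElem_Internal*)internal;
        if (in->payError)
        {
            pm.type = PipeMaterial::PMT_NONE;   // nothing to hand over
            pm.buffer = nullptr;
            pm.buffSize = 0;
        }
        else
        {
            pm.type = PipeMaterial::PMT_FRAME;  // buffer = MB_Frame*, buffSize = 0
            pm.buffer = &(in->lastFrame);
            pm.buffSize = 0;
        }
        pm.former = this;
        return !in->payError;
    }
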
diff --git a/RtspFace/logger.h b/RtspFace/logger.h
new file mode 100644
index 0000000..fe50d00
--- /dev/null
+++ b/RtspFace/logger.h
@@ -0,0 +1,49 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** logger.h
+**
+** -------------------------------------------------------------------------*/
+
+#ifndef LOGGER_H
+#define LOGGER_H
+
+#include <unistd.h>
+
+#include "log4cpp/Category.hh"
+#include "log4cpp/FileAppender.hh"
+#include "log4cpp/PatternLayout.hh"
+
+
+#define LOG(__level) log4cpp::Category::getRoot() << log4cpp::Priority::__level << __FILE__ << ":" << __LINE__ << "\t"
+#define LOGP(__level, __format, arg...) log4cpp::Category::getRoot().log(log4cpp::Priority::__level, "%s:%d\t" __format, __FILE__, __LINE__, ##arg)
+
+inline void initLogger(int verbose)
+{
+ // initialize log4cpp
+ log4cpp::Category &log = log4cpp::Category::getRoot();
+ log4cpp::Appender *app = new log4cpp::FileAppender("root", fileno(stdout));
+ if (app)
+ {
+ log4cpp::PatternLayout *plt = new log4cpp::PatternLayout();
+ if (plt)
+ {
+ plt->setConversionPattern("%d [%-6p] - %m%n");
+ app->setLayout(plt);
+ }
+ log.addAppender(app);
+ }
+ switch (verbose)
+ {
+ case 2: log.setPriority(log4cpp::Priority::DEBUG); break;
+ case 1: log.setPriority(log4cpp::Priority::INFO); break;
+ default: log.setPriority(log4cpp::Priority::NOTICE); break;
+
+ }
+ LOG(INFO) << "level:" << log4cpp::Priority::getPriorityName(log.getPriority());
+}
+
+#endif
+
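
Note: intended usage of the new logging facility, mirroring what main.cpp does below. A minimal sketch (verbosity 2 enables DEBUG, 1 enables INFO, anything else defaults to NOTICE):

    #include "logger.h"

    int main()
    {
        initLogger(2);                                  // DEBUG and above go to stdout
        LOG(INFO) << "stream-style message, value=" << 42;
        LOGP(DEBUG, "printf-style message: %d faces", 3);
        return 0;
    }
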
diff --git a/RtspFace/main.cpp b/RtspFace/main.cpp
index bf67254..2e9ae59 100644
--- a/RtspFace/main.cpp
+++ b/RtspFace/main.cpp
@@ -9,11 +9,12 @@
#include "PL_SensetimeFaceDetect.h"
-#include <iostream>
-using namespace std;
+#include "logger.h"
int main(int argc, char** argv)
{
+ initLogger(2);
+
PipeLine pipeLine;
pipeLine.register_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
@@ -37,7 +38,7 @@
bool ret = rtspClient->init(&rtspConfig);
if (!ret)
{
- cout << "rtspClient.init error" << endl;
+ LOG(ERROR) << "rtspClient.init error";
exit(EXIT_FAILURE);
}
}
@@ -64,7 +65,7 @@
// bool ret = queue1->init(&config);
// if (!ret)
// {
- // cout << "queue1.init error" << endl;
+ // LOG(ERROR) << "queue1.init error";
// exit(EXIT_FAILURE);
// }
//}
@@ -80,15 +81,15 @@
bool ret = rtspServer->init(&config);
if (!ret)
{
- cout << "rtspServer.init error" << endl;
+ LOG(ERROR) << "rtspServer.init error";
exit(EXIT_FAILURE);
}
}
while(true)
{
- //cout << "begin pipe" << endl;
+ //LOG(ERROR) << "begin pipe";
pipeLine.pipe();
- //cout << "end pipe" << endl;
+ //LOG(ERROR) << "end pipe";
}
}
diff --git a/RtspFace/make.sh b/RtspFace/make.sh
index 1fe8646..4a4892f 100644
--- a/RtspFace/make.sh
+++ b/RtspFace/make.sh
@@ -29,8 +29,12 @@
OPENCV_INC=
OPENCV_LIB="-lopencv_core"
-CPPFLAGS+="-pthread $LIVEMEDIA_INC $FFMPEG_INC $LIBBASE64_INC $LIBYUV_INC $SENSETIMEFACESDK_INC"
-LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB $LIBX264_LIB $SENSETIMEFACESDK_LIB $OPENCV_LIB"
+LIBLOG4CPP_BASE=/opt/log4cpp/inst
+LIBLOG4CPP_INC="-I$LIBLOG4CPP_BASE/include"
+LIBLOG4CPP_LIB="-L$LIBLOG4CPP_BASE/lib -llog4cpp"
+
+CPPFLAGS+="-pthread $LIVEMEDIA_INC $FFMPEG_INC $LIBBASE64_INC $LIBYUV_INC $SENSETIMEFACESDK_INC $LIBLOG4CPP_INC"
+LDFLAGS+="-pthread $LIVEMEDIA_LIB $FFMPEG_LIB $LIBBASE64_LIB $LIBYUV_LIB $LIBX264_LIB $SENSETIMEFACESDK_LIB $OPENCV_LIB $LIBLOG4CPP_LIB"
CFLAGS+="-D__STDC_CONSTANT_MACROS"
@@ -60,5 +64,5 @@
$FFMPEGRTSPSERVER_OBJ PL_RTSPServer.o \
$LDFLAGS -o rtsp_face
-#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib:$SENSETIMEFACESDK_BASE/libs/linux-x86_64
+#export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIBX264_BASE/lib:$FFMPEG_BASE/lib:$SENSETIMEFACESDK_BASE/libs/linux-x86_64:$LIBLOG4CPP_BASE/lib
#./rtsp_face rtsp://admin:admin12345@192.168.1.64:554/h264/ch1/main/av_stream
--
Gitblit v1.8.0