#include "CameraWrapperClient.h"
|
#include <logger.h>
|
#include <Logger/src/logger.hpp>
|
#include <MaterialBuffer.h>
|
#include <PL_Gainer.h>
|
#include <PL_Paint.h>
|
#include <PL_Scale.h>
|
#include <PL_V4L2Source.h>
|
#include <PL_BlockGrouping.h>
|
#include <PL_ColorConv.h>
|
#include <PL_AndroidMediaCodecEncoder.h>
|
|
// Stops the worker threads before tearing down the pipelines: stop() joins the
// threads that dereference pipeLineRender / pipeLineDecoderDetector, so the
// deletes below are safe afterwards.
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineRender;
    delete pipeLineDecoderDetector;
}
|
|
bool CameraWrapper::start()
|
{
|
LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
|
|
pthread_mutex_init(&live_daemon_mut, NULL);
|
|
running = true;
|
int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
|
if(ret != 0)
|
{
|
LOGP(ERROR, "pthread_create decoder_thid: %s/n", strerror(ret));
|
running = false;
|
return false;
|
}
|
|
//struct sched_param param;
|
//int policy = -1;
|
//int rc = pthread_getschedparam(decoder_thid, &policy, ¶m);
|
//LOGP(INFO, "pthread_getschedparam, rc=%d, policy=%d, priority=%d", rc, policy, param.__sched_priority);
|
//policy = SCHED_RR;
|
//param.__sched_priority = 1;
|
//rc = pthread_setschedparam(decoder_thid, policy, ¶m);
|
//LOGP(INFO, "pthread_getschedparam, rc=%d, policy=%d, priority=%d", rc, policy, param.__sched_priority);
|
|
ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
|
if(ret != 0)
|
{
|
LOGP(ERROR, "pthread_create live_daemon_thid: %s/n", strerror(ret));
|
running = false;
|
return false;
|
}
|
ret = pthread_create(&jni_call_thid, NULL, CameraWrapper::jni_call_thd, this);
|
if(ret != 0)
|
{
|
LOGP(ERROR, "pthread_create jni_call_thid: %s/n", strerror(ret));
|
running = false;
|
return false;
|
}
|
return true;
|
}
|
|
void CameraWrapper::stop()
|
{
|
LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
|
|
if (!running)
|
return;
|
|
running = false;
|
pthread_join(decoder_thid, NULL);
|
pthread_join(live_daemon_thid, NULL);
|
|
pthread_mutex_destroy(&live_daemon_mut);
|
}
|
|
// Secondary scoring hook for PL_BlockGrouping: deliberately neutral — every
// candidate rect gets score 0, so grouping relies on user_score_1 alone.
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
|
|
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
|
{
|
const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
|
|
for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
|
{
|
if (!iter->test_face_in_cone(35.0f, 35.0f, 35.0f))
|
continue;
|
|
RectWrapper rw;
|
rw.rect = iter->rect;
|
rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
|
rw.userData = (void*)(iter->id);
|
rects.push_back(rw);
|
}
|
}
|
|
bool CameraWrapper::initPl()
|
{
|
PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
|
PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
|
PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
|
PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
|
PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
|
PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
|
PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
|
PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
|
|
bool ret = initPl_DecoderPl();
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
|
return false;
|
}
|
|
{
|
PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
|
ret = plGainer->init(nullptr);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
|
return false;
|
}
|
|
PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
|
ret = plScale->init(&plScaleCfg);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
|
return false;
|
}
|
|
PL_ColorConv_Config PLColorConvCfg;
|
PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
|
ret = plColorConv->init(&PLColorConvCfg);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
return false;
|
}
|
|
PL_Paint_Config plPaintCfg;
|
plPaintCfg.fontPath = fontPath;
|
plPaintCfg.plplCtx = &plplContext;
|
PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
|
ret = plPaint->init(&plPaintCfg);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
return false;
|
}
|
|
PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
|
ret = asvRender->init(&asvrConfig);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
|
return false;
|
}
|
}
|
|
return true;
|
}
|
|
bool CameraWrapper::initPl_DecoderPl()
|
{
|
bool ret = false;
|
|
PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
|
ret = rtspClient->init(&rtspConfig);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
|
return false;
|
}
|
|
PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
|
ret = amcDecoder->init(&amcdConfig);
|
if (!ret)
|
{
|
LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
|
return false;
|
}
|
|
//PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
|
//PL_V4L2Source_Config v4l2Config;
|
//v4l2Config.width = 640;
|
//v4l2Config.height = 480;
|
//ret = v4l2Source->init(&v4l2Config);
|
//if (!ret) {
|
// LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
|
// return false;
|
//}
|
|
return ret;
|
}
|
|
// Freezes the face cache so a consumer (presumably the Java side — confirm)
// can read it without it being overwritten mid-read.
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
|
|
// Releases the face cache frozen by lockFace(), allowing updates again.
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
|
|
void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
|
{
|
if (faceLabels.size() > 32)
|
faceLabels.clear();
|
|
faceLabels.insert(std::make_pair(st_track_id, label));
|
LOG_ERROR <<"client setFaceLabel : !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
|
}
|
|
// Publishes a new face count and raises the changed flag; jni_call_thd polls
// the flag and forwards the count to Java.
// NOTE(review): faceCount/faceCountChanged are written here and read by
// jni_call_thd without synchronization — the count is written before the flag,
// but confirm these fields are at least volatile/atomic for cross-thread use.
void CameraWrapper::fireFaceCount(int faceCount)
{
    this->faceCount = faceCount;
    this->faceCountChanged = true;
}
|
|
// PipeMaterial breaker callback: translates the detected face features carried
// in `pm->buffer` into a fresh PLPL draw-command list (rect per face, plus a
// text label when one is registered for the track id) on the wrapper's
// plplContext, which PL_Paint later executes.
// The cmds/params streams are positional: each PLPLC_* command consumes a fixed
// number of following params, so push order below is load-bearing.
// Always returns false (presumably "do not continue breaking" — confirm against
// PipeMaterial::breake's contract).
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    PLPLContext& plplContext(cameraWrapper.plplContext);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));

    // Rebuild the overlay from scratch every frame.
    plplContext.cmds.clear();
    plplContext.params.clear();

    // Detector coordinates are in decoder-resolution space; map them to the
    // scaled render resolution.
    float width_scale = ((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
    float height_scale = ((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;

    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        // Color command: 'F' selector followed by RGBA components.
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(35.0f, 35.0f, 35.0f))
        {
            if (iter->outOfFrame)
            {
                // Frontal but partially out of frame: yellow.
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                // Frontal and fully visible: red.
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            // Face turned away: green.
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }

        // Face bounding box, scaled to render coordinates.
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));

        // Optional label at the rect's top-left corner.
        std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_WTEXT);
            plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
            plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
            // NOTE(review): pointer to the wstring held in faceLabels — valid
            // only while the map entry survives (setFaceLabel may clear it).
            const wchar_t* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }

    return false;
}
|
|
// Debug-only helper (invoked from the commented-out "#debug" call in
// decoder_thd): pushes a fixed overlay — some wide-char text, three face
// labels, a white color command and a rectangle — to visually verify the
// PL_Paint stage end-to-end.
void test_paint(CameraWrapper& cameraWrapper)
{
    //cameraWrapper.plplContext.clear();

    // Text draw: x, y, then the wide string payload.
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));

    // Seed a few track-id labels so the breaker's label lookup path is exercised.
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");

    // Color command: 'F' selector then RGBA (opaque white).
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    // Rectangle: left, top, right, bottom.
    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}
|
|
// Element destructor callback handed to PipeLine::finit(): pipeline elements
// are heap-allocated by their creators, so plain delete is the whole cleanup.
void cw_elem_destory_func(PipeLineElem* elem)
{
    delete elem;
}
|
|
// Tears down and rebuilds only the decoder pipeline (the render pipeline is
// kept). Called from decoder_thd after the watchdog killed the RTSP client.
bool CameraWrapper::resetPl()
{
    pipeLineDecoderDetector->finit(cw_elem_destory_func);
    // Presumably gives the RTSP/decoder teardown time to settle before
    // re-initializing — TODO confirm whether this delay is still required.
    sleep(2);
    return initPl_DecoderPl();
}
|
|
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
|
{
|
LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
|
CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
|
|
while(cameraWrapper.running)
|
{
|
if (cameraWrapper.killed)
|
{
|
LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
|
cameraWrapper.resetPl();
|
cameraWrapper.killed = false;
|
sleep(2);
|
}
|
|
PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
|
bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
|
//LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
|
|
if (!ret)
|
continue;
|
|
PipeMaterial pm;
|
ret = last->gain(pm);
|
if (!ret)
|
continue;
|
|
cameraWrapper.lastAliveTime = time(nullptr);
|
|
//pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
|
|
//#debug
|
//test_paint(cameraWrapper);
|
|
cameraWrapper.pipeLineRender->pipe(&pm);
|
}
|
|
LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
|
}
|
|
/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
|
{
|
LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;
|
CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
|
|
while(cameraWrapper.running)
|
{
|
sleep(10);
|
continue;
|
if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
|
{
|
PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
|
rtspClient->kill();
|
|
cameraWrapper.killed = true;
|
}
|
}
|
}
|
|
void *CameraWrapper::jni_call_thd(void *arg) {
|
|
CameraWrapper* cameraWrapper = (CameraWrapper*)arg;
|
while(cameraWrapper->running)
|
{
|
if(cameraWrapper->faceCountChanged){
|
// double check it's all ok
|
LOG_ERROR <<"client fireFaceCount start!!!!!!!!"<< LOG_ENDL;
|
int getEnvStat = cameraWrapper->javaVM->GetEnv((void **)&(cameraWrapper->javaEnv), JNI_VERSION_1_6);
|
if (getEnvStat == JNI_EDETACHED)
|
{
|
//LOG_WARN << "GetEnv: not attached" << std::endl;
|
if (cameraWrapper->javaVM->AttachCurrentThread(&(cameraWrapper->javaEnv), NULL) != 0)
|
LOG_WARN << "Failed to attach" << LOG_ENDL;
|
else
|
getEnvStat = JNI_OK;
|
}
|
else if (getEnvStat == JNI_OK){
|
}
|
else if (getEnvStat == JNI_EVERSION)
|
LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
|
else if (getEnvStat == JNI_ERR)
|
LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
|
LOG_ERROR <<"client fireFaceCount end!!!!!!!!!!!!"<< LOG_ENDL;
|
|
cameraWrapper->javaEnv->CallStaticVoidMethod(cameraWrapper->faceCallbackClazz,
|
cameraWrapper->faceCallbackFunc,
|
cameraWrapper->cameraIdx,
|
cameraWrapper->faceCount);
|
|
if (cameraWrapper->javaEnv->ExceptionCheck())
|
cameraWrapper->javaEnv->ExceptionDescribe();
|
cameraWrapper->javaVM->DetachCurrentThread();
|
cameraWrapper->faceCountChanged = false;
|
}
|
usleep(10000);
|
}
|
|
}
|