chenke
2017-07-27 d475c7ab0b4bd6781c6f312b153a4f5bc780d57a
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
@@ -1,32 +1,44 @@
#include "CameraWrapper.h"
#include <Logger/src/logger.hpp>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <MaterialBuffer.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineRender;
    delete pipeLineAnalyzer;
    delete pipeLineDecoderDetector;
}
bool CameraWrapper::start()
{
   LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
   running = true;
    pthread_mutex_init(&live_daemon_mut, NULL);
    running = true;
   int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
   if(ret != 0)
   {
      LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
      LOGP(ERROR, "pthread_create decoder_thid: %s/n", strerror(ret));
      running = false;
      return false;
   }
    ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create live_daemon_thid: %s/n", strerror(ret));
        running = false;
        return false;
    }
   return true;
}
@@ -39,7 +51,10 @@
      return;
   running = false;
    pthread_join(decoder_thid, NULL);
    pthread_join(live_daemon_thid, NULL);
    pthread_mutex_destroy(&live_daemon_mut);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
@@ -66,49 +81,20 @@
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    //PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = initPl_DecoderPl();
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
        return  false;
    }
    {
@@ -120,7 +106,7 @@
            return  false;
        }
        PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
@@ -141,7 +127,25 @@
            return  false;
        }
        PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
        ret = plScale->init(&plScaleCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
            return  false;
        }
        PL_ColorConv_Config PLColorConvCfg;
        PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
        ret = plColorConv->init(&PLColorConvCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plColorConv init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.fontPath = fontPath;
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
@@ -161,6 +165,74 @@
    }
   return true;
}
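// Builds the decoder/detector pipeline: PL_RTSPClient -> PL_AndroidMediaCodecDecoder,
// followed by the SenseTime face tracker (PL_SensetimeFaceTrackMitiTrd) when USE_ST_SDK is defined.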
bool CameraWrapper::initPl_DecoderPl()
{
    bool ret = false;
    PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
    ret = rtspClient->init(&rtspConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
        return  false;
    }
    PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
    ret = amcDecoder->init(&amcdConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
        return  false;
    }
    //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
    //PL_V4L2Source_Config v4l2Config;
    //v4l2Config.width = 640;
    //v4l2Config.height = 480;
    //ret = v4l2Source->init(&v4l2Config);
    //if (!ret) {
    //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
    //    return false;
    //}
#ifdef USE_ST_SDK
//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
//    ret = sfTrack->init(&sftConfig);
//    if (!ret)
//    {
//        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
//        return false;
//    }
    PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
        return false;
    }
#endif
    return ret;
}
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
{
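    // Bound the label cache: once it grows past 32 entries, drop everything and start over.
    // Note std::map::insert keeps an existing entry for the same st_track_id.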
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
@@ -201,6 +273,9 @@
    plplContext.cmds.clear();
    plplContext.params.clear();
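    // Detection rects are in decoder-frame coordinates (amcdConfig.ak_width/ak_height),
    // while PL_Paint draws on the frame scaled to plScaleCfg.toWidth/toHeight, so scale them.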
    float width_scale = ((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
    float height_scale = ((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
@@ -231,18 +306,18 @@
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
        std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_WTEXT);
            plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
            plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
            const wchar_t* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }
@@ -269,15 +344,20 @@
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.setFaceLabel(0, "vip");
    cameraWrapper.setFaceLabel(1, "abc");
    cameraWrapper.setFaceLabel(2, "wrn");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
    cameraWrapper.plplContext.params.push_back(20);
@@ -286,17 +366,44 @@
    cameraWrapper.plplContext.params.push_back(100);
}
//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
//    ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};
void cw_elem_destory_func(PipeLineElem* elem)
{
    delete elem;
}
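// Tears down the decoder/detector pipeline element by element and rebuilds it.
// decoder_thd calls this after the live daemon marks the camera as killed.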
bool CameraWrapper::resetPl()
{
    pipeLineDecoderDetector->finit(cw_elem_destory_func);
    sleep(2);
    return initPl_DecoderPl();
}
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
   LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
   CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
   while(cameraWrapper.running)
   {
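        // Recovery path: the live daemon sets `killed` when the stream looks dead,
        // so rebuild the decoder pipeline here before pulling frames again.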
        if (cameraWrapper.killed)
        {
            LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
            cameraWrapper.resetPl();
            cameraWrapper.killed = false;
            sleep(2);
        }
      PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
      bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
      LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
      //LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
      if (!ret)
         continue;
@@ -305,6 +412,8 @@
        ret = last->gain(pm);
        if (!ret)
            continue;
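        // Heartbeat for the live daemon: refreshed whenever a frame is gained successfully.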
        cameraWrapper.lastAliveTime = time(nullptr);
        if (! cameraWrapper.faceCacheLocked)
        {
@@ -321,25 +430,26 @@
        //test_paint(cameraWrapper);
        cameraWrapper.pipeLineRender->pipe(&pm);
    }

    LOG_INFO << "CameraWrapper::decoder_thd stop" << LOG_ENDL;
    return NULL;
}

/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;

    while(cameraWrapper.running)
    {
        sleep(10);

        // Watchdog: if decoder_thd has not refreshed lastAliveTime for more than 20 seconds,
        // kill the RTSP client and flag the camera so decoder_thd rebuilds the pipeline.
        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
            rtspClient->kill();
            cameraWrapper.killed = true;
        }
    }

    return NULL;
}