houxiao
2017-06-13 4b914a5d7e3d7971cb3e3ed49047fa331bd74da3
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
@@ -1,11 +1,18 @@
#include "CameraWrapper.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <MaterialBuffer.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
// Tears down the camera wrapper: stop() is called first so the live daemon
// thread is no longer touching any pipeline when the pipelines are freed
// (presumably stop() joins the thread — see pthread_join in stop; confirm).
// NOTE(review): `delete pipeLine` alongside the three specialized pipelines
// looks like a leftover from before the pipeline split into
// decoder/analyzer/render — confirm the `pipeLine` member still exists and
// is owned here; otherwise remove this line.
CameraWrapper::~CameraWrapper()
{
   stop();
   delete pipeLine;
    delete pipeLineDecoderDetector;
    delete pipeLineAnalyzer;
    delete pipeLineRender;
}
bool CameraWrapper::start()
@@ -35,46 +42,122 @@
   pthread_join(live_daemon_thid, NULL);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
{
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        RectWrapper rw;
        rw.rect = iter->rect;
        rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
        rects.push_back(rw);
    }
}
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
   PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLine->push_elem("PL_RTSPClient");
   bool ret = rtspClient->init(&rtspConfig);
   if (!ret)
   {
      LOG_ERROR << "rtspClient.init error" << LOG_ENDL;
      return  false;
   }
    bool ret = false;
   PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLine->push_elem("PL_AndroidMediaCodecDecoder");
   ret = amcDecoder->init(&amcdConfig);
   if (!ret)
   {
      LOG_ERROR << "amcDecoder.init error" << LOG_ENDL;
      return  false;
   }
    PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLine->push_elem("PL_AndroidSurfaceViewRender");
    ret = asvRender->init(&asvrConfig);
    if (!ret)
    {
        LOG_ERROR << "asvRender.init error" << LOG_ENDL;
        return  false;
    }
        PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
        ret = rtspClient->init(&rtspConfig);
        if (!ret)
        {
           LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
           return  false;
        }
        PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
        ret = amcDecoder->init(&amcdConfig);
        if (!ret)
        {
           LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
           return  false;
        }
        //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
        //PL_V4L2Source_Config v4l2Config;
        //v4l2Config.width = 640;
        //v4l2Config.height = 480;
        //ret = v4l2Source->init(&v4l2Config);
        //if (!ret) {
        //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
        //    return false;
        //}
#ifdef USE_ST_SDK
   PL_SensetimeFaceTrack* sfTrack = (PL_SensetimeFaceTrack*)pipeLine->push_elem("PL_SensetimeFaceTrack");
   ret = sfTrack->init(&sftConfig);
   if (!ret)
   {
      LOG_ERROR << "sfTrack.init error" << LOG_ENDL;
      return  false;
   }
        PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
        ret = sfTrack->init(&sftConfig);
        if (!ret) {
            LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
            return false;
        }
#endif
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return  false;
        }
        PL_Gainer* plBG = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return  false;
        }
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
   return true;
}
@@ -108,6 +191,70 @@
    cameraWrapper.javaVM->DetachCurrentThread();
}
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    PLPLContext& plplContext(*(PLPLContext*)args);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
    plplContext.cmds.clear();
    plplContext.params.clear();
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
        {
            if (iter->outOfFrame)
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back(iter->rect.leftTop.X);
        plplContext.params.push_back(iter->rect.leftTop.Y);
        plplContext.params.push_back(iter->rect.rightBottom.X);
        plplContext.params.push_back(iter->rect.rightBottom.Y);
    }
    return false;
}
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
{
   LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;
@@ -116,28 +263,43 @@
   while(cameraWrapper.running)
   {
      PipeLineElem* last = cameraWrapper.pipeLine->pipe();
      bool ret = cameraWrapper.pipeLine->check_pipe_complete(last);
      PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
      bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
      LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
      if (!ret)
         continue;
      if (cameraWrapper.faceCacheLocked)
         continue;
#ifdef USE_ST_SDK
        PipeMaterial pm;
      ret = last->gain(pm);
        ret = last->gain(pm);
        if (!ret)
            continue;
      if (!ret)
         continue;
        if (! cameraWrapper.faceCacheLocked)
        {
            PipeMaterial pmAnalizer(pm);
            PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
            bool ret = last->gain(pmAnalizer);
            if (ret)
                pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
        }
      int faceCount = cameraWrapper.faceCache.cachePm(pm);
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper.plplContext));
      if (faceCount > 0 && cameraWrapper.faceCallbackFunc != 0)
            invokeCallback(cameraWrapper, faceCount);
#endif
        //#debug
        //cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
        //cameraWrapper.plplContext.params.push_back('F');
        //cameraWrapper.plplContext.params.push_back(255);
        //cameraWrapper.plplContext.params.push_back(0);
        //cameraWrapper.plplContext.params.push_back(0);
        //cameraWrapper.plplContext.params.push_back(255);
        //cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
        //cameraWrapper.plplContext.params.push_back(20);
        //cameraWrapper.plplContext.params.push_back(20);
        //cameraWrapper.plplContext.params.push_back(100);
        //cameraWrapper.plplContext.params.push_back(100);
        cameraWrapper.pipeLineRender->pipe(&pm);
   }
   
   LOG_INFO << "CameraWrapper::live_daemon_thd stop, ret=" << LOG_ENDL;