pans
2017-08-16 e473f9eeb0233afcbd38de5de975dea4d131026d
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperServer.cpp
@@ -1,4 +1,5 @@
#include "CameraWrapperServer.h"
#include "RemoteFuncServer.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <MaterialBuffer.h>
@@ -9,7 +10,8 @@
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
#include <PL_AndroidMediaCodecEncoder.h>
#include <PL_RTSPServer.h>
#include <PL_RTSPServer2.h>
#include <iostream>
CameraWrapper::~CameraWrapper()
{
@@ -104,7 +106,7 @@
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
   PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
   PipeLine::register_global_elem_creator("PL_RTSPServer", create_PL_RTSPServer);
   PipeLine::register_global_elem_creator("PL_RTSPServer2", create_PL_RTSPServer2);
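    // Editor note (reading of the pattern, not part of this commit): each register_global_elem_creator
    // call binds an element name to its create_PL_* factory, so later code can build pipeline stages
    // by name via push_elem("PL_Scale"), push_elem("PL_AndroidMediaCodecEncoder"),
    // push_elem("PL_RTSPServer2"), as done below.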
    bool ret = initPl_DecoderPl();
    if (!ret)
@@ -145,7 +147,7 @@
      PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
      plScaleCfg.toWidth = 640;
      plScaleCfg.toHeight = 480;
      plScaleCfg.toHeight = 480;//#todo config
      ret = plScale->init(&plScaleCfg);
      if (!ret)
      {
@@ -153,24 +155,52 @@
          return  false;
      }
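      // Editor note (inference from the config values, not part of this commit): the scale stage
      // brings frames down to 640x480 so they match the ak_width/ak_height the MediaCodec
      // encoder stage below is configured with.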
        PL_AndroidMediaCodecEncoder_Config amceCfg;
        amceCfg.ak_bit_rate = 5000000; // ~5 Mbps
        amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      amceCfg.ak_frame_rate = 25;
        amceCfg.ak_height = 480;
        amceCfg.ak_i_frame_interval = 20;
        amceCfg.ak_mime = "video/avc";
        amceCfg.ak_width = 640;
        PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
        ret = plAMCE->init(&amceCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
            return  false;
        }
      //PL_ColorConv_Config PLColorConvCfg;
      //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      //ret = plColorConv->init(&PLColorConvCfg);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
      //   return  false;
      //}
      PL_RTSPServer* plRtspServer = (PL_RTSPServer*)pipeLineRender->push_elem("PL_RTSPServer");
      RTSPServerConfig rtspServerConfig;
//      PL_Paint_Config plPaintCfg;
//      plPaintCfg.fontPath = fontPath;
//      plPaintCfg.plplCtx = &plplContext;
//      PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
//      ret = plPaint->init(&plPaintCfg);
//      if (!ret)
//      {
//         LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
//         return  false;
//      }
      //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
      //ret = asvRender->init(&asvrConfig);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
      //   return  false;
      //}
      PL_AndroidMediaCodecEncoder_Config amceCfg;
      amceCfg.ak_bit_rate = 5000000; // ~5 Mbps
      amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      amceCfg.ak_frame_rate = 20;
      amceCfg.ak_height = 480;
      amceCfg.ak_i_frame_interval = 10;
      amceCfg.ak_mime = "video/avc";
      amceCfg.ak_width = 640;
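      // Editor note on these values (assumed ak_* -> Android MediaFormat mapping, not stated in the commit):
      // if they map onto MediaFormat keys, ak_bit_rate is in bits per second (5000000 ~= 5 Mbps),
      // ak_color_format 21 is MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
      // (NV12-style), and ak_i_frame_interval is the keyframe interval in seconds
      // (MediaFormat.KEY_I_FRAME_INTERVAL).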
      PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
      ret = plAMCE->init(&amceCfg);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
         return  false;
      }
      PL_RTSPServer2* plRtspServer = (PL_RTSPServer2*)pipeLineRender->push_elem("PL_RTSPServer2");
      RTSPServer2Config rtspServerConfig;
      //rtspServerConfig.syncDeliverFrame = false;
      ret = plRtspServer->init(&rtspServerConfig);
      if (!ret)
@@ -178,35 +208,7 @@
         LOG_ERROR << "pipeLineRender.plRtspServer init error" << LOG_ENDL;
         return  false;
      }
      //PL_ColorConv_Config PLColorConvCfg;
      //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      //ret = plColorConv->init(&PLColorConvCfg);
      //if (!ret)
      //{
      //    LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
      //    return  false;
      //}
        //PL_Paint_Config plPaintCfg;
      //plPaintCfg.fontPath = fontPath;
        //plPaintCfg.plplCtx = &plplContext;
        //PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        //ret = plPaint->init(&plPaintCfg);
        //if (!ret)
        //{
        //    LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
        //    return  false;
        //}
        //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        //ret = asvRender->init(&asvrConfig);
        //if (!ret)
        //{
        //    LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
        //    return  false;
        //}
    }
   }
   return true;
}
@@ -242,7 +244,7 @@
    //}
#ifdef USE_ST_SDK
//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo   use multi
//    ret = sfTrack->init(&sftConfig);
//    if (!ret)
//    {
@@ -372,18 +374,46 @@
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
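    // Editor note (reading of the flow, not part of this commit): the PipeMaterial carries this
    // frame's detection output; faceCache counts and caches the faces, and invokeCallback
    // presumably delivers the count to the locally registered faceCallbackFunc before the
    // remote listener below is fired.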
    //remote call start; to keep this generic, the following steps are not wrapped into RtspFaceDetectClient
    //#todo improve the encapsulation
    try
    {
        LOG_INFO <<"try start"<< LOG_ENDL;
    return false;
//        RtspFaceDetect::Client* rClient = getRtspFaceDetectClient()->getClient();
//        auto& waitScope = getRtspFaceDetectClient()->getWaitScope();
        RtspFaceDetectClient* client = getRtspFaceDetectClient();
        RtspFaceDetect::Client rClient = client->getEzRpcClient()->getMain<RtspFaceDetect>();
        auto& waitScope = client->getWaitScope();
        auto  request = rClient.fireFaceCountListenerRequest();
        request.setCameraIndex(cameraWrapper.cameraIdx);
        request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
        LOG_INFO <<cameraWrapper.cameraIdx << "+" << cameraWrapper.faceCache.getFaceCount(*pm)<< LOG_ENDL;
        LOG_INFO <<"call client : i have face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
        auto sendAct = request.send();
        sendAct.ignoreResult().wait(waitScope);
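        // Editor note on the Cap'n Proto call pattern (explanation added here, not in the commit):
        // send() returns a promise for the RPC response; ignoreResult() converts it to a
        // kj::Promise<void>, and wait(waitScope) blocks this thread until the request completes,
        // so the remote listener is invoked synchronously and any RPC failure surfaces as the
        // kj::Exception handled below.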
    }
    catch (const kj::Exception& e)
    {
        LOG_INFO <<"catch!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
        LOG_ERROR << "catch!!!" <<e.getDescription().cStr() << LOG_ENDL;
        std::cout << e.getDescription().cStr() << std::endl;
        return false;
    }
    catch (const std::exception& e) {
        LOG_ERROR << "std::exception caught in remote call: " << e.what() << LOG_ENDL;
    }
    //remote call end
    return true;
}
void test_paint(CameraWrapper& cameraWrapper)
@@ -409,14 +439,6 @@
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}
//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
//    ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};
void cw_elem_destory_func(PipeLineElem* elem)
{
@@ -488,7 +510,6 @@
    {
        sleep(10);
      continue;//#todo
        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);