pans
2017-08-18 7572cd33f27b512691b6acf39cd0d78d35017a46
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperServer.cpp
@@ -6,11 +6,12 @@
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
#include <PL_AndroidMediaCodecEncoder.h>
#include <PL_RTSPServer2.h>
#include <iostream>
CameraWrapper::~CameraWrapper()
{
   stop();
@@ -101,7 +102,6 @@
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
   PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
   PipeLine::register_global_elem_creator("PL_RTSPServer2", create_PL_RTSPServer2);
@@ -145,7 +145,7 @@
      PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
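      // Downscale frames to the encoder's input resolution before painting and encoding.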
      plScaleCfg.toWidth = 640;
      plScaleCfg.toHeight = 480; //#todo: make scale size configurable
      ret = plScale->init(&plScaleCfg);
      if (!ret)
      {
@@ -153,60 +153,62 @@
          return  false;
      }
        PL_AndroidMediaCodecEncoder_Config amceCfg;
        amceCfg.ak_bit_rate = 5000000; // ~5 Mbps
        amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      amceCfg.ak_frame_rate = 25;
        amceCfg.ak_height = 480;
        amceCfg.ak_i_frame_interval = 20;
        amceCfg.ak_mime = "video/avc";
        amceCfg.ak_width = 640;
        PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
        ret = plAMCE->init(&amceCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
            return  false;
        }
      //PL_ColorConv_Config PLColorConvCfg;
      //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      //ret = plColorConv->init(&PLColorConvCfg);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
      //   return  false;
      //}
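      // Paint overlays (e.g. face labels driven by plplContext) onto the scaled frames.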
      PL_Paint_Config plPaintCfg;
      plPaintCfg.fontPath = fontPath;
      plPaintCfg.plplCtx = &plplContext;
      PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
      ret = plPaint->init(&plPaintCfg);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
         return  false;
      }
      //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
      //ret = asvRender->init(&asvrConfig);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
      //   return  false;
      //}
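      // H.264 hardware encoder: 640x480 @ 40 fps, ~4 Mbps, baseline profile/level 1 for broad decoder compatibility.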
      PL_AndroidMediaCodecEncoder_Config amceCfg;
      amceCfg.ak_bit_rate = 512 * 1024 * 8; // 512 KB/s (~4.2 Mbps)
      amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      amceCfg.ak_frame_rate = 40;
      amceCfg.ak_height = 480;
      amceCfg.ak_i_frame_interval = 1;
      amceCfg.ak_mime = "video/avc";
      amceCfg.ak_width = 640;
      amceCfg.codecProfileLevel.profile = Android_CodecProfileLevel::AVCProfileBaseline;
      amceCfg.codecProfileLevel.level = Android_CodecProfileLevel::AVCLevel1;
      PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
      ret = plAMCE->init(&amceCfg);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
         return  false;
      }
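      // Publish the encoded stream through the in-process RTSP server.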
      PL_RTSPServer2* plRtspServer = (PL_RTSPServer2*)pipeLineRender->push_elem("PL_RTSPServer2");
      RTSPServer2Config rtspServerConfig;
      //rtspServerConfig.syncDeliverFrame = false;
      //rtspServerConfig.payBlockFullQueue = true;
      ret = plRtspServer->init(&rtspServerConfig);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plRtspServer init error" << LOG_ENDL;
         return  false;
      }
      //PL_ColorConv_Config PLColorConvCfg;
      //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      //ret = plColorConv->init(&PLColorConvCfg);
      //if (!ret)
      //{
      //    LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
      //    return  false;
      //}
        //PL_Paint_Config plPaintCfg;
      //plPaintCfg.fontPath = fontPath;
        //plPaintCfg.plplCtx = &plplContext;
        //PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        //ret = plPaint->init(&plPaintCfg);
        //if (!ret)
        //{
        //    LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
        //    return  false;
        //}
        //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        //ret = asvRender->init(&asvrConfig);
        //if (!ret)
        //{
        //    LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
        //    return  false;
        //}
    }
   }
   return true;
}
@@ -242,13 +244,13 @@
    //}
#ifdef USE_ST_SDK
//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
//    ret = sfTrack->init(&sftConfig);
//    if (!ret)
//    {
//        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
//        return false;
//    }
    //PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo   use multi
    //ret = sfTrack->init(&sftConfig);
    //if (!ret)
    //{
    //    LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
    //    return false;
    //}
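    // Multi-threaded SenseTime face tracker (replaces the single-threaded variant commented out above).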
    PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMultiTrd");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
@@ -275,7 +277,6 @@
{
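    // Bound the label cache: once it grows past 32 entries, clear it wholesale before inserting.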
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
@@ -374,21 +375,41 @@
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
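    // Cache this frame's face data for later retrieval; faceCacheLocked (checked above) prevents overwriting while it is being read.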
    cameraWrapper.faceCache.cachePm(*pm);
    //remote call start; to keep this generic, the following steps are not wrapped inside RtspFaceDetectClient
    RtspFaceDetectClient* client = getRtspFaceDetectClient();
    if (client == nullptr)
        return false;
    //#todo: improve encapsulation
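    // Cap'n Proto RPC: notify the remote face-detect service how many faces were found in this frame.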
    try
    {
        LOG_INFO <<"try start"<< LOG_ENDL;
    auto request = client->fireFaceCountListenerRequest();
    request.setCameraIndex(cameraWrapper.cameraIdx);
    request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
    auto sendAct = request.send();
    sendAct.wait(client->getWaitScope());
//        RtspFaceDetect::Client* rClient = getRtspFaceDetectClient()->getClient();
//        auto& waitScope = getRtspFaceDetectClient()->getWaitScope();
       RtspFaceDetectClient* client = getRtspFaceDetectClient();
       RtspFaceDetect::Client rClient = client->getEzRpcClient()->getMain<RtspFaceDetect>();
       auto& waitScope = client->getWaitScope();
       auto  request = rClient.fireFaceCountListenerRequest();
       request.setCameraIndex(cameraWrapper.cameraIdx);
       request.setFaceCount(faceCount);
       LOG_INFO <<"!!!!!"<<cameraWrapper.cameraIdx << "+" << faceCount << LOG_ENDL;
       LOG_INFO <<"call client : i have face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
       auto sendAct = request.send();
       sendAct.ignoreResult().wait(waitScope);
    }
    catch (const kj::Exception& e)
    {
        LOG_INFO <<"catch!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
        LOG_ERROR << "catch!!!" <<e.getDescription().cStr() << LOG_ENDL;
        std::cout << e.getDescription().cStr() << std::endl;
        return false;
    }
    catch (const std::exception& e)
    {
        LOG_ERROR << "fireFaceCountListener failed (std::exception): " << e.what() << LOG_ENDL;
    }
    //remote call end
    return true;
}
@@ -487,7 +508,6 @@
    {
        sleep(10);
      continue; //#todo: keep-alive restart logic below is temporarily disabled
        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);