pans
2017-08-18 7572cd33f27b512691b6acf39cd0d78d35017a46
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperServer.cpp
@@ -6,7 +6,6 @@
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
#include <PL_AndroidMediaCodecEncoder.h>
@@ -103,7 +102,6 @@
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
   PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
   PipeLine::register_global_elem_creator("PL_RTSPServer2", create_PL_RTSPServer2);
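Note: the registrations above bind an element name to a creator function, which later push_elem("...") calls resolve by name. A minimal sketch of such a name-to-creator registry, written purely as an assumption for illustration (the project's real PipeLine implementation is not part of this diff):

#include <map>
#include <string>

struct PL_Elem { virtual ~PL_Elem() {} };                 // hypothetical stand-in for the element base class
typedef PL_Elem* (*elem_creator_t)();

static std::map<std::string, elem_creator_t> g_creators;  // element name -> factory function

void register_elem_creator(const std::string& name, elem_creator_t fn)
{
    g_creators[name] = fn;                                 // what register_global_elem_creator is assumed to do
}

PL_Elem* create_elem(const std::string& name)
{
    std::map<std::string, elem_creator_t>::iterator it = g_creators.find(name);
    return (it != g_creators.end()) ? it->second() : 0;    // push_elem-style lookup; 0 if the name is unknown
}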
@@ -164,16 +162,16 @@
       //   return  false;
       //}
-//      PL_Paint_Config plPaintCfg;
-//      plPaintCfg.fontPath = fontPath;
-//      plPaintCfg.plplCtx = &plplContext;
-//      PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
-//      ret = plPaint->init(&plPaintCfg);
-//      if (!ret)
-//      {
-//         LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
-//         return  false;
-//      }
+      PL_Paint_Config plPaintCfg;
+      plPaintCfg.fontPath = fontPath;
+      plPaintCfg.plplCtx = &plplContext;
+      PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
+      ret = plPaint->init(&plPaintCfg);
+      if (!ret)
+      {
+         LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
+         return  false;
+      }
       //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
       //ret = asvRender->init(&asvrConfig);
@@ -184,13 +182,15 @@
       //}
       PL_AndroidMediaCodecEncoder_Config amceCfg;
-      amceCfg.ak_bit_rate = 5000000; // 512KB
+      amceCfg.ak_bit_rate = 512 * 1024 * 8; // 512KB
       amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
-      amceCfg.ak_frame_rate = 20;
+      amceCfg.ak_frame_rate = 40;
       amceCfg.ak_height = 480;
-      amceCfg.ak_i_frame_interval = 10;
+      amceCfg.ak_i_frame_interval = 1;
       amceCfg.ak_mime = "video/avc";
       amceCfg.ak_width = 640;
+      amceCfg.codecProfileLevel.profile = Android_CodecProfileLevel::AVCProfileBaseline;
+      amceCfg.codecProfileLevel.level = Android_CodecProfileLevel::AVCLevel1;
       PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
       ret = plAMCE->init(&amceCfg);
       if (!ret)
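The encoder fields above presumably map onto Android MediaCodec format keys, where bit rate is given in bits per second, frame rate in frames per second, the I-frame interval in seconds, and color format 21 is COLOR_FormatYUV420SemiPlanar. A quick arithmetic check of the new bit-rate literal (illustrative only, not part of the commit):

// 512 KiB/s expressed in bits per second; the old literal 5000000 was about 610 KiB/s
// even though its comment also said "512KB".
static_assert(512 * 1024 * 8 == 4194304, "512 KiB/s in bits per second");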
@@ -201,7 +201,7 @@
       PL_RTSPServer2* plRtspServer = (PL_RTSPServer2*)pipeLineRender->push_elem("PL_RTSPServer2");
       RTSPServer2Config rtspServerConfig;
-      //rtspServerConfig.syncDeliverFrame = false;
+      //rtspServerConfig.payBlockFullQueue = true;
       ret = plRtspServer->init(&rtspServerConfig);
       if (!ret)
       {
@@ -244,13 +244,13 @@
     //}
 #ifdef USE_ST_SDK
-//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo   use multi
-//    ret = sfTrack->init(&sftConfig);
-//    if (!ret)
-//    {
-//        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
-//        return false;
-//    }
+    //PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo   use multi
+    //ret = sfTrack->init(&sftConfig);
+    //if (!ret)
+    //{
+    //    LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
+    //    return false;
+    //}
     PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
     ret = sfTrack->init(&sftConfig);
     if (!ret)
@@ -277,7 +277,6 @@
{
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
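One behavioral note on the label cache above: std::map::insert leaves an existing entry untouched, so a repeated st_track_id keeps its first label. If overwriting were intended, operator[] (or insert_or_assign in C++17) would be the usual choice; a small illustration, not a change to the commit:

#include <map>
#include <string>

std::map<int, std::string> labels;

void demo()
{
    labels.insert(std::make_pair(7, "first"));    // inserts: key 7 absent
    labels.insert(std::make_pair(7, "second"));   // no effect: key 7 already present
    labels[7] = "second";                         // operator[] does overwrite
}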
@@ -376,7 +375,6 @@
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
@@ -390,17 +388,17 @@
 //        RtspFaceDetect::Client* rClient = getRtspFaceDetectClient()->getClient();
 //        auto& waitScope = getRtspFaceDetectClient()->getWaitScope();
-        RtspFaceDetectClient* client = getRtspFaceDetectClient();
-        RtspFaceDetect::Client rClient = client->getEzRpcClient()->getMain<RtspFaceDetect>();
-        auto& waitScope = client->getWaitScope();
+       RtspFaceDetectClient* client = getRtspFaceDetectClient();
+       RtspFaceDetect::Client rClient = client->getEzRpcClient()->getMain<RtspFaceDetect>();
+       auto& waitScope = client->getWaitScope();
-        auto  request = rClient.fireFaceCountListenerRequest();
-        request.setCameraIndex(cameraWrapper.cameraIdx);
-        request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
-        LOG_INFO <<cameraWrapper.cameraIdx << "+" << cameraWrapper.faceCache.getFaceCount(*pm)<< LOG_ENDL;
-        LOG_INFO <<"call client : i have face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
-        auto sendAct = request.send();
-        sendAct.ignoreResult().wait(waitScope);
+       auto  request = rClient.fireFaceCountListenerRequest();
+       request.setCameraIndex(cameraWrapper.cameraIdx);
+       request.setFaceCount(faceCount);
+       LOG_INFO <<"!!!!!"<<cameraWrapper.cameraIdx << "+" << faceCount << LOG_ENDL;
+       LOG_INFO <<"call client : i have face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+       auto sendAct = request.send();
+       sendAct.ignoreResult().wait(waitScope);
     }
     catch (const kj::Exception& e)
     {
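The try block above follows the usual Cap'n Proto ez-rpc call pattern: obtain the bootstrap interface, build a request, then send it and wait on the client's WaitScope. A self-contained sketch under the assumption that the RtspFaceDetect schema declares fireFaceCountListener with cameraIndex and faceCount fields; the address, generated header name, and direct EzRpcClient use are placeholders, since the project actually goes through getRtspFaceDetectClient():

#include <capnp/ez-rpc.h>
#include "RtspFaceDetect.capnp.h"   // assumed name of the generated schema header

void notifyFaceCount(int cameraIdx, int faceCount)
{
    capnp::EzRpcClient ezClient("127.0.0.1:8670");   // placeholder address/port
    RtspFaceDetect::Client rClient = ezClient.getMain<RtspFaceDetect>();
    kj::WaitScope& waitScope = ezClient.getWaitScope();

    auto request = rClient.fireFaceCountListenerRequest();
    request.setCameraIndex(cameraIdx);
    request.setFaceCount(faceCount);

    // Fire-and-forget: drop the response but block until the RPC completes.
    request.send().ignoreResult().wait(waitScope);
}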