houxiao
2017-08-17 2b43077d967c28fe99e1ff2b99f19e1433c710d9
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperServer.cpp
@@ -1,15 +1,15 @@
#include "CameraWrapperServer.h"
#include "RemoteFuncServer.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <MaterialBuffer.h>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
#include <PL_AndroidMediaCodecEncoder.h>
#include <PL_RTSPServer2.h>
CameraWrapper::~CameraWrapper()
{
   stop();
@@ -100,9 +100,9 @@
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    //PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
   PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
   PipeLine::register_global_elem_creator("PL_RTSPServer2", create_PL_RTSPServer2);
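    // The creators registered above are presumably no-argument factories that
    // return a freshly allocated PipeLineElem* (PipeLineElem also appears in
    // cw_elem_destory_func further down). A minimal sketch, assuming that
    // factory signature:
    //
    //     PipeLineElem* create_PL_RTSPServer2()
    //     {
    //         return new PL_RTSPServer2();
    //     }
    //
    // Once registered, push_elem("PL_RTSPServer2") can instantiate the element
    // by name when the render pipeline is assembled below.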
    bool ret = initPl_DecoderPl();
    if (!ret)
@@ -143,7 +143,7 @@
      PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
      plScaleCfg.toWidth = 640;
      plScaleCfg.toHeight = 480;//#todo config
      ret = plScale->init(&plScaleCfg);
      if (!ret)
      {
@@ -151,50 +151,62 @@
          return  false;
      }
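      // toWidth/toHeight are hard-coded to 640x480 above (hence the "#todo config"
      // note). A minimal sketch of making them configurable, assuming hypothetical
      // renderWidth/renderHeight members on CameraWrapper:
      //
      //     plScaleCfg.toWidth  = (renderWidth  > 0) ? renderWidth  : 640;
      //     plScaleCfg.toHeight = (renderHeight > 0) ? renderHeight : 480;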
        //PL_AndroidMediaCodecEncoder_Config amceCfg;
        //amceCfg.ak_bit_rate = 5000000; // 512KB
        //amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      //amceCfg.ak_frame_rate = 25;
        //amceCfg.ak_height = 480;
        //amceCfg.ak_i_frame_interval = 20;
        //amceCfg.ak_mime = "video/avc";
        //amceCfg.ak_width = 640;
        //PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
        //ret = plAMCE->init(&amceCfg);
        //if (!ret)
        //{
        //    LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
        //    return  false;
        //}
      //PL_ColorConv_Config PLColorConvCfg;
      //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      //ret = plColorConv->init(&PLColorConvCfg);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
      //   return  false;
      //}
      PL_ColorConv_Config PLColorConvCfg;
      PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
      ret = plColorConv->init(&PLColorConvCfg);
      PL_Paint_Config plPaintCfg;
      plPaintCfg.fontPath = fontPath;
      plPaintCfg.plplCtx = &plplContext;
      PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
      ret = plPaint->init(&plPaintCfg);
      if (!ret)
      {
          LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
          return  false;
      }
      //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
      //ret = asvRender->init(&asvrConfig);
      //if (!ret)
      //{
      //   LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
      //   return  false;
      //}
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
      PL_AndroidMediaCodecEncoder_Config amceCfg;
      amceCfg.ak_bit_rate = 512 * 1024 * 8; // 512 KiB/s, i.e. 4194304 bit/s
      amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
      amceCfg.ak_frame_rate = 40;
      amceCfg.ak_height = 480;
      amceCfg.ak_i_frame_interval = 1;
      amceCfg.ak_mime = "video/avc";
      amceCfg.ak_width = 640;
      amceCfg.codecProfileLevel.profile = Android_CodecProfileLevel::AVCProfileBaseline;
      amceCfg.codecProfileLevel.level = Android_CodecProfileLevel::AVCLevel1;
      PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
      ret = plAMCE->init(&amceCfg);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
         return  false;
      }
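      // If the ak_* fields follow standard Android MediaCodec semantics, ak_bit_rate
      // is in bits per second, ak_color_format 21 is COLOR_FormatYUV420SemiPlanar
      // (NV12-style), and ak_i_frame_interval is in seconds. The element's internals
      // are not shown in this file; assuming it configures the codec through the NDK,
      // an illustrative mapping of the config above would be:
      //
      //     AMediaFormat* fmt = AMediaFormat_new();
      //     AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, 640);
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, 480);
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BIT_RATE, 512 * 1024 * 8);
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_RATE, 40);
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_COLOR_FORMAT, 21);
      //     AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 1);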
      PL_RTSPServer2* plRtspServer = (PL_RTSPServer2*)pipeLineRender->push_elem("PL_RTSPServer2");
      RTSPServer2Config rtspServerConfig;
      //rtspServerConfig.payBlockFullQueue = true;
      ret = plRtspServer->init(&rtspServerConfig);
      if (!ret)
      {
         LOG_ERROR << "pipeLineRender.plRtspServer init error" << LOG_ENDL;
         return  false;
      }
   }
   return true;
}
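// Net effect of the branch above, assuming the pipeline runs elements in push_elem
// order: frames flow through PL_Scale (to 640x480) -> PL_ColorConv -> PL_Paint
// (overlays described by plplContext) and then into the newly added
// PL_AndroidMediaCodecEncoder (H.264 baseline) -> PL_RTSPServer2 tail, so painted
// frames end up encoded and served over RTSP rather than only rendered locally.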
@@ -230,13 +242,13 @@
    //}
#ifdef USE_ST_SDK
    //PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo   use multi
    //ret = sfTrack->init(&sftConfig);
    //if (!ret)
    //{
    //    LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
    //    return false;
    //}
    PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMultiTrd");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
@@ -359,19 +371,7 @@
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
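// Reading of the breaker above: it bails out when the face cache is locked, when
// the current PipeMaterial carries no detected faces, or when no callback is
// registered; otherwise it snapshots the material into faceCache and invokes the
// callback with the face count. It returns false on every path; what false signals
// depends on the breaker contract, which is not shown in this file.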
void test_paint(CameraWrapper& cameraWrapper)
@@ -397,14 +397,6 @@
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}
//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
//    ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};
void cw_elem_destory_func(PipeLineElem* elem)
{
@@ -461,9 +453,7 @@
        //#debug
        //test_paint(cameraWrapper);
      cameraWrapper.pipeLineRender->pipe(&pm);//#todo
      //if (cameraWrapper.pipeLineRender->check_pipe_complete(last = cameraWrapper.pipeLineRender->pipe(&pm)))
      //   last->gain(pm);
    }
   
   LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;