houxiao
2017-07-20 cf0afe7a787dfed444a418dc0a224a49160108c1
restart pipeline

git-svn-id: http://192.168.1.226/svn/proxy@765 454eff88-639b-444f-9e54-f578c98de674
2个文件已添加
6个文件已修改
1129 ■■■■ 已修改文件
VisitFace/RtspNativeCodec/app/src/main/AndroidManifest.xml 1 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/CMakeLists.txt 147 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp 863 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.h 40 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp 35 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/cpu_sched_test.cpp 31 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/cpp/cpu_sched_test.h 6 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/java/com/example/nativecodec/NativeCodec.java 6 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
VisitFace/RtspNativeCodec/app/src/main/AndroidManifest.xml
@@ -28,6 +28,7 @@
    <intent-filter>
    <action android:name="android.intent.action.MAIN" />
    <category android:name="android.intent.category.LAUNCHER" />
        <category android:name="android.intent.category.DEFAULT" />
    </intent-filter>
    </activity>
    </application>
VisitFace/RtspNativeCodec/app/src/main/cpp/CMakeLists.txt
@@ -6,51 +6,55 @@
#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -std=c++11 -fno-rtti  -Wall -UNDEBUG")
#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -std=c++11 -DANDROID_PLATFORM=android-22  -DANDROID_TOOLCHAIN=gcc -DANDROID_STL=gnustl_static -Wall -UNDEBUG")
set(WORKSPACE_PATH "D:/workspace/proxy")
add_library(rtspface SHARED
            RtspNativeCodecJNI.cpp
            CameraWrapper.cpp
            FaceCache.cpp
            FaceCacheForPLBG.cpp
            DebugNetwork.cpp
            CaptureCamera.cpp
            serial.c
            #TeleWrapper.cpp
            RtspNativeCodecJNI.cpp
            CameraWrapper.cpp
            FaceCache.cpp
            FaceCacheForPLBG.cpp
            DebugNetwork.cpp
            CaptureCamera.cpp
            cpu_sched_test.cpp
            serial.c
            #TeleWrapper.cpp
            "D:/Documents/works/RtspFace/PL_Scale.cpp"
            "D:/Documents/works/RtspFace/PipeLine.cpp"
            "D:/Documents/works/RtspFace/Logger/src/logger.cc"
            "D:/Documents/works/RtspFace/MediaHelper.cpp"
            "D:/Documents/works/RtspFace/GraphicHelper.cpp"
            "D:/Documents/works/RtspFace/PL_RTSPClient.cpp"
            "D:/Documents/works/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp"
            "D:/Documents/works/RtspFace/PL_AndroidSurfaceViewRender.cpp"
            "D:/Documents/works/RtspFace/PL_SensetimeFaceTrack.cpp"
            "D:/Documents/works/RtspFace/PL_ColorConv.cpp"
            "D:/Documents/works/RtspFace/PL_Gainer.cpp"
            "D:/Documents/works/RtspFace/PL_Paint.cpp"
            "D:/Documents/works/RtspFace/PL_V4L2Source.cpp"
            "D:/Documents/works/RtspFace/PL_BlockGrouping.cpp"
            "D:/Documents/works/RtspFace/PL_Queue.cpp"
            "D:/workspace/proxy/RtspFace/PipeLine.cpp"
            "D:/workspace/proxy/RtspFace/Logger/src/logger.cc"
            "D:/workspace/proxy/RtspFace/MediaHelper.cpp"
            "D:/workspace/proxy/RtspFace/GraphicHelper.cpp"
            "D:/workspace/proxy/RtspFace/PL_RTSPClient.cpp"
            "D:/workspace/proxy/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp"
            "D:/workspace/proxy/RtspFace/PL_AndroidSurfaceViewRender.cpp"
            "D:/workspace/proxy/RtspFace/PL_SensetimeFaceTrack.cpp"
            "D:/workspace/proxy/RtspFace/PL_Gainer.cpp"
            "D:/workspace/proxy/RtspFace/PL_Scale.cpp"
            "D:/workspace/proxy/RtspFace/PL_ColorConv.cpp"
            "D:/workspace/proxy/RtspFace/PL_Paint.cpp"
            "D:/workspace/proxy/RtspFace/PL_V4L2Source.cpp"
            "D:/workspace/proxy/RtspFace/PL_BlockGrouping.cpp"
            #"D:/workspace/proxy/RtspFace/PL_Queue.cpp"
            #"D:/workspace/proxy/RtspFace/PL_Fork2.cpp"
            "D:/Documents/works/FaceServer/proto_hton_ntoh.cpp"
            "D:/Documents/works/FaceServer/PbFaceList.pb.cc"
            "D:/workspace/proxy/FaceServer/proto_hton_ntoh.cpp"
            "D:/workspace/proxy/FaceServer/PbFaceList.pb.cc"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Capture.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Device.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Output.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Capture.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Device.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Output.cpp"
             "D:/Documents/works/RtspFace/CvUtil/CvxText.cpp"
            #"D:/workspace/proxy/RtspFace/CvUtil/CvxText.cpp"
            )
            )
# Include libraries needed for native-codec-jni lib
target_link_libraries(rtspface
                      android
                      log
                      #ui
                      mediandk
                      #OpenMAXAL
                      android
                      log
                      #ui
                      mediandk
                      #OpenMAXAL
                      #jnigraphics
                      #EGL
                      #GLESv2
@@ -58,52 +62,51 @@
                      )
include_directories(
                    "D:/Documents/works/RtspFace"
                    "D:/Documents/works/FaceServer"
                    "D:/workspace/proxy/RtspFace"
                    "D:/workspace/proxy/FaceServer"
                    #"D:/workspace/libhardware-android-5.1.1_r38/include"
                    #"D:/workspace/core-android-5.1.1_r38/include"
                    #"D:/workspace/native-android-5.1.1_r38/include"
                    #"D:/workspace/core-android-5.1.1_r38/libsync/include"
                    #"D:/workspace/native-android-5.1.1_r38/opengl/include"
                    #"D:/workspace/libhardware-android-5.1.1_r38/include"
                    #"D:/workspace/core-android-5.1.1_r38/include"
                    #"D:/workspace/native-android-5.1.1_r38/include"
                    #"D:/workspace/core-android-5.1.1_r38/libsync/include"
                    #"D:/workspace/native-android-5.1.1_r38/opengl/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/include"
                    "D:/Documents/works/RtspFace/libv4l2cpp/inc"
                    "D:/workspace/proxy/RtspFace/libv4l2cpp/inc"
                    #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/include/freetype2"
                    #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include"
                    )
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/freetype/include/freetype2"
                    )
target_link_libraries(rtspface
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libliveMedia.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libgroupsock.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libBasicUsageEnvironment.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libUsageEnvironment.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libliveMedia.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libgroupsock.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libBasicUsageEnvironment.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libUsageEnvironment.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/arm64-v8a/libyuv_static.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/lib/arm64-v8a/libopencv_java3.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/arm64-v8a/libprotobuf.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/lib/arm64-v8a/libcvface_api.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/lib/arm64-v8a/libfreetype.so"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/armeabi-v7a/libyuv_static.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/lib/armeabi-v7a/libopencv_java3.so"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/armeabi-v7a/libprotobuf.so"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/lib/armeabi-v7a/libcvface_api.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/freetype/lib/arm64-v8a/libfreetype.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/arm64-v8a/libv4l2wrapper.a"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/armeabi-v7a/libv4l2wrapper.a"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so"
                      )
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so"
                      )
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
@@ -1,404 +1,459 @@
#include "CameraWrapper.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <MaterialBuffer.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineDecoderDetector;
    delete pipeLineAnalyzer;
    delete pipeLineRender;
}
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
        running = false;
        return false;
    }
    return true;
}
void CameraWrapper::stop()
{
    LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
    if (!running)
        return;
    running = false;
    pthread_join(decoder_thid, NULL);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
{
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        RectWrapper rw;
        rw.rect = iter->rect;
        rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
        rw.userData = (void*)(iter->id);
        rects.push_back(rw);
    }
}
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = false;
    {
        PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
        ret = rtspClient->init(&rtspConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
        ret = amcDecoder->init(&amcdConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
            return  false;
        }
        //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
        //PL_V4L2Source_Config v4l2Config;
        //v4l2Config.width = 640;
        //v4l2Config.height = 480;
        //ret = v4l2Source->init(&v4l2Config);
        //if (!ret) {
        //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
        //    return false;
        //}
#ifdef USE_ST_SDK
        PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
        ret = sfTrack->init(&sftConfig);
        if (!ret) {
            LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
            return false;
        }
#endif
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return  false;
        }
        PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return  false;
        }
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
            return  false;
        }
        ANativeWindow* window = (ANativeWindow*)(windowRender);
        ANativeWindow_Buffer buffer;
        if(windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0)
        {
            plScaleCfg.toHeight=buffer.height;
            plScaleCfg.toWidth=buffer.width;
            ANativeWindow_unlockAndPost(window);
        } else
        {
            plScaleCfg.toHeight=480;
            plScaleCfg.toWidth=640;
        }
        PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
        ret = plScale->init(&plScaleCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
            return  false;
        }
        PL_ColorConv_Config PLColorConvCfg;
        PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
        ret = plColorConv->init(&PLColorConvCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.fontPath = "data/msyh.ttc";
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
    return true;
}
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
{
    //jni thread may not able to invoke java callback
    // double check it's all ok
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
            getEnvStat = JNI_OK;
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();
    cameraWrapper.javaVM->DetachCurrentThread();
}
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    PLPLContext& plplContext(cameraWrapper.plplContext);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
    plplContext.cmds.clear();
    plplContext.params.clear();
    float width_scale =((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
    float height_scale =((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
        {
            if (iter->outOfFrame)
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
        std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_WTEXT);
            plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
            plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
            const wchar_t* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }
    return false;
}
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");
    cameraWrapper.setFaceLabel(4, L"会员wr");
    cameraWrapper.setFaceLabel(5, L"会员wn");
    cameraWrapper.setFaceLabel(6, L"会员wr");
    cameraWrapper.setFaceLabel(7, L"会员wn");
    cameraWrapper.setFaceLabel(8, L"会员wr");
    cameraWrapper.setFaceLabel(9, L"会员wr");
    cameraWrapper.setFaceLabel(10, L"会员wn");
    cameraWrapper.setFaceLabel(11, L"会员wr");
    cameraWrapper.setFaceLabel(12, L"会员wr");
    cameraWrapper.setFaceLabel(13, L"会员wr");
    cameraWrapper.setFaceLabel(14, L"会员wr");
    cameraWrapper.setFaceLabel(15, L"会员wr");
    cameraWrapper.setFaceLabel(16, L"会员wn");
    cameraWrapper.setFaceLabel(17, L"会员wr");
    cameraWrapper.setFaceLabel(18, L"会员wr");
    cameraWrapper.setFaceLabel(19, L"会员wr");
    cameraWrapper.setFaceLabel(20, L"会员wr");
    cameraWrapper.setFaceLabel(21, L"会员wr");
    cameraWrapper.setFaceLabel(22, L"会员wr");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
//    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
//    cameraWrapper.plplContext.params.push_back(20);
//    cameraWrapper.plplContext.params.push_back(20);
//    cameraWrapper.plplContext.params.push_back(100);
//    cameraWrapper.plplContext.params.push_back(100);
}
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
    while(cameraWrapper.running)
    {
        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
        if (!ret)
            continue;
        PipeMaterial pm;
        ret = last->gain(pm);
        if (!ret)
            continue;
        if (! cameraWrapper.faceCacheLocked)
        {
            PipeMaterial pmAnalizer(pm);
            PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
            bool ret = last->gain(pmAnalizer);
            if (ret)
                pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
        }
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
        //#debug
        //test_paint(cameraWrapper);
        cameraWrapper.pipeLineRender->pipe(&pm);
    }
    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
}
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
{
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
#include "CameraWrapper.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <MaterialBuffer.h>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineRender;
    delete pipeLineAnalyzer;
    delete pipeLineDecoderDetector;
}
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
    pthread_mutex_init(&live_daemon_mut, NULL);
    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create decoder_thid: %s/n", strerror(ret));
        running = false;
        return false;
    }
    ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create live_daemon_thid: %s/n", strerror(ret));
        running = false;
        return false;
    }
    return true;
}
void CameraWrapper::stop()
{
    LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
    if (!running)
        return;
    running = false;
    pthread_join(decoder_thid, NULL);
    pthread_join(live_daemon_thid, NULL);
    pthread_mutex_destroy(&live_daemon_mut);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
{
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        RectWrapper rw;
        rw.rect = iter->rect;
        rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
        rw.userData = (void*)(iter->id);
        rects.push_back(rw);
    }
}
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = initPl_DecoderPl();
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
        return  false;
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return  false;
        }
        PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return  false;
        }
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
            return  false;
        }
        ANativeWindow* window = (ANativeWindow*)(windowRender);
        ANativeWindow_Buffer buffer;
        if(windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0)
        {
            plScaleCfg.toHeight=buffer.height;
            plScaleCfg.toWidth=buffer.width;
            ANativeWindow_unlockAndPost(window);
        } else
        {
            plScaleCfg.toHeight=480;
            plScaleCfg.toWidth=640;
        }
        PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
        ret = plScale->init(&plScaleCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
            return  false;
        }
        PL_ColorConv_Config PLColorConvCfg;
        PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
        ret = plColorConv->init(&PLColorConvCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.fontPath = "/data/msyh.ttc";
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
    return true;
}
bool CameraWrapper::initPl_DecoderPl()
{
    bool ret = false;
    PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
    ret = rtspClient->init(&rtspConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
        return  false;
    }
    PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
    ret = amcDecoder->init(&amcdConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
        return  false;
    }
    //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
    //PL_V4L2Source_Config v4l2Config;
    //v4l2Config.width = 640;
    //v4l2Config.height = 480;
    //ret = v4l2Source->init(&v4l2Config);
    //if (!ret) {
    //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
    //    return false;
    //}
#ifdef USE_ST_SDK
    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
        return false;
    }
#endif
    return ret;
}
// Freeze the face cache: while locked, decoder_thd skips the analyzer pipeline
// and cw_pm_breaker_ptr_face neither caches frames nor fires the Java callback.
// NOTE(review): faceCacheLocked is a plain bool written from the JNI thread and
// read from decoder_thd with no synchronization -- presumably tolerable here,
// but confirm.
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
// Unfreeze the face cache; decoder_thd resumes analyzing frames and announcing
// detected faces via the Java callback.
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
{
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
{
    //jni thread may not able to invoke java callback
    // double check it's all ok
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
            getEnvStat = JNI_OK;
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();
    cameraWrapper.javaVM->DetachCurrentThread();
}
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    PLPLContext& plplContext(cameraWrapper.plplContext);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
    plplContext.cmds.clear();
    plplContext.params.clear();
    float width_scale = ((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
    float height_scale = ((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
        {
            if (iter->outOfFrame)
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
        std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_WTEXT);
            plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
            plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
            const wchar_t* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }
    return false;
}
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}
//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
//    ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};
// Element deleter handed to PipeLine::finit() when tearing a pipeline down.
// (The "destory" misspelling is kept: the name is referenced by callers.)
void cw_elem_destory_func(PipeLineElem* elem)
{
    delete elem;
}
// Tear down every element of the decoder/detector pipeline and rebuild it from
// scratch. Used by decoder_thd after the live daemon kills a stalled stream.
bool CameraWrapper::resetPl()
{
    pipeLineDecoderDetector->finit(cw_elem_destory_func);
    sleep(2); // presumably gives the RTSP session/decoder time to release resources -- TODO confirm
    return initPl_DecoderPl();
}
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
    while(cameraWrapper.running)
    {
        if (cameraWrapper.killed)
        {
            LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
            cameraWrapper.resetPl();
            cameraWrapper.killed = false;
            sleep(2);
        }
        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        //LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
        if (!ret)
            continue;
        PipeMaterial pm;
        ret = last->gain(pm);
        if (!ret)
            continue;
        cameraWrapper.lastAliveTime = time(nullptr);
        if (! cameraWrapper.faceCacheLocked)
        {
            PipeMaterial pmAnalizer(pm);
            PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
            bool ret = last->gain(pmAnalizer);
            if (ret)
                pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
        }
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
        //#debug
        //test_paint(cameraWrapper);
        cameraWrapper.pipeLineRender->pipe(&pm);
    }
    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
}
/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
    while(cameraWrapper.running)
    {
        sleep(10);
        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
            rtspClient->kill();
            cameraWrapper.killed = true;
        }
    }
}
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.h
@@ -13,8 +13,6 @@
#include <PL_SensetimeFaceTrack.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_Queue.h>
//#include "looper.h"
#include <android/native_window_jni.h>
#include <media/NdkMediaCodec.h>
@@ -29,38 +27,39 @@
    PipeLine* pipeLineAnalyzer;
    PipeLine* pipeLineRender;
    PL_Queue* queueFrame;
    PL_RTSPClient_Config rtspConfig;
    PL_AndroidMediaCodecDecoder_Config amcdConfig;
    PL_AndroidSurfaceViewRender_Config asvrConfig;
    SensetimeFaceTrackConfig sftConfig;
    PL_RTSPClient_Config rtspConfig;
    PL_AndroidMediaCodecDecoder_Config amcdConfig;
    PL_AndroidSurfaceViewRender_Config asvrConfig;
    SensetimeFaceTrackConfig sftConfig;
    PL_Scale_Config plScaleCfg;
    int cameraIdx;
    JavaVM* javaVM;
    JNIEnv* javaEnv;
    JavaVM* javaVM;
    JNIEnv* javaEnv;
    jclass faceCallbackClazz;
    jmethodID faceCallbackFunc;
    void* windowRender;
    void* windowDecode;
    pthread_t decoder_thid;
    pthread_t analyzer_thid;
    pthread_t live_daemon_thid;
    pthread_mutex_t live_daemon_mut;
    bool running;
    volatile bool killed;
    time_t lastAliveTime;
    bool faceCacheLocked;
    //FaceCache faceCache;
    FaceCacheForPLBG faceCache;
    //FaceCache faceCache;
    FaceCacheForPLBG faceCache;
    PLPLContext plplContext;
    std::map<int, std::wstring> faceLabels;
    CameraWrapper() : 
        pipeLineDecoderDetector(nullptr), pipeLineAnalyzer(nullptr), pipeLineRender(nullptr), queueFrame(nullptr), rtspConfig(), amcdConfig(), asvrConfig(), sftConfig(),
        cameraIdx(0), javaVM(nullptr), javaEnv(nullptr), faceCallbackClazz(0), faceCallbackFunc(0), windowRender(nullptr), windowDecode(nullptr),
        decoder_thid(0), analyzer_thid(0), running(false),
        pipeLineDecoderDetector(nullptr), pipeLineAnalyzer(nullptr), pipeLineRender(nullptr), rtspConfig(), amcdConfig(), asvrConfig(), sftConfig(),
        cameraIdx(0), javaVM(nullptr), javaEnv(nullptr), faceCallbackClazz(0), faceCallbackFunc(0), windowRender(nullptr), windowDecode(nullptr),
        decoder_thid(0), live_daemon_thid(0), live_daemon_mut(), running(false), killed(false), lastAliveTime(0),
        faceCacheLocked(false), faceCache(),
        plplContext(), faceLabels()
    {
@@ -71,7 +70,7 @@
    bool start();
    void stop();
    bool initPl();
    void lockFace();
    void releaseFace();
@@ -79,7 +78,10 @@
private:
    static void* decoder_thd(void *arg);
    static void* analyzer_thd(void *arg);
    static void* live_daemon_thd(void *arg);
    bool resetPl();
    bool initPl_DecoderPl();
};
#endif
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
@@ -18,6 +18,7 @@
#include "DebugNetwork.h"
#include "cpu_sched_test.h"
//#include <mediastreamer2/include/mediastreamer2/msjava.h>
@@ -38,6 +39,8 @@
void Java_cn_com_basic_face_util_RtspFaceNative_init(JNIEnv *env, jclass clazz)
{
    g_logger.set_level(INFO);
    cpu_sched();
#ifdef USE_ST_SDK
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
@@ -184,16 +187,14 @@
static void *getSurfaceHolderGetSurface(JNIEnv *env, jobject jobjSurfaceHolder)
{
    jclass jcSurfaceHolder = env->FindClass("android/view/SurfaceHolder");
    jmethodID jmSurfaceHolder_getSurface = env->GetMethodID(jcSurfaceHolder, "getSurface",
                                                            "()Landroid/view/Surface;");
    jmethodID jmSurfaceHolder_getSurface = env->GetMethodID(jcSurfaceHolder, "getSurface", "()Landroid/view/Surface;");
    jobject surface = env->CallObjectMethod(jobjSurfaceHolder, jmSurfaceHolder_getSurface);
    return surface;
}
// set the surface
void
Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx,
                                                      jobject surfaceRender)
void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx,
                                                           jobject surfaceRender)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setSurface" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
@@ -332,7 +333,7 @@
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]);
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);
    cameraWrapper.stop();
}
@@ -476,9 +477,7 @@
    evpHeader->size = sizeof(EVPHeader) + sizeof(FDP_FaceDetectPB) + buffSize;
    buffSize = evpHeader->size;
    FDP_FaceDetectPB *fdpFaceDetectPB = new(buffer + sizeof(EVPHeader)) FDP_FaceDetectPB(dbId,
                                                                                         cameraIdx +
                                                                                         1);
    FDP_FaceDetectPB *fdpFaceDetectPB = new(buffer + sizeof(EVPHeader)) FDP_FaceDetectPB(dbId, cameraIdx + 1);
    evpHeader->hton();
    fdpFaceDetectPB->hton();
@@ -731,22 +730,6 @@
}
bool Java_cn_com_basic_face_util_RtspFaceNative_telCall(JNIEnv *env, jclass clazz, jstring phone)
{
}
void Java_cn_com_basic_face_util_RtspFaceNative_Hang(JNIEnv *env, jclass clazz)
{
}
void Java_cn_com_basic_face_util_RtspFaceNative_TelShutdown(JNIEnv *env, jclass clazz)
{
}
void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel" << LOG_ENDL;
@@ -757,7 +740,7 @@
    const char *utfChars = env->GetStringUTFChars(label, NULL);
    std::string labelstr(utfChars);
    std::wstring wlabelstr;
    wlabelstr.assign(labelstr.begin(),labelstr.end());
    wlabelstr.assign(labelstr.begin(), labelstr.end());
    env->ReleaseStringUTFChars(label, utfChars);
    cameraWrapper.setFaceLabel(stTrackId, wlabelstr);
}
VisitFace/RtspNativeCodec/app/src/main/cpp/cpu_sched_test.cpp
New file
@@ -0,0 +1,31 @@
#include "cpu_sched_test.h"
#include <logger.h>
#include<sched.h>
#include<ctype.h>
#include<sys/types.h>
#include<sys/sysinfo.h>
#include<unistd.h>
// Pin the calling process onto CPU `id` by adding it to the current affinity
// mask (CPUs already in the mask are kept). Failures are reported to stderr
// but not fatal.
void set_cpu(int id)
{
    cpu_set_t mask;
    CPU_ZERO(&mask);
    // Start from the current affinity so we extend, rather than replace, the mask.
    if (sched_getaffinity(0, sizeof(mask), &mask) == -1)
    {
        // was "/n": a literal slash-n, not a newline
        fprintf(stderr, "warning: could not get CPU affinity\n");
    }
    CPU_SET(id, &mask);
    if (sched_setaffinity(0, sizeof(mask), &mask) == -1)
    {
        fprintf(stderr, "warning: could not set CPU affinity\n");
    }
}
// Process-wide CPU scheduling setup hook, called once from JNI init.
// Currently a no-op: the pinning experiment below is disabled.
void cpu_sched()
{
    //set_cpu(0x0003);
}
VisitFace/RtspNativeCodec/app/src/main/cpp/cpu_sched_test.h
New file
@@ -0,0 +1,6 @@
#ifndef _CPU_SCHED_TEST_H_
#define _CPU_SCHED_TEST_H_
void cpu_sched();
#endif
VisitFace/RtspNativeCodec/app/src/main/java/com/example/nativecodec/NativeCodec.java
@@ -86,7 +86,7 @@
        ThisActivity = this;
        RtspFaceNative.init();
        RtspFaceNative.setLocalIP("192.168.1.37");
        RtspFaceNative.setLocalIP("192.168.1.84");
        mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
@@ -282,9 +282,9 @@
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin@192.168.1.188:554/cam/realmonitor?channel=1&subtype=2");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.70/h264");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.70/h264_2");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.22/h264");
                        mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.22/h264_2");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/sub/av_stream");
                        mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.132:554/h264/ch1/main/av_stream");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.132:554/h264/ch1/sub/av_stream");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream");
                        //mCreated = createPlayer(1, "rtsp://192.168.1.56:8554");