chenke
2017-07-20 1eb1ffa329352dbbd4c5d6cf3e1ef08272e31957
Stable version of scaling and Chinese font rendering; NV12-to-RGB565 conversion works correctly

git-svn-id: http://192.168.1.226/svn/proxy@759 454eff88-639b-444f-9e54-f578c98de674
11 files modified
2 files added
1241 ■■■■■ files changed
RtspFace/PL_AndroidSurfaceViewRender.cpp 52 ●●●●
RtspFace/PL_AndroidSurfaceViewRender.h 2 ●●●
RtspFace/PL_ColorConv.cpp 222 ●●●●●
RtspFace/PL_ColorConv.h 36 ●●●●●
RtspFace/PL_Paint.cpp 1 ●●●●
RtspFace/PL_Scale.cpp 25 ●●●●
RtspFace/PL_SensetimeFaceTrack.cpp 2 ●●●
VisitFace/RtspNativeCodec/app/src/main/cpp/CMakeLists.txt 99 ●●●●
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp 749 ●●●●
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.h 6 ●●●●●
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp 39 ●●●●
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.h 2 ●●●
VisitFace/RtspNativeCodec/app/src/main/java/com/example/nativecodec/NativeCodec.java 6 ●●●●
RtspFace/PL_AndroidSurfaceViewRender.cpp
@@ -249,6 +249,33 @@
    return true;
}
bool convert_rgb565_origin_to_render(PL_ASVR_Internal *in) {
    ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
    int window_width = ANativeWindow_getWidth(window);
    int window_height = ANativeWindow_getHeight(window);
    int src_width = in->lastMbfBuffOrigin.width;
    int src_height = in->lastMbfBuffOrigin.height;
    if (src_width != window_width || src_height != window_height)
    {
        uint8_t* src = (uint8_t*)in->lastMbfBuffOrigin.buffer;
        uint8_t* dst = (uint8_t*)in->buffer1;
        libyuv::ScalePlane_16((uint16*)src, src_width,
                              src_width, src_height,
                              (uint16*)dst, window_width,
                              window_width, window_height,
                              libyuv::kFilterNone);
        in->buff1Size = window_width * window_height * 2;
        memcpy(in->buffer, in->buffer1, in->buff1Size);
        in->buffSize = in->buff1Size;
    }
    else
    {
        memcpy(in->buffer, in->lastMbfBuffOrigin.buffer, in->lastMbfBuffOrigin.buffSize);
        in->buffSize = in->lastMbfBuffOrigin.buffSize;
    }
    return true;
}
bool convert_nv12_origin_to_render(PL_ASVR_Internal* in)
{
    if (false)
@@ -310,10 +337,10 @@
                          dst_width, dst_height,
                          libyuv::kFilterNone);
        libyuv::ScalePlane_16((uint16*)src_uv, MH_SUBSAMPLE1(src_width ,2),
                              MH_SUBSAMPLE1(src_width ,2), src_height,
                              (uint16*)dst_uv, MH_SUBSAMPLE1(dst_width ,2),
                              MH_SUBSAMPLE1(dst_width, 2), dst_height,
        libyuv::ScalePlane_16((uint16*)src_uv, MH_SUBSAMPLE1(src_width, 2),
                              MH_SUBSAMPLE1(src_width, 2), MH_SUBSAMPLE1(src_height, 2),
                              (uint16*)dst_uv, MH_SUBSAMPLE1(dst_width, 2),
                              MH_SUBSAMPLE1(dst_width, 2), MH_SUBSAMPLE1(dst_height, 2),
                              libyuv::kFilterNone);
        in->buff1Size = dst_width * dst_height * 3 / 2;
@@ -432,9 +459,9 @@
        return false;
    
    MB_Frame* frame = (MB_Frame*)pm->buffer;
    if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
    if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12 && frame->type != MB_Frame::MBFT_RGB565)
    {
        LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
        LOG_ERROR << "Only support MBFT_YUV420 、MBFT_NV12 and MBFT_RGB565" << LOG_ENDL;
        in->payError = true;
        return false;
    }
@@ -456,7 +483,8 @@
        ret = convert_yuv420_origin_to_render(in);
    else if (in->lastMbfBuffOrigin.type == MB_Frame::MBFT_NV12)
        ret = convert_nv12_origin_to_render(in);
    else if (in->lastMbfBuffOrigin.type == MB_Frame::MBFT_RGB565)
        ret = convert_rgb565_origin_to_render(in);
    if (!ret)
    {
        LOG_ERROR << "convert yuv origin to render error" << LOG_ENDL;
@@ -484,12 +512,13 @@
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_RGB565, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
    return !(in->payError);
}
@@ -528,3 +557,4 @@
    return true;
}
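A note on the RGB565 scaling above: libyuv::ScalePlane_16 treats each packed RGB565 pixel as one opaque 16-bit sample, so it only produces correct colors with libyuv::kFilterNone (nearest neighbor); an interpolating filter would average across the packed 5-6-5 bit fields. A minimal standalone sketch of the same call (buffer handling here is illustrative, not taken from the commit):

#include <cstdint>
#include <vector>
#include <libyuv.h>

// Nearest-neighbor scale of a packed RGB565 image. Strides are in
// 16-bit pixels; kFilterNone keeps the 5-6-5 bit fields intact.
std::vector<uint16_t> scale_rgb565(const std::vector<uint16_t>& src,
                                   int src_w, int src_h,
                                   int dst_w, int dst_h)
{
    std::vector<uint16_t> dst((size_t)dst_w * dst_h);
    libyuv::ScalePlane_16(src.data(), src_w,
                          src_w, src_h,
                          dst.data(), dst_w,
                          dst_w, dst_h,
                          libyuv::kFilterNone);
    return dst;
}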
RtspFace/PL_AndroidSurfaceViewRender.h
@@ -36,7 +36,7 @@
    static bool pay_breaker_MBFT_YUV(const PipeMaterial* pm, void* args);
    
private:
    void* internal;
    void* internal;
};
PipeLineElem* create_PL_AndroidSurfaceViewRender();
RtspFace/PL_ColorConv.cpp
New file
@@ -0,0 +1,222 @@
#include "PL_ColorConv.h"
#include "MaterialBuffer.h"
#include "logger.h"
#include <libyuv.h>
#include <PbFaceList.pb.h>
#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
struct PL_ColorConv_Internal
{
    uint8_t* buffer;
    size_t buffSize;
    size_t buffSizeMax;
    bool payError;
    PipeMaterial::PipeMaterialBufferType lastPmType;
    MB_Frame tempFrame;
    PL_ColorConv_Config config;
    PL_ColorConv_Internal() :
            buffer(nullptr), buffSize(0), buffSizeMax(0), payError(true),
            lastPmType(PipeMaterial::PMT_NONE), tempFrame(), config()
    {
    }
    ~PL_ColorConv_Internal()
    {
        delete[] buffer;
        buffer = nullptr;
    }
    void reset()
    {
        buffSize = 0;
        payError = true;
        lastPmType = PipeMaterial::PMT_NONE;
        MB_Frame _tempFrame;
        tempFrame = _tempFrame;
        PL_ColorConv_Config _config;
        config = _config;
        if (buffer != nullptr)
        {
            delete[] buffer;
            buffer = nullptr;
            buffSizeMax = 0;
        }
    }
};
PipeLineElem* create_PL_ColorConv()
{
    return new PL_ColorConv;
}
PL_ColorConv::PL_ColorConv() : internal(new PL_ColorConv_Internal)
{
}
PL_ColorConv::~PL_ColorConv()
{
    delete (PL_ColorConv_Internal*)internal;
    internal = nullptr;
}
bool PL_ColorConv::init(void* args)
{
    PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
    in->reset();
    if (args != nullptr)
    {
        PL_ColorConv_Config* config = (PL_ColorConv_Config*)args;
        in->config = *config;
    }
    return true;
}
void PL_ColorConv::finit()
{
    PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
}
bool image_to_rgb565(PL_ColorConv_Internal* in, MB_Frame::MBFType srcType)
{
    const int srcHeight = in->tempFrame.height;
    const int srcWidth = in->tempFrame.width;
    int dstSize = srcHeight * srcWidth * 2;
    if (in->buffer == nullptr || in->buffSizeMax < dstSize)
    {
        if (in->buffer != nullptr)
            delete[] in->buffer;
        in->buffer = new uint8_t[dstSize];
        in->buffSizeMax = dstSize;
        LOG_INFO << "image_to_rgb565 alloc buffer size=" << dstSize << std::endl;
    }
    in->buffSize = dstSize;
    if (srcType == MB_Frame::MBFT_YUV420)
    {
        //#todo
        LOG_ERROR << "srcType only support MBFT_NV12" << std::endl;
        return false;
    }
    else if (srcType == MB_Frame::MBFT_NV12)
    {
        const uint8_t * srcBuffer = (uint8_t *)in->tempFrame.buffer;
        const uint8_t *src_y = srcBuffer;
        const uint8_t *src_uv =  src_y + (srcHeight * srcWidth);
        uint8_t *dst = (uint8_t *) (in->buffer);
        libyuv::NV12ToRGB565(src_y, srcWidth,
                             src_uv, srcWidth,
                             dst, srcWidth * 2,
                             srcWidth, srcHeight);
    }
    else if (srcType == MB_Frame::MBFT_BGRA)
    {
        //#todo
        LOG_ERROR << "srcType only supports MBFT_NV12" << std::endl;
        return false;
    }
    else
    {
        LOG_ERROR << "srcType only supports MBFT_NV12" << std::endl;
        return false;
    }
    return true;
}
bool PL_ColorConv::pay(const PipeMaterial& pm)
{
    PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
    in->payError = true;
    if (pm.buffer == nullptr)
        return false;
    bool ret = false;
    in->lastPmType = pm.type;
    switch(pm.type)
    {
        case PipeMaterial::PMT_BYTES:
            LOG_ERROR << "PL_ColorConv unsupport type: PMT_BYTES" << std::endl;
            break;
        case PipeMaterial::PMT_FRAME:
        {
            MB_Frame* frame = (MB_Frame*)pm.buffer;
            switch(frame->type)
            {
                case MB_Frame::MBFT_NV12:
                    in->tempFrame = *frame;
                    ret = image_to_rgb565(in, frame->type);
                    break;
                default:
                    LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
                    return false;
            }
        }
            break;
        case PipeMaterial::PMT_PM_LIST:
        {
            // break pm list into single pm(s)
            PipeMaterial* ppm = (PipeMaterial*)pm.buffer;
            for (size_t i = 0; i < pm.buffSize; i++, ppm++)
            {
                if (ppm->type == PipeMaterial::PMT_FRAME)
                {
                    MB_Frame* frame = (MB_Frame*)ppm->buffer;
                    switch(frame->type)
                    {
                        case MB_Frame::MBFT_NV12:
                            in->tempFrame = *frame;
                            ret = image_to_rgb565(in, frame->type);
                            break;
                        default:
                            LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
                            return false;
                    }
                }
            }
        }break;
        default:
            LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
            return false;
    }
    in->payError = !ret;
    return ret;
}
bool PL_ColorConv::gain(PipeMaterial& pm)
{
    PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
    PipeMaterial newPm;
    newPm.type = PipeMaterial::PMT_NONE;
    newPm.former = this;
    switch(in->lastPmType)
    {
        case PipeMaterial::PMT_FRAME:
        case PipeMaterial::PMT_PM_LIST:
            {
                newPm.type = PipeMaterial::PMT_FRAME;
                in->tempFrame.buffer = in->buffer;
                in->tempFrame.buffSize = in->buffSize;
                in->tempFrame.type = MB_Frame::MBFT_RGB565;
                newPm.buffer = &(in->tempFrame);
                newPm.buffSize = 0;
            }
            break;
        default:
            LOG_ERROR << "Only support PMT_FRAME and PMT_PM_LIST" << std::endl;
    }
    pm = newPm;
    return !in->payError;
}
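image_to_rgb565() above relies on the tightly packed NV12 layout: a full-resolution Y plane followed immediately by one interleaved UV plane at half vertical resolution, so src_uv = src_y + width * height and both planes share the same stride. A self-contained sketch of the same conversion (function and variable names are illustrative):

#include <cstdint>
#include <vector>
#include <libyuv.h>

// Convert a tightly packed NV12 frame to RGB565 (2 bytes per pixel).
// libyuv::NV12ToRGB565 returns 0 on success.
bool nv12_to_rgb565(const uint8_t* nv12, int width, int height,
                    std::vector<uint8_t>& rgb565)
{
    rgb565.resize((size_t)width * height * 2);
    const uint8_t* src_y = nv12;
    const uint8_t* src_uv = nv12 + (size_t)width * height;
    return libyuv::NV12ToRGB565(src_y, width,
                                src_uv, width,
                                rgb565.data(), width * 2,
                                width, height) == 0;
}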
RtspFace/PL_ColorConv.h
New file
@@ -0,0 +1,36 @@
#ifndef _PL_ColorConv_H_
#define _PL_ColorConv_H_
#include "PipeLine.h"
#include "MaterialBuffer.h"
struct PL_ColorConv_Config
{
    int filterMode; // libyuv/scale.h FilterMode
    // Used only when pm.type == PMT_BYTES
    int frameType;
    uint16_t srcWidth;
    uint16_t srcHeight;
    PL_ColorConv_Config() : filterMode(0), frameType(0), srcWidth(0), srcHeight(0)
    { }
};
class PL_ColorConv : public PipeLineElem
{
public:
    PL_ColorConv();
    virtual ~PL_ColorConv();
    virtual bool init(void* args);
    virtual void finit();
    virtual bool pay(const PipeMaterial& pm);
    virtual bool gain(PipeMaterial& pm);
private:
    void* internal;
};
PipeLineElem* create_PL_ColorConv();
#endif
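For reference, wiring the new element into a pipeline follows the same register/push/init pattern as the other PL_* elements; a short hedged sketch modeled on the CameraWrapper::initPl() changes later in this commit (pipeLineRender is assumed to exist):

PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);

PL_ColorConv_Config colorConvCfg; // defaults: filterMode 0, NV12 input expected
PL_ColorConv* colorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
if (!colorConv->init(&colorConvCfg))
    LOG_ERROR << "pipeLineRender.plColorConv init error" << LOG_ENDL;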
RtspFace/PL_Paint.cpp
@@ -5,7 +5,6 @@
#include <string.h> // for memcpy
#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc.hpp>
#ifdef ENABLE_WTEXT
#include "CvUtil/CvxText.h"
#endif
RtspFace/PL_Scale.cpp
@@ -3,6 +3,8 @@
#include "logger.h"
#include <libyuv.h>
#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
struct PL_Scale_Internal
{
    uint8_t* buffer;
@@ -11,12 +13,12 @@
    bool payError;
    
    PipeMaterial::PipeMaterialBufferType lastPmType;
    MB_Frame lastFrame;
    MB_Frame tempFrame;
    PL_Scale_Config config;
    PL_Scale_Internal() : 
        buffer(nullptr), buffSize(0), buffSizeMax(0), payError(true), 
        lastPmType(PipeMaterial::PMT_NONE), lastFrame(), config()
        lastPmType(PipeMaterial::PMT_NONE), tempFrame(), config()
    {
    }
    
@@ -33,8 +35,8 @@
        
        lastPmType = PipeMaterial::PMT_NONE;
        
        MB_Frame _lastFrame;
        lastFrame = _lastFrame;
        MB_Frame _tempFrame;
        tempFrame = _tempFrame;
        
        PL_Scale_Config _config;
        config = _config;
@@ -92,7 +94,6 @@
bool image_scale(PL_Scale_Internal* in, 
    uint8_t* srcBuffer, MB_Frame::MBFType srcType, uint16_t srcWidth, uint16_t srcHeight)
{
#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
    const int dst_width = in->config.toWidth;
    const int dst_height = in->config.toHeight;
@@ -213,7 +214,7 @@
        {
        case MB_Frame::MBFT_YUV420:
        case MB_Frame::MBFT_BGRA:
            in->lastFrame = *frame;
            in->tempFrame = *frame;
            ret = image_scale(in, (uint8_t*)frame->buffer, frame->type,
                frame->width, frame->height);
            break;
@@ -238,7 +239,7 @@
                        case MB_Frame::MBFT_YUV420:
                        case MB_Frame::MBFT_BGRA:
                        case MB_Frame::MBFT_NV12:
                            in->lastFrame = *frame;
                            in->tempFrame = *frame;
                            ret = image_scale(in, (uint8_t*)frame->buffer,frame->type,
                                              frame->width, frame->height);
                            break;
@@ -279,13 +280,13 @@
    case PipeMaterial::PMT_PM_LIST:
    {
        newPm.type = PipeMaterial::PMT_FRAME;
        newPm.buffer = &(in->lastFrame);
        newPm.buffer = &(in->tempFrame);
        newPm.buffSize = 0;
        in->lastFrame.buffer = in->buffer;
        in->lastFrame.buffSize = in->buffSize;
        in->lastFrame.width = in->config.toWidth;
        in->lastFrame.height = in->config.toHeight;
        in->tempFrame.buffer = in->buffer;
        in->tempFrame.buffSize = in->buffSize;
        in->tempFrame.width = in->config.toWidth;
        in->tempFrame.height = in->config.toHeight;
    }
    break;
    default:
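The SUBSAMPLE macro hoisted to file scope in this hunk is a ceiling division, ceil(v / a), used for chroma-plane dimensions so that odd luma sizes round up instead of truncating. A tiny worked example:

#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))

// Half-resolution chroma planes must still cover the odd edge row/column:
static_assert(SUBSAMPLE(1920, 2) == 960, "even width halves exactly");
static_assert(SUBSAMPLE(1921, 2) == 961, "odd width rounds up");
static_assert(SUBSAMPLE(1081, 2) == 541, "odd height rounds up");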
RtspFace/PL_SensetimeFaceTrack.cpp
@@ -209,7 +209,7 @@
int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, 
                uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
{
    //PipeLineElemTimingDebugger td(nullptr);
    PipeLineElemTimingDebugger td(nullptr);
    if (in->config.doTrackPerFrame == 0)
        return 0;
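Un-commenting PipeLineElemTimingDebugger re-enables per-call timing for doFaceTrack(). Its real interface is not shown in this diff; as an assumption, it is presumably an RAII helper along these lines, logging elapsed time when the scope exits by any return path:

#include <chrono>

// Hypothetical sketch of an RAII scope timer in the spirit of
// PipeLineElemTimingDebugger; the destructor fires on every return path.
struct ScopedTimer
{
    std::chrono::steady_clock::time_point start = std::chrono::steady_clock::now();
    ~ScopedTimer()
    {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - start).count();
        LOG_DEBUG << "elapsed us=" << us << LOG_ENDL;
    }
};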
VisitFace/RtspNativeCodec/app/src/main/cpp/CMakeLists.txt
@@ -14,28 +14,33 @@
            DebugNetwork.cpp
            CaptureCamera.cpp
            serial.c
            TeleWrapper.cpp
            #TeleWrapper.cpp
            "D:/workspace/proxy/RtspFace/PipeLine.cpp"
            "D:/workspace/proxy/RtspFace/Logger/src/logger.cc"
            "D:/workspace/proxy/RtspFace/MediaHelper.cpp"
            "D:/workspace/proxy/RtspFace/PL_RTSPClient.cpp"
            "D:/workspace/proxy/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp"
            "D:/workspace/proxy/RtspFace/PL_AndroidSurfaceViewRender.cpp"
            "D:/workspace/proxy/RtspFace/PL_SensetimeFaceTrack.cpp"
            "D:/workspace/proxy/RtspFace/PL_Gainer.cpp"
            "D:/workspace/proxy/RtspFace/PL_Paint.cpp"
            "D:/workspace/proxy/RtspFace/PL_V4L2Source.cpp"
            "D:/workspace/proxy/RtspFace/PL_BlockGrouping.cpp"
            "D:/workspace/proxy/RtspFace/PL_Queue.cpp"
            "D:/Documents/works/RtspFace/PL_Scale.cpp"
            "D:/Documents/works/RtspFace/PipeLine.cpp"
            "D:/Documents/works/RtspFace/Logger/src/logger.cc"
            "D:/Documents/works/RtspFace/MediaHelper.cpp"
            "D:/Documents/works/RtspFace/GraphicHelper.cpp"
            "D:/Documents/works/RtspFace/PL_RTSPClient.cpp"
            "D:/Documents/works/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp"
            "D:/Documents/works/RtspFace/PL_AndroidSurfaceViewRender.cpp"
            "D:/Documents/works/RtspFace/PL_SensetimeFaceTrack.cpp"
            "D:/Documents/works/RtspFace/PL_ColorConv.cpp"
            "D:/Documents/works/RtspFace/PL_Gainer.cpp"
            "D:/Documents/works/RtspFace/PL_Paint.cpp"
            "D:/Documents/works/RtspFace/PL_V4L2Source.cpp"
            "D:/Documents/works/RtspFace/PL_BlockGrouping.cpp"
            "D:/Documents/works/RtspFace/PL_Queue.cpp"
            "D:/workspace/proxy/FaceServer/proto_hton_ntoh.cpp"
            "D:/workspace/proxy/FaceServer/PbFaceList.pb.cc"
            "D:/Documents/works/FaceServer/proto_hton_ntoh.cpp"
            "D:/Documents/works/FaceServer/PbFaceList.pb.cc"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Capture.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Device.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp"
             "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Output.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Capture.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Device.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp"
             "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Output.cpp"
             "D:/Documents/works/RtspFace/CvUtil/CvxText.cpp"
            )
@@ -53,8 +58,8 @@
                      )
include_directories(
                    "D:/workspace/proxy/RtspFace"
                    "D:/workspace/proxy/FaceServer"
                    "D:/Documents/works/RtspFace"
                    "D:/Documents/works/FaceServer"
                    #"D:/workspace/libhardware-android-5.1.1_r38/include"
                    #"D:/workspace/core-android-5.1.1_r38/include"
@@ -62,39 +67,43 @@
                    #"D:/workspace/core-android-5.1.1_r38/libsync/include"
                    #"D:/workspace/native-android-5.1.1_r38/opengl/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/include"
                    "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/include"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/include"
                    "D:/workspace/proxy/RtspFace/libv4l2cpp/inc"
                    #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include"
                    "D:/Documents/works/RtspFace/libv4l2cpp/inc"
                    "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/include/freetype2"
                    #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include"
                    )
target_link_libraries(rtspface
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libliveMedia.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libgroupsock.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libBasicUsageEnvironment.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libUsageEnvironment.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libliveMedia.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libgroupsock.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libBasicUsageEnvironment.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libUsageEnvironment.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/armeabi-v7a/libyuv_static.a"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/lib/armeabi-v7a/libopencv_java3.so"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/armeabi-v7a/libprotobuf.so"
                      "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/lib/armeabi-v7a/libcvface_api.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/arm64-v8a/libyuv_static.a"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/lib/arm64-v8a/libopencv_java3.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/arm64-v8a/libprotobuf.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/lib/arm64-v8a/libcvface_api.so"
                      "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/lib/arm64-v8a/libfreetype.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/armeabi-v7a/libv4l2wrapper.a"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/arm64-v8a/libv4l2wrapper.a"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so"
                      #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so"
                      #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so"
                      )
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
@@ -1,345 +1,404 @@
#include "CameraWrapper.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <MaterialBuffer.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineDecoderDetector;
    delete pipeLineAnalyzer;
    delete pipeLineRender;
}
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
        running = false;
        return false;
    }
    return true;
}
void CameraWrapper::stop()
{
    LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
    if (!running)
        return;
    running = false;
    pthread_join(decoder_thid, NULL);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
{
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        RectWrapper rw;
        rw.rect = iter->rect;
        rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
        rw.userData = (void*)(iter->id);
        rects.push_back(rw);
    }
}
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = false;
    {
        PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
        ret = rtspClient->init(&rtspConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
        ret = amcDecoder->init(&amcdConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
            return  false;
        }
        //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
        //PL_V4L2Source_Config v4l2Config;
        //v4l2Config.width = 640;
        //v4l2Config.height = 480;
        //ret = v4l2Source->init(&v4l2Config);
        //if (!ret) {
        //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
        //    return false;
        //}
#ifdef USE_ST_SDK
        PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
        ret = sfTrack->init(&sftConfig);
        if (!ret) {
            LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
            return false;
        }
#endif
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return  false;
        }
        PL_Gainer* plBG = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return  false;
        }
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
    return true;
}
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
{
    // a JNI thread may not be able to invoke the Java callback directly;
    // double check the attachment state first
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
            getEnvStat = JNI_OK;
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();
    cameraWrapper.javaVM->DetachCurrentThread();
}
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    PLPLContext& plplContext(cameraWrapper.plplContext);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
    plplContext.cmds.clear();
    plplContext.params.clear();
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
        {
            if (iter->outOfFrame)
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back(iter->rect.leftTop.X);
        plplContext.params.push_back(iter->rect.leftTop.Y);
        plplContext.params.push_back(iter->rect.rightBottom.X);
        plplContext.params.push_back(iter->rect.rightBottom.Y);
        std::map<int, std::string>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_TEXT);
            plplContext.params.push_back(iter->rect.leftTop.X);
            plplContext.params.push_back(iter->rect.leftTop.Y);
            const char* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }
    return false;
}
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.setFaceLabel(0, "vip");
    cameraWrapper.setFaceLabel(1, "abc");
    cameraWrapper.setFaceLabel(2, "wrn");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(0);
    cameraWrapper.plplContext.params.push_back(0);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
    while(cameraWrapper.running)
    {
        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
        if (!ret)
            continue;
        PipeMaterial pm;
        ret = last->gain(pm);
        if (!ret)
            continue;
        if (! cameraWrapper.faceCacheLocked)
        {
            PipeMaterial pmAnalizer(pm);
            PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
            bool ret = last->gain(pmAnalizer);
            if (ret)
                pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
        }
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
        //#debug
        //test_paint(cameraWrapper);
        cameraWrapper.pipeLineRender->pipe(&pm);
    }
    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
}
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
void CameraWrapper::setFaceLabel(int st_track_id, const std::string& label)
{
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
#include "CameraWrapper.h"
#include <logger.h>
#include <Logger/src/logger.hpp>
#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <MaterialBuffer.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineDecoderDetector;
    delete pipeLineAnalyzer;
    delete pipeLineRender;
}
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if(ret != 0)
    {
        LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
        running = false;
        return false;
    }
    return true;
}
void CameraWrapper::stop()
{
    LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
    if (!running)
        return;
    running = false;
    pthread_join(decoder_thid, NULL);
}
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
{
    return 0.0f;
}
static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
{
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        RectWrapper rw;
        rw.rect = iter->rect;
        rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
        rw.userData = (void*)(iter->id);
        rects.push_back(rw);
    }
}
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = false;
    {
        PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
        ret = rtspClient->init(&rtspConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
        ret = amcDecoder->init(&amcdConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
            return  false;
        }
        //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
        //PL_V4L2Source_Config v4l2Config;
        //v4l2Config.width = 640;
        //v4l2Config.height = 480;
        //ret = v4l2Source->init(&v4l2Config);
        //if (!ret) {
        //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
        //    return false;
        //}
#ifdef USE_ST_SDK
        PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
        ret = sfTrack->init(&sftConfig);
        if (!ret) {
            LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
            return false;
        }
#endif
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return  false;
        }
        PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return  false;
        }
    }
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
            return  false;
        }
        ANativeWindow* window = (ANativeWindow*)(windowRender);
        ANativeWindow_Buffer buffer;
        if (windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0)
        {
            plScaleCfg.toHeight = buffer.height;
            plScaleCfg.toWidth = buffer.width;
            ANativeWindow_unlockAndPost(window);
        }
        else
        {
            plScaleCfg.toHeight = 480;
            plScaleCfg.toWidth = 640;
        }
        PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
        ret = plScale->init(&plScaleCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
            return  false;
        }
        PL_ColorConv_Config PLColorConvCfg;
        PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
        ret = plColorConv->init(&PLColorConvCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_Paint_Config plPaintCfg;
        plPaintCfg.fontPath = "data/msyh.ttc";
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
            return  false;
        }
        PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
        ret = asvRender->init(&asvrConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
            return  false;
        }
    }
    return true;
}
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
{
    // a JNI thread may not be able to invoke the Java callback directly;
    // double check the attachment state first
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
            getEnvStat = JNI_OK;
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();
    cameraWrapper.javaVM->DetachCurrentThread();
}
bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    PLPLContext& plplContext(cameraWrapper.plplContext);
    const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
    plplContext.cmds.clear();
    plplContext.params.clear();
    float width_scale = ((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
    float height_scale = ((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
    for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
    {
        plplContext.cmds.push_back(PLPLC_COLOR);
        plplContext.params.push_back('F');
        if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
        {
            if (iter->outOfFrame)
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
            else
            {
                plplContext.params.push_back(255);
                plplContext.params.push_back(0);
                plplContext.params.push_back(0);
                plplContext.params.push_back(255);
            }
        }
        else
        {
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
            plplContext.params.push_back(0);
            plplContext.params.push_back(255);
        }
        plplContext.cmds.push_back(PLPLC_RECT);
        plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
        plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
        std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
        if (iterFaceLabel != cameraWrapper.faceLabels.end())
        {
            plplContext.cmds.push_back(PLPLC_WTEXT);
            plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
            plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
            const wchar_t* label = iterFaceLabel->second.c_str();
            plplContext.params.push_back(PLPLType(label));
        }
    }
    return false;
}
bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
{
    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
    if (cameraWrapper.faceCacheLocked)
        return false;
    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
        return false;
    cameraWrapper.faceCache.cachePm(*pm);
    invokeCallback(cameraWrapper, faceCount);
    return false;
}
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");
    cameraWrapper.setFaceLabel(4, L"会员wr");
    cameraWrapper.setFaceLabel(5, L"会员wn");
    cameraWrapper.setFaceLabel(6, L"会员wr");
    cameraWrapper.setFaceLabel(7, L"会员wn");
    cameraWrapper.setFaceLabel(8, L"会员wr");
    cameraWrapper.setFaceLabel(9, L"会员wr");
    cameraWrapper.setFaceLabel(10, L"会员wn");
    cameraWrapper.setFaceLabel(11, L"会员wr");
    cameraWrapper.setFaceLabel(12, L"会员wr");
    cameraWrapper.setFaceLabel(13, L"会员wr");
    cameraWrapper.setFaceLabel(14, L"会员wr");
    cameraWrapper.setFaceLabel(15, L"会员wr");
    cameraWrapper.setFaceLabel(16, L"会员wn");
    cameraWrapper.setFaceLabel(17, L"会员wr");
    cameraWrapper.setFaceLabel(18, L"会员wr");
    cameraWrapper.setFaceLabel(19, L"会员wr");
    cameraWrapper.setFaceLabel(20, L"会员wr");
    cameraWrapper.setFaceLabel(21, L"会员wr");
    cameraWrapper.setFaceLabel(22, L"会员wr");
    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
//    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
//    cameraWrapper.plplContext.params.push_back(20);
//    cameraWrapper.plplContext.params.push_back(20);
//    cameraWrapper.plplContext.params.push_back(100);
//    cameraWrapper.plplContext.params.push_back(100);
}
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
    while(cameraWrapper.running)
    {
        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
        if (!ret)
            continue;
        PipeMaterial pm;
        ret = last->gain(pm);
        if (!ret)
            continue;
        if (! cameraWrapper.faceCacheLocked)
        {
            PipeMaterial pmAnalizer(pm);
            PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
            bool ret = last->gain(pmAnalizer);
            if (ret)
                pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
        }
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
        //#debug
        //test_paint(cameraWrapper);
        cameraWrapper.pipeLineRender->pipe(&pm);
    }
    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
}
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}
void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}
void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
{
    if (faceLabels.size() > 32)
        faceLabels.clear();
    faceLabels.insert(std::make_pair(st_track_id, label));
}
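One functional change worth calling out in the new cw_pm_breaker_ptr_paint(): face rectangles arrive in decoder coordinates (amcdConfig.ak_width x ak_height) but are now drawn on the scaled render surface (plScaleCfg.toWidth x toHeight), so each coordinate is multiplied by a per-axis scale factor. Isolated, the mapping is just a linear scale (names below are illustrative):

// Map a rect from decoder space to render space; mirrors the
// width_scale/height_scale computation in cw_pm_breaker_ptr_paint().
struct ScaledRect { int left, top, right, bottom; };

ScaledRect map_rect_to_render(float l, float t, float r, float b,
                              int decode_w, int decode_h,
                              int render_w, int render_h)
{
    const float sx = (float)render_w / decode_w;
    const float sy = (float)render_h / decode_h;
    return ScaledRect{ (int)(l * sx), (int)(t * sy),
                       (int)(r * sx), (int)(b * sy) };
}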
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.h
@@ -12,6 +12,7 @@
#include <PL_AndroidSurfaceViewRender.h>
#include <PL_SensetimeFaceTrack.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_Queue.h>
//#include "looper.h"
@@ -34,6 +35,7 @@
    PL_AndroidMediaCodecDecoder_Config amcdConfig;
    PL_AndroidSurfaceViewRender_Config asvrConfig;
    SensetimeFaceTrackConfig sftConfig;
    PL_Scale_Config plScaleCfg;
    int cameraIdx;
    JavaVM* javaVM;
@@ -53,7 +55,7 @@
    PLPLContext plplContext;
    std::map<int, std::string> faceLabels;
    std::map<int, std::wstring> faceLabels;
    CameraWrapper() : 
        pipeLineDecoderDetector(nullptr), pipeLineAnalyzer(nullptr), pipeLineRender(nullptr), queueFrame(nullptr), rtspConfig(), amcdConfig(), asvrConfig(), sftConfig(),
@@ -73,7 +75,7 @@
    void lockFace();
    void releaseFace();
    void setFaceLabel(int st_track_id, const std::string& label);
    void setFaceLabel(int st_track_id, const std::wstring& label);
private:
    static void* decoder_thd(void *arg);
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
@@ -17,7 +17,6 @@
#include <stdlib.h>
#include "DebugNetwork.h"
#include "TeleWrapper.h"
//#include <mediastreamer2/include/mediastreamer2/msjava.h>
@@ -30,8 +29,6 @@
static std::string g_stface_license_str;
CameraWrapper g_CameraWrappers[CAMERA_COUNT];
TeleWrapper g_TeleWrapper;
CaptureCamera g_CaptureCamera;
@@ -55,7 +52,6 @@
        g_CameraWrappers[i].pipeLineRender = new PipeLine;
        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
    }
    g_TeleWrapper.start();
}
void
@@ -741,48 +737,29 @@
bool Java_cn_com_basic_face_util_RtspFaceNative_telCall(JNIEnv *env, jclass clazz, jstring phone)
{
    std::string _phone;
    {
        const char *utfFunc = env->GetStringUTFChars(phone, NULL);
        _phone = utfFunc;
        env->ReleaseStringUTFChars(phone, utfFunc);
    }
    const char *phoneNum = _phone.c_str();
    TeleTask task;
    task.command = TeleTask::CALL;
    task.param = phoneNum;
    g_TeleWrapper.push(task);
    return true;
}
void Java_cn_com_basic_face_util_RtspFaceNative_Hang(JNIEnv *env, jclass clazz)
{
    TeleTask task;
    task.command = TeleTask::HANGUP;
    g_TeleWrapper.push(task);
}
void Java_cn_com_basic_face_util_RtspFaceNative_TelShutdown(JNIEnv *env, jclass clazz)
{
   LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL;
   g_TeleWrapper.stop();
}
void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring phoneNumber)
void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]);
    std::string _phoneNumber;
    {
        const char *utfFunc = env->GetStringUTFChars(phoneNumber, NULL);
        _phoneNumber = utfFunc;
        env->ReleaseStringUTFChars(phoneNumber, utfFunc);
    }
    cameraWrapper.setFaceLabel(stTrackId, _phoneNumber);
    const char *utfChars = env->GetStringUTFChars(label, NULL);
    std::string labelstr(utfChars);
    std::wstring wlabelstr;
    wlabelstr.assign(labelstr.begin(),labelstr.end());
    env->ReleaseStringUTFChars(label, utfChars);
    cameraWrapper.setFaceLabel(stTrackId, wlabelstr);
}
} // extern C
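A caveat on the setFaceLabel() change above: wlabelstr.assign(labelstr.begin(), labelstr.end()) widens each UTF-8 byte to a wchar_t, which is only correct for ASCII; a Chinese label passed from Java would arrive garbled. A hedged alternative sketch that decodes UTF-8 properly (std::wstring_convert exists here but is deprecated since C++17; JNI's GetStringUTFChars actually yields modified UTF-8, which this sketch ignores for simplicity):

#include <codecvt>
#include <locale>
#include <string>

// Decode a UTF-8 std::string into a std::wstring so that multi-byte
// characters (e.g. Chinese labels) survive intact. Assumes no embedded
// NULs; on Android wchar_t is 32-bit, so one wchar_t per code point.
std::wstring utf8_to_wide(const std::string& utf8)
{
    std::wstring_convert<std::codecvt_utf8<wchar_t>> conv;
    return conv.from_bytes(utf8);
}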
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.h
@@ -61,7 +61,7 @@
bool Java_cn_com_basic_face_util_RtspFaceNative_teleCall(JNIEnv *env, jclass clazz, jstring phoneNumber);
void Java_cn_com_basic_face_util_RtspFaceNative_teleHang(JNIEnv *env, jclass clazz);
void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring phoneNumber);
void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label);
}
VisitFace/RtspNativeCodec/app/src/main/java/com/example/nativecodec/NativeCodec.java
@@ -86,7 +86,7 @@
        ThisActivity = this;
        RtspFaceNative.init();
        RtspFaceNative.setLocalIP("192.168.1.74");
        RtspFaceNative.setLocalIP("192.168.1.37");
        mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
@@ -282,9 +282,9 @@
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin@192.168.1.188:554/cam/realmonitor?channel=1&subtype=2");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.70/h264");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.70/h264_2");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.22/h264");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.22/h264");
                        //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/sub/av_stream");
                        mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream");
                        mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.132:554/h264/ch1/main/av_stream");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream");
                        //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream");
                        //mCreated = createPlayer(1, "rtsp://192.168.1.56:8554");