houxiao
2017-08-01 ab5f950eb26752a7a26ea746dd22a41a00b1074a
clean up

git-svn-id: http://192.168.1.226/svn/proxy@957 454eff88-639b-444f-9e54-f578c98de674
4 files changed
1238 lines modified
RtspFace/PL_BlockGrouping.cpp 294
RtspFace/PL_BlockGrouping.h 2
RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp 856
RtspFace/PL_SensetimeFaceTrackMultiTrd.h 86
RtspFace/PL_BlockGrouping.cpp
@@ -20,30 +20,30 @@
    MBFrameWrapper(bool _copyData, const MB_Frame _frame) :
        frame(_frame), frameRefCount(_copyData ? 0 : -1)
    {
        if (_copyData)
        {
            uint8_t* newBuffer = new uint8_t[_frame.buffSize];
            frame.buffer = newBuffer;
            memcpy(newBuffer, _frame.buffer, _frame.buffSize);
        }
    }
   void reference()
   {
       if (frameRefCount >= 0)
           frameRefCount++;
       else
           frameRefCount--;
   }
    void release()
    {
        if (frameRefCount == 0)
        {
            delete[] (uint8_t*)frame.buffer;
            frame.reset();
        }
        else if (frameRefCount == -1)
        {
            frame.reset();
        }
@@ -56,7 +56,7 @@
        {
            frameRefCount--;
        }
    }
    
    ~MBFrameWrapper()
    {
@@ -68,7 +68,7 @@
{
    const uint16_t blockID;
    RectWrapper rectWrapper;
    MBFrameWrapper* mbfWrapper;
    uint8_t* croppedData;
    size_t croppedDataSize;
@@ -81,48 +81,48 @@
        if (croppedData != nullptr || mbfWrapper == nullptr)
            return;
        PLGH_Rect& rect(rectWrapper.rect);
        if (mbfWrapper->frame.type == MB_Frame::MBFT_YUV420)
        {
            int src_width = mbfWrapper->frame.width;
            int src_height = mbfWrapper->frame.height;
            uint8_t* src_y = (uint8_t*)(mbfWrapper->frame.buffer);
            uint8_t* src_u = (uint8_t*)(src_y + (src_height * src_width));
            uint8_t* src_v = (uint8_t*)(src_u + (src_height * src_width / 4));
            cv::Mat matY(cv::Size(src_width, src_height), CV_8UC1, src_y);
            cv::Mat roiMatY(matY, cv::Rect(rect.leftTop.X, rect.leftTop.Y, rect.width(), rect.height()));
            cv::Mat cloneRoiMatY(roiMatY.clone());
            cv::Mat matU(cv::Size(MH_SUBSAMPLE1(src_width, 2), MH_SUBSAMPLE1(src_height, 2)), CV_8UC1, src_u);
            cv::Mat roiMatU(matU, cv::Rect(MH_SUBSAMPLE1(rect.leftTop.X, 2), MH_SUBSAMPLE1(rect.leftTop.Y, 2), MH_SUBSAMPLE1(rect.width(), 2), MH_SUBSAMPLE1(rect.height(), 2)));
            cv::Mat cloneRoiMatU(roiMatU.clone());
            cv::Mat matV(cv::Size(MH_SUBSAMPLE1(src_width, 2), MH_SUBSAMPLE1(src_height, 2)), CV_8UC1, src_v);
            cv::Mat roiMatV(matV, cv::Rect(MH_SUBSAMPLE1(rect.leftTop.X, 2), MH_SUBSAMPLE1(rect.leftTop.Y, 2), MH_SUBSAMPLE1(rect.width(), 2), MH_SUBSAMPLE1(rect.height(), 2)));
            cv::Mat cloneRoiMatV(roiMatV.clone());
            const size_t yPlanarSize = rect.width() * rect.height();
            croppedDataSize = yPlanarSize * 1.5;
            croppedData = new uint8_t[croppedDataSize];
            uint8_t* dst_y = (uint8_t*)croppedData;
            uint8_t* dst_u = (uint8_t*)(dst_y + yPlanarSize);
            uint8_t* dst_v = (uint8_t*)(dst_u + yPlanarSize / 4);
            memcpy(dst_y, cloneRoiMatY.ptr(), yPlanarSize);
            memcpy(dst_u, cloneRoiMatU.ptr(), yPlanarSize / 4);
            memcpy(dst_v, cloneRoiMatV.ptr(), yPlanarSize / 4);
            //{
            //    static size_t f = 0;
            //    char fname[50];
            //    sprintf(fname, "/sdcard/face-%u-w%u-h%u.yuv420", ++f, rect.width(), rect.height());
            //    FILE *pFile = fopen(fname, "wb");
            //    fwrite(croppedData, 1, croppedDataSize, pFile);
            //    fclose(pFile);
            //    if (f > 10)exit(0);
            //}
        }
        else if (mbfWrapper->frame.type == MB_Frame::MBFT_NV12)
        {
@@ -136,8 +136,8 @@
            cv::Mat cloneRoiMatY(roiMatY.clone());
            
            cv::Mat matUV(cv::Size(MH_SUBSAMPLE1(src_width, 2), MH_SUBSAMPLE1(src_height, 2)), CV_16UC1, src_uv);
            cv::Mat roiMatUV(matUV, cv::Rect(MH_SUBSAMPLE1(rect.leftTop.X, 2), MH_SUBSAMPLE1(rect.leftTop.Y, 2), MH_SUBSAMPLE1(rect.width(), 2), MH_SUBSAMPLE1(rect.height(), 2)));
            //cv::Mat roiMatUV(matUV, cv::Rect(rect.leftTop.X, rect.leftTop.Y, rect.width(), rect.height()));
            cv::Mat cloneRoiMatUV(roiMatUV.clone());
            
            const size_t yPlanarSize = rect.width() * rect.height();
@@ -161,10 +161,10 @@
    }
    
    void release_crop()
    {
        delete[] (uint8_t*)croppedData;
        croppedDataSize = 0;
    }
    
    ~PLBG_Block()
    {
@@ -181,8 +181,8 @@
    const uint16_t groupID;
    
    std::vector<PLBG_Block> blocks; // sequence of paid blocks
    int paid_rect_interval;
    int paid_frame_count;
    int _current_frame_accepted_blocks;
    
    PLBG_Group() : groupID(++_groupID), blocks(), paid_rect_interval(0), paid_frame_count(0), _current_frame_accepted_blocks(0)
@@ -276,7 +276,7 @@
bool plbg_pay_breaker(const PipeMaterial* pm, void* args)
{
    *(PipeMaterial**)args = const_cast<PipeMaterial*>(pm);
    return false;
}
@@ -310,7 +310,7 @@
{
    PL_BlockGrouping_Internal* in = (PL_BlockGrouping_Internal*)internal;
    //#todo
    //if (in->mbfs.size() + 1 >= PLBG_MAX_CACHE_FRAMES)
    //{
    //    LOG_WARN << "cached frame exceed " << PLBG_MAX_CACHE_FRAMES << LOG_ENDL;
@@ -318,35 +318,35 @@
    //}
    PipeMaterial pmPtr;
    {
        PipeMaterial* _pmPtr = nullptr;
        pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, plbg_pay_breaker, &_pmPtr);
        if (_pmPtr == nullptr)
        {
            LOG_WARN << "PMT_PM_LIST need PMT_PTR" << LOG_ENDL;
            return false;
        }
        pmPtr = *_pmPtr;
    }
    MB_Frame mbfFrame;
    {
        PipeMaterial* _pmFrame = nullptr;
        if (_pmFrame == nullptr)
            pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, plbg_pay_breaker, &_pmFrame);
        if (_pmFrame == nullptr)
            pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, plbg_pay_breaker, &_pmFrame);
        if (_pmFrame == nullptr)
        {
            LOG_WARN << "PMT_PM_LIST need PMT_FRAME of (MBFT_YUV420 or MBFT_NV12)" << LOG_ENDL;
            return false;
        }
        mbfFrame = *(MB_Frame*)_pmFrame->buffer;
    }
    // cache frame
    MBFrameWrapper* mbfWrapper = new MBFrameWrapper(in->config.copyData, mbfFrame);
@@ -359,40 +359,40 @@
    for (std::list<PLBG_Group>::iterator iterGrp = in->groups.begin(); iterGrp != in->groups.end(); ++iterGrp)
    {
        PLBG_Group& currGroup(*iterGrp);
        currGroup.paid_rect_interval++;
        currGroup.paid_frame_count++;
        for (std::list<RectWrapper>::iterator iterRectw = rectws.begin(); iterRectw != rectws.end(); )
        {
            const RectWrapper& currRectw(*iterRectw);
            if (!test_group_accept_rect(in, currGroup, currRectw.rect))
            {
                ++iterRectw;
                continue;
            }
            
            // accept this block into group
            PLBG_Block currBlock;
            currBlock.rectWrapper = currRectw;
            currBlock.mbfWrapper = mbfWrapper;
            mbfWrapper->reference();
            currBlock.crop_data();
            currGroup.blocks.push_back(currBlock);
            currGroup.paid_rect_interval = 0;
            currGroup._current_frame_accepted_blocks++;
            iterRectw = rectws.erase(iterRectw);
        }
    }
    //#todo
    // we test remain groups asume remain rectws
    //if (in->groups.size() > PLBG_MAX_GROUPS - rectws.size())
    //{
    //    if (mbfWrapper->frameRefCount == 0)
    //    {
    //        in->mbfs.erase(mbfWrapper);
    //        delete mbfWrapper;
    //        mbfWrapper = nullptr;
    //    }
@@ -409,14 +409,14 @@
        PLBG_Block currBlock;
        currBlock.rectWrapper = *rectws.begin();
        currBlock.mbfWrapper = mbfWrapper;
        mbfWrapper->reference();
        currBlock.crop_data();
        rectws.erase(rectws.begin());
        
        PLBG_Group currGroup;
        currGroup.blocks.push_back(currBlock);
        currGroup.paid_rect_interval = 0;
        currGroup._current_frame_accepted_blocks++;
        iterNewGrp = in->groups.insert(in->groups.end(), currGroup);
    }
    
@@ -435,7 +435,7 @@
        PLBG_Block currBlock;
        currBlock.rectWrapper = *iterRectw;
        currBlock.mbfWrapper = mbfWrapper;
        mbfWrapper->reference();
        currBlock.crop_data();
        if (iterGrp == in->groups.end())
        {
@@ -452,37 +452,37 @@
    for (std::list<PLBG_Group>::iterator iterGrp = in->groups.begin(); iterGrp != in->groups.end(); ++iterGrp)
        iterGrp->_current_frame_accepted_blocks = 0;
    if (mbfWrapper->frameRefCount != -1 && mbfWrapper->frameRefCount != 0)
        in->mbfws.insert(mbfWrapper);
    else
        delete mbfWrapper;
    return true;
}
float scoring_canny_focus(const PLBG_Block& block)
{
    const int w = block.rectWrapper.rect.width();
    const int h = block.rectWrapper.rect.height();
    if (block.croppedData == nullptr || block.croppedDataSize < w * h)
        return 0.0f;
    cv::Mat yMat(cv::Size(w, h), CV_8UC1, block.croppedData);
    cv::Mat edges;
    cv::GaussianBlur(yMat, edges, cv::Size(5, 5), 1.5, 1.5);
    //cv::Canny(edges, edges, 50, 100, 3);
    cv::Sobel(yMat, edges, CV_8UC1, 1, 0, 3);
    cv::Canny(edges, edges, 50, 100, 3);
    //cv::Sobel(yMat, edges, CV_8UC1, 1, 0, 3);
    //memcpy(block.croppedData, edges.data, w * h);
    size_t sum = 0;
    std::for_each(edges.begin<uint8_t>(), edges.end<uint8_t>(), [&](uint8_t v)
    {
        sum += (v != 0);
    });
    float focusRate = (float)sum / (w * h);
    return focusRate;
}
float scoring_histogram_focus(const PLBG_Block& block)
@@ -501,23 +501,23 @@
        {
            iterBlk->mbfWrapper->release();
            iterBlk->release_crop();
            if (iterBlk->rectWrapper.user_data_deleter != nullptr)
                iterBlk->rectWrapper.user_data_deleter(iterBlk->rectWrapper);
        }
    }
    
    in->groupsMature.clear();
    
    for (std::set<MBFrameWrapper*>::iterator iter = in->mbfws.begin(); iter != in->mbfws.end(); )
    {
        MBFrameWrapper* mbfw = *iter;
        if (mbfw->frameRefCount == 0 || mbfw->frameRefCount == -1)
        {
            delete mbfw;
            iter = in->mbfws.erase(iter);
        }
        else
            ++iter;
    }
    
    in->outputs.clear();
@@ -526,7 +526,7 @@
    for (std::list<PLBG_Group>::iterator iterGrp = in->groups.begin(); iterGrp != in->groups.end(); )
    {
        bool grpMature = false;
        PLBG_Group& currGroup(*iterGrp);
        if (currGroup.blocks.size() >= in->config.group_size_max)
            grpMature = true;
@@ -539,7 +539,7 @@
            //drop group
            for (std::vector<PLBG_Block>::iterator iterBlk = currGroup.blocks.begin(); iterBlk != currGroup.blocks.end(); ++iterBlk)
            {
                iterBlk->mbfWrapper->release();
                iterBlk->release_crop();
            }
            
@@ -552,16 +552,16 @@
            // generate output group
            in->groupsMature.push_back(currGroup);
            iterGrp = in->groups.erase(iterGrp);
            continue;
        }
        ++iterGrp;
    }
    
    // scoring blocks
    const bool calc_canny = !MH_F_ZEQ(in->config.canny_focus);
    const bool calc_histogram = !MH_F_ZEQ(in->config.histogram_uniformy);
    const bool calc_user_score_1 = !MH_F_ZEQ(in->config.user_score_1_rate);
    const float user_score_2_rate = 1.0f - in->config.canny_focus - in->config.histogram_uniformy - in->config.user_score_1_rate;
    const bool calc_user_score_2 = (!MH_F_ZEQ(user_score_2_rate) && in->config.user_score_2_func != nullptr);
    
@@ -576,18 +576,18 @@
            
            if (calc_canny)
            {
                //float cannyScore = scoring_canny_focus(*iterBlk) * 10.0f;
                //totalScore += in->config.canny_focus * cannyScore;
                float cannyScore = scoring_canny_focus(*iterBlk) * 10.0f;
                totalScore += in->config.canny_focus * cannyScore;
            }
            if (calc_histogram)
            {
                float histScore = scoring_histogram_focus(*iterBlk);
                totalScore += in->config.histogram_uniformy * histScore;
            }
            if (calc_user_score_1)
            {
                totalScore += in->config.user_score_1_rate * iterBlk->rectWrapper.user_score_1;
            }
            if (calc_user_score_2)
            {
                float userScore2 = in->config.user_score_2_func(&(iterBlk->mbfWrapper->frame), iterBlk->rectWrapper.rect, iterBlk->croppedData);
@@ -610,7 +610,7 @@
        output.rectInOriginFrame = iterBestBlk->rectWrapper;
        output.originframe = &(iterBestBlk->mbfWrapper->frame);
        output.score = bestScore;
        output.croppedData = iterBestBlk->croppedData;
        output.croppedDataSize = iterBestBlk->croppedDataSize;
        in->outputs.push_back(output);
RtspFace/PL_BlockGrouping.h
@@ -112,7 +112,7 @@
        get_rect_func(nullptr),
        group_size_min(1 + precfg_interval), group_size_max(2 * (precfg_fps * precfg_delay) / precfg_interval), frame_count_max(precfg_fps * precfg_delay), group_top_n(1),
        block_center_x_diff(100), block_center_y_diff(100),
        block_area_diff(20000), continuity_max(4.0 * precfg_interval), accept_blocks_max_per_frame(1), user_test_group_accept_rect(),
        block_area_diff(20000), continuity_max(8.0 * precfg_interval), accept_blocks_max_per_frame(1), user_test_group_accept_rect(),
        canny_focus(0.3), user_score_1_rate(0.4), histogram_uniformy(0.0), user_score_2_func(nullptr)
    { }
};
RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp
@@ -4,477 +4,529 @@
#include "MediaHelper.h"
#ifdef USE_OPENCV
#include <opencv2/opencv.hpp>
#endif
#include <cv_face.h>
class SensetimeFaceTrackThread {
class SensetimeFaceTrackThread
{
private:
    pthread_t track_thid;
    pthread_mutex_t thd_mutex;
    pthread_mutex_t res_mutex;
    mutable volatile bool thread_running;
    mutable volatile bool buffer_updated;
    mutable volatile bool res_updated;
    mutable volatile bool is_busy;
    unsigned char *image;
    cv_pixel_format pixel_format;
    int image_width;
    int image_height;
    int image_stride;
    int buffer_size;
    cv_face_orientation orientation;
    cv_face_t *p_faces;
    int faces_count;
    cv_result_t track_result;
    SensetimeFaceTrackConfig config;
    st_ff_vect_t faceFeatures;
    cv_handle_t tracker_handle;
public:
    SensetimeFaceTrackThread():
                                image_width(0), image_height(0),
                                image_stride(0),track_result(CV_OK),
                                thread_running(false) , buffer_updated(false),
                                image(nullptr),is_busy(true),config(),
                                buffer_size(0), res_updated(false)
    {
    }
    SensetimeFaceTrackThread() :
            image_width(0), image_height(0),
            image_stride(0), track_result(CV_OK),
            thread_running(false), buffer_updated(false),
            image(nullptr), is_busy(true), config(),
            buffer_size(0), res_updated(false)
    {
    }
    ~SensetimeFaceTrackThread() {
        thread_running = false;
        pthread_mutex_unlock(&thd_mutex);
        pthread_join(track_thid, nullptr);
        pthread_mutex_destroy(&thd_mutex);
        pthread_mutex_destroy(&res_mutex);
    ~SensetimeFaceTrackThread()
    {
        thread_running = false;
        pthread_mutex_unlock(&thd_mutex);
        pthread_join(track_thid, nullptr);
        pthread_mutex_destroy(&thd_mutex);
        pthread_mutex_destroy(&res_mutex);
        cv_face_destroy_tracker(tracker_handle);
        cv_face_destroy_tracker(tracker_handle);
        delete(image);
        image = nullptr;
    }
        delete (image);
        image = nullptr;
    }
    int initial(){
        int ret = pthread_create(&track_thid, NULL, track_thread, this);
        if (ret != 0) {
            LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        } else {
            thread_running = true;
        }
    int initial()
    {
        int ret = pthread_create(&track_thid, NULL, track_thread, this);
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        else
        {
            thread_running = true;
        }
        ret = pthread_mutex_init(&thd_mutex, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_init thd_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = pthread_mutex_init(&thd_mutex, nullptr);
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_mutex_init thd_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = pthread_mutex_init(&res_mutex, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_init res_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = pthread_mutex_init(&res_mutex, nullptr);
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_mutex_init res_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config);
        if (ret != 0) {
            LOGP(ERROR, "cv_face_create_tracker: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = cv_face_track_set_detect_face_cnt_limit(tracker_handle, config.detect_face_cnt_limit, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "cv_face_track_set_detect_face_cnt_limit: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        return ret;
    }
        ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config);
        if (ret != 0)
        {
            LOGP(ERROR, "cv_face_create_tracker: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
    void do_face_track(
            const unsigned char *image,
            cv_pixel_format pixel_format,
            int image_width,
            int image_height,
            int image_stride,
            cv_face_orientation orientation = CV_FACE_UP
    ) {
        if(is_busy)return;
        copy_image(image, image_height, image_stride);
        this->pixel_format = pixel_format;
        this->image_width = image_width;
        this->image_height = image_height;
        this->image_stride = image_stride;
        this->orientation = orientation;
        buffer_updated =true;
        int ret = pthread_mutex_unlock(&thd_mutex);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_unlock: %s/n", strerror(ret));
            thread_running = false;
        }
    }
        ret = cv_face_track_set_detect_face_cnt_limit(tracker_handle, config.detect_face_cnt_limit, nullptr);
        if (ret != 0)
        {
            LOGP(ERROR, "cv_face_track_set_detect_face_cnt_limit: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
    int initial_license(char *path) {
        int res = 0;
        FILE *licFile = fopen(path, "rb");
        if (licFile != nullptr) {
            char licBuffer[1025 * 5] = {'\0'};
            size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
            fclose(licFile);
        return ret;
    }
            if (licSize > 0) {
                res = cv_face_init_license_config(licBuffer);
                LOG_INFO << "cv_face_init_license_config 1 ret=" << res << LOG_ENDL;
                return res;
            }
        } else {
            LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
            res = errno;
            return res;
        }
        return res;
    }
    void do_face_track(
            const unsigned char *image,
            cv_pixel_format pixel_format,
            int image_width,
            int image_height,
            int image_stride,
            cv_face_orientation orientation = CV_FACE_UP)
    {
        if (is_busy)
            return;
        copy_image(image, image_height, image_stride);
        this->pixel_format = pixel_format;
        this->image_width = image_width;
        this->image_height = image_height;
        this->image_stride = image_stride;
        this->orientation = orientation;
        buffer_updated = true;
        int ret = pthread_mutex_unlock(&thd_mutex);
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_mutex_unlock: %s/n", strerror(ret));
            thread_running = false;
        }
    }
    void set_config(SensetimeFaceTrackConfig& cfg){
        config = cfg;
    }
    int initial_license(char *path)
    {
        int res = 0;
        FILE *licFile = fopen(path, "rb");
        if (licFile != nullptr)
        {
            char licBuffer[1025 * 5] = {'\0'};
            size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
            fclose(licFile);
    const SensetimeFaceTrackConfig* get_config(){
        return &config;
    }
            if (licSize > 0)
            {
                res = cv_face_init_license_config(licBuffer);
                LOG_INFO << "cv_face_init_license_config 1 ret=" << res << LOG_ENDL;
                return res;
            }
        }
        else
        {
            LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
            res = errno;
            return res;
        }
        return res;
    }
    void set_config(SensetimeFaceTrackConfig &cfg)
    {
        config = cfg;
    }
    const SensetimeFaceTrackConfig *get_config()
    {
        return &config;
    }
    void get_face_features(st_ff_vect_t &res_vector)
    {
        if (!res_updated) return;
        int ret = pthread_mutex_lock(&res_mutex);
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
        res_vector = faceFeatures;
        ret = pthread_mutex_unlock(&res_mutex);
        res_updated = false;
        if (ret != 0)
        {
            LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
    }
private:
    pthread_t track_thid;
    pthread_mutex_t thd_mutex;
    pthread_mutex_t res_mutex;
    mutable volatile bool thread_running;
    mutable volatile bool buffer_updated;
    mutable volatile bool res_updated;
    mutable volatile bool is_busy;
private:
    static void *track_thread(void *Args) {
        SensetimeFaceTrackThread* tracker = (SensetimeFaceTrackThread*)Args;
        while(tracker->thread_running)
        {
            tracker->is_busy = false;
            int ret = pthread_mutex_lock(&tracker->thd_mutex);
            tracker->is_busy = true;
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_lock: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            if(!tracker->buffer_updated)
                continue;
            tracker->track_result = cv_face_track(tracker->tracker_handle , tracker->image,
                                                  tracker->pixel_format, tracker->image_width,
                                                  tracker->image_height, tracker->image_stride,
                                                  tracker->orientation, &tracker->p_faces,
                                                  &tracker->faces_count);
    static void *track_thread(void *Args)
    {
        SensetimeFaceTrackThread *tracker = (SensetimeFaceTrackThread *) Args;
        while (tracker->thread_running)
        {
            tracker->is_busy = false;
            int ret = pthread_mutex_lock(&tracker->thd_mutex);
            tracker->is_busy = true;
            if (ret != 0)
            {
                LOGP(ERROR, "pthread_mutex_lock: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            if (!tracker->buffer_updated)
                continue;
            tracker->track_result = cv_face_track(tracker->tracker_handle, tracker->image,
                                                  tracker->pixel_format, tracker->image_width,
                                                  tracker->image_height, tracker->image_stride,
                                                  tracker->orientation, &tracker->p_faces,
                                                  &tracker->faces_count);
            if (ret != 0)
            {
                LOGP(ERROR, "cv_face_track: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            if (ret != 0) {
                LOGP(ERROR, "cv_face_track: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            ret = pthread_mutex_lock(&tracker->res_mutex);
            if (ret != 0)
            {
                LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            tracker->extract_features();
            ret = pthread_mutex_unlock(&tracker->res_mutex);
            if (ret != 0)
            {
                LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            ret = pthread_mutex_lock(&tracker->res_mutex);
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            tracker->extract_features();
            ret = pthread_mutex_unlock(&tracker->res_mutex);
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            cv_face_release_tracker_result(tracker->p_faces, tracker->faces_count);
            tracker->buffer_updated = false;
        }
    }
            cv_face_release_tracker_result(tracker->p_faces, tracker->faces_count);
            tracker->buffer_updated = false;
        }
    }
    void copy_image(const unsigned char *src, int height, int stride)
    {
        int size = height * stride * 1.5;
        if (image_size() < size)
        {
            if (image != nullptr)
            {
                delete (image);
            }
            image = new unsigned char[size];
            buffer_size = size;
        }
        memcpy(image, src, size);
    }
    void copy_image(const unsigned char *src, int height, int stride){
        int size = height * stride * 1.5;
        if(image_size() < size){
            if(image != nullptr){
                delete(image);
            }
            image = new unsigned char[size];
            buffer_size = size;
        }
        memcpy(image, src, size);
    }
    int image_size(){
        return buffer_size;
    }
private:
    st_ff_vect_t faceFeatures;
    cv_handle_t tracker_handle;
    void extract_features(){
        faceFeatures.clear();
        for (int i = 0; i < faces_count; i++) {
            if (MH_F_LT(p_faces[i].score, config.score_min)) {
                continue;
            }
            SensetimeFaceFeature faceFeature;
            faceFeature.rect.leftTop.X = p_faces[i].rect.left;
            faceFeature.rect.leftTop.Y = p_faces[i].rect.top;
            faceFeature.rect.rightBottom.X = p_faces[i].rect.right;
            faceFeature.rect.rightBottom.Y = p_faces[i].rect.bottom;
            faceFeature.id = p_faces[i].ID;
            faceFeature.score = p_faces[i].score;
            faceFeature.yaw = p_faces[i].yaw;
            faceFeature.pitch = p_faces[i].pitch;
            faceFeature.roll = p_faces[i].roll;
            faceFeature.eyeDistance = p_faces[i].eye_dist;
    int image_size()
    {
        return buffer_size;
    }
            LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
                 p_faces[i].rect.left, p_faces[i].rect.top,
                 p_faces[i].rect.right, p_faces[i].rect.bottom, p_faces[i].ID);
    void extract_features()
    {
        faceFeatures.clear();
        for (int i = 0; i < faces_count; i++)
        {
            if (MH_F_LT(p_faces[i].score, config.score_min))
            {
                continue;
            }
            SensetimeFaceFeature faceFeature;
            faceFeature.rect.leftTop.X = p_faces[i].rect.left;
            faceFeature.rect.leftTop.Y = p_faces[i].rect.top;
            faceFeature.rect.rightBottom.X = p_faces[i].rect.right;
            faceFeature.rect.rightBottom.Y = p_faces[i].rect.bottom;
            faceFeature.id = p_faces[i].ID;
            faceFeature.score = p_faces[i].score;
            faceFeature.yaw = p_faces[i].yaw;
            faceFeature.pitch = p_faces[i].pitch;
            faceFeature.roll = p_faces[i].roll;
            faceFeature.eyeDistance = p_faces[i].eye_dist;
            LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
                 p_faces[i].yaw, p_faces[i].pitch, p_faces[i].roll, p_faces[i].eye_dist);
            for (int j = 0; j < p_faces[i].points_count; j++) {
                PLGH_Point featurePoint;
                featurePoint.X = p_faces[i].points_array[j].x;
                featurePoint.Y = p_faces[i].points_array[j].y;
                faceFeature.featurePoints.points.push_back(featurePoint);
            }
            if (config.generate_face_point) {
                if (faceFeature.rect.leftTop.X < 0 ||
                    faceFeature.rect.rightBottom.X > image_height ||
                    faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > image_width)
                    faceFeature.outOfFrame = true;
            }
            if (config.generate_face_feature) {
                if (config.evenWidthHeight) {
                    if (faceFeature.rect.leftTop.X % 2 != 0) faceFeature.rect.leftTop.X--;
                    if (faceFeature.rect.leftTop.Y % 2 != 0) faceFeature.rect.leftTop.Y--;
                    if (faceFeature.rect.rightBottom.X % 2 != 0) faceFeature.rect.rightBottom.X--;
                    if (faceFeature.rect.rightBottom.Y % 2 != 0) faceFeature.rect.rightBottom.Y--;
                }
            LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
                 p_faces[i].rect.left, p_faces[i].rect.top,
                 p_faces[i].rect.right, p_faces[i].rect.bottom, p_faces[i].ID);
                // explode the range
                if (config.explode_feature_rect_x != 0) {
                    faceFeature.rect.leftTop.X = clamp(
                            faceFeature.rect.leftTop.X - config.explode_feature_rect_x, 0,
                            faceFeature.rect.leftTop.X);
                    faceFeature.rect.rightBottom.X = clamp(
                            faceFeature.rect.rightBottom.X + config.explode_feature_rect_x,
                            faceFeature.rect.rightBottom.X, int(image_width - 1));
                }
            LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
                 p_faces[i].yaw, p_faces[i].pitch, p_faces[i].roll, p_faces[i].eye_dist);
            for (int j = 0; j < p_faces[i].points_count; j++)
            {
                PLGH_Point featurePoint;
                featurePoint.X = p_faces[i].points_array[j].x;
                featurePoint.Y = p_faces[i].points_array[j].y;
                faceFeature.featurePoints.points.push_back(featurePoint);
            }
            if (config.generate_face_point)
            {
                if (faceFeature.rect.leftTop.X < 0 ||
                    faceFeature.rect.rightBottom.X > image_height ||
                    faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > image_width)
                    faceFeature.outOfFrame = true;
            }
            if (config.generate_face_feature)
            {
                if (config.evenWidthHeight)
                {
                    if (faceFeature.rect.leftTop.X % 2 != 0) faceFeature.rect.leftTop.X--;
                    if (faceFeature.rect.leftTop.Y % 2 != 0) faceFeature.rect.leftTop.Y--;
                    if (faceFeature.rect.rightBottom.X % 2 != 0) faceFeature.rect.rightBottom.X--;
                    if (faceFeature.rect.rightBottom.Y % 2 != 0) faceFeature.rect.rightBottom.Y--;
                }
                if (config.explode_feature_rect_y != 0) {
                    faceFeature.rect.leftTop.Y = clamp(
                            faceFeature.rect.leftTop.Y - config.explode_feature_rect_y, 0,
                            faceFeature.rect.leftTop.Y);
                    faceFeature.rect.rightBottom.Y = clamp(
                            faceFeature.rect.rightBottom.Y + config.explode_feature_rect_y,
                            faceFeature.rect.rightBottom.Y, int(image_height - 1));
                }
                faceFeatures.push_back(faceFeature);
                LOG_ERROR<<"Feature id: "<<faceFeature.id <<LOG_ERROR;
            }
        }
        res_updated = true;
    }
public:
    void get_face_features(st_ff_vect_t& res_vector){
        if(!res_updated) return;
        int ret = pthread_mutex_lock(&res_mutex);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
        res_vector = faceFeatures;
        ret = pthread_mutex_unlock(&res_mutex);
        res_updated = false;
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
    }
private:
    unsigned char *image;
    cv_pixel_format pixel_format;
    int image_width;
    int image_height;
    int image_stride;
    int buffer_size;
    cv_face_orientation orientation;
    cv_face_t *p_faces;
    int faces_count;
    cv_result_t track_result;
    SensetimeFaceTrackConfig config;
                // explode the range
                if (config.explode_feature_rect_x != 0)
                {
                    faceFeature.rect.leftTop.X = clamp(
                            faceFeature.rect.leftTop.X - config.explode_feature_rect_x, 0,
                            faceFeature.rect.leftTop.X);
                    faceFeature.rect.rightBottom.X = clamp(
                            faceFeature.rect.rightBottom.X + config.explode_feature_rect_x,
                            faceFeature.rect.rightBottom.X, int(image_width - 1));
                }
                if (config.explode_feature_rect_y != 0)
                {
                    faceFeature.rect.leftTop.Y = clamp(
                            faceFeature.rect.leftTop.Y - config.explode_feature_rect_y, 0,
                            faceFeature.rect.leftTop.Y);
                    faceFeature.rect.rightBottom.Y = clamp(
                            faceFeature.rect.rightBottom.Y + config.explode_feature_rect_y,
                            faceFeature.rect.rightBottom.Y, int(image_height - 1));
                }
                faceFeatures.push_back(faceFeature);
                LOG_ERROR << "Feature id: " << faceFeature.id << LOG_ERROR;
            }
        }
        res_updated = true;
    }
};
struct PL_SensetimeFaceTrackMultiTrd_Internal {
    //uint8_t buffer[1920*1080*4];
    //size_t buffSize;
    //size_t buffSizeMax;
    MB_Frame lastFrame;
    PipeMaterial pmList[2];
    SensetimeFaceTrackThread trackThread;
    st_ff_vect_t faceFeatures;
    bool payError;
struct PL_SensetimeFaceTrackMultiTrd_Internal
{
    //uint8_t buffer[1920*1080*4];
    //size_t buffSize;
    //size_t buffSizeMax;
    MB_Frame lastFrame;
    PipeMaterial pmList[2];
    SensetimeFaceTrackThread trackThread;
    st_ff_vect_t faceFeatures;
    bool payError;
    size_t frameCount;
    PL_SensetimeFaceTrackMultiTrd_Internal() :
    //buffSize(0), buffSizeMax(sizeof(buffer)),
            lastFrame(), pmList(), frameCount(0) {
    }
    PL_SensetimeFaceTrackMultiTrd_Internal() :
    //buffSize(0), buffSizeMax(sizeof(buffer)),
            lastFrame(), pmList(), frameCount(0)
    {
    }
    ~PL_SensetimeFaceTrackMultiTrd_Internal() {
    }
    ~PL_SensetimeFaceTrackMultiTrd_Internal()
    {
    }
    void reset() {
        //buffSize = 0;
        payError = true;
    void reset()
    {
        //buffSize = 0;
        payError = true;
        MB_Frame _lastFrame;
        lastFrame = _lastFrame;
        PipeMaterial _pm;
        pmList[0] = _pm;
        pmList[1] = _pm;
        frameCount = 0;
    }
};
PipeLineElem *create_PL_SensetimeFaceTrackMultiTrd() {
    return new PL_SensetimeFaceTrackMultiTrd;
PipeLineElem *create_PL_SensetimeFaceTrackMultiTrd()
{
    return new PL_SensetimeFaceTrackMultiTrd;
}
PL_SensetimeFaceTrackMultiTrd::PL_SensetimeFaceTrackMultiTrd() : internal(
        new PL_SensetimeFaceTrackMultiTrd_Internal){
        new PL_SensetimeFaceTrackMultiTrd_Internal)
{
}
PL_SensetimeFaceTrackMultiTrd::~PL_SensetimeFaceTrackMultiTrd() {
    delete (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    internal = nullptr;
    pthread_mutex_destroy(&pay_mutex);;
    pthread_mutex_destroy(&gain_mutex);
PL_SensetimeFaceTrackMultiTrd::~PL_SensetimeFaceTrackMultiTrd()
{
    delete (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    internal = nullptr;
}
bool PL_SensetimeFaceTrackMultiTrd::init(void *args) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->reset();
    SensetimeFaceTrackConfig *config = (SensetimeFaceTrackConfig *) args;
    if (config->point_size == 21)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_21;
    else if (config->point_size == 106)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_106;
    else {
        LOG_ERROR << "alignment point size must be 21 or 106" << LOG_ENDL;
        return false;
    }
    int res = in->trackThread.initial_license("/data/license.lic");
    if(res!=0)return false;
    in->trackThread.set_config(*config);
    res = in->trackThread.initial();
    if(res!=0)return false;
    return true;
bool PL_SensetimeFaceTrackMultiTrd::init(void *args)
{
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->reset();
    SensetimeFaceTrackConfig *config = (SensetimeFaceTrackConfig *) args;
    if (config->point_size == 21)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_21;
    else if (config->point_size == 106)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_106;
    else
    {
        LOG_ERROR << "alignment point size must be 21 or 106" << LOG_ENDL;
        return false;
    }
    int res = in->trackThread.initial_license("/data/license.lic");
    if (res != 0)return false;
    in->trackThread.set_config(*config);
    res = in->trackThread.initial();
    if (res != 0)return false;
    return true;
}
void PL_SensetimeFaceTrackMultiTrd::finit() {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
void PL_SensetimeFaceTrackMultiTrd::finit()
{
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
}
int doFaceTrack(PL_SensetimeFaceTrackMultiTrd_Internal *in,
                uint8_t *buffer, size_t width, size_t height, size_t stride,
                cv_pixel_format cvPixFmt) {
    PipeLineElemTimingDebugger td(nullptr);
    in->trackThread.do_face_track(buffer, cvPixFmt, width, height, stride);
    return 0;
                uint8_t *buffer, size_t width, size_t height, size_t stride,
                cv_pixel_format cvPixFmt)
{
    PipeLineElemTimingDebugger td(nullptr);
    in->trackThread.do_face_track(buffer, cvPixFmt, width, height, stride);
    return 0;
}
/*static*/ bool
PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV(const PipeMaterial *pm, void *args) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) args;
/*static*/ bool PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV(const PipeMaterial *pm, void *args)
{
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) args;
    if (pm->type != PipeMaterial::PMT_FRAME) {
        LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
        return false;
    }
    if (pm->type != PipeMaterial::PMT_FRAME)
    {
        LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
        return false;
    }
    if (pm->buffer == nullptr)
        return false;
    MB_Frame *frame = (MB_Frame *) pm->buffer;
    if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12) {
        LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
        return false;
    }
    MB_Frame *frame = (MB_Frame *) pm->buffer;
    if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
    {
        LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
        return false;
    }
    int res = 0;
    if (frame->type == MB_Frame::MBFT_YUV420)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height,
                                 frame->width, CV_PIX_FMT_YUV420P);
    else if (frame->type == MB_Frame::MBFT_NV12)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height,
                                 frame->width, CV_PIX_FMT_NV12);
    int res = 0;
    if (frame->type == MB_Frame::MBFT_YUV420)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
    else if (frame->type == MB_Frame::MBFT_NV12)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
    if (res < 0) {
        in->payError = true;
        return false;
    } else
        in->payError = false;
    if (res < 0)
    {
        in->payError = true;
        return false;
    }
    else
        in->payError = false;
    //in->buffer readly
    in->lastFrame.type = frame->type;
    in->lastFrame.buffer = frame->buffer;//#todo should copy
    in->lastFrame.buffSize = frame->buffSize;
    in->lastFrame.width = frame->width;
    in->lastFrame.height = frame->height;
    in->lastFrame.pts = frame->pts;
    return false;
}
bool PL_SensetimeFaceTrackMultiTrd::pay(const PipeMaterial &pm) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    //LOG_ERROR << "PL_SensetimeFaceTrackMultiTrd pay" << LOG_ENDL;
    in->payError = true;
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
bool PL_SensetimeFaceTrackMultiTrd::pay(const PipeMaterial &pm)
{
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    //LOG_ERROR << "PL_SensetimeFaceTrackMultiTrd pay" << LOG_ENDL;
    in->payError = true;
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    in->frameCount++;
    return !(in->payError);
}
bool PL_SensetimeFaceTrackMultiTrd::gain(PipeMaterial &pm) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->trackThread.get_face_features(in->faceFeatures);
    if (in->payError) {
        pm.former = this;
        return false;
    }
bool PL_SensetimeFaceTrackMultiTrd::gain(PipeMaterial &pm)
{
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->trackThread.get_face_features(in->faceFeatures);
    if (in->payError)
    {
        pm.former = this;
        return false;
    }
    if (!in->trackThread.get_config()->generate_face_feature) {
        pm.type = PipeMaterial::PMT_FRAME;
        pm.buffer = &(in->lastFrame);
        pm.buffSize = 0;
    } else {
        in->pmList[0].type = PipeMaterial::PMT_FRAME;
        in->pmList[0].buffer = &(in->lastFrame);
        in->pmList[0].buffSize = 0;
        in->pmList[0].former = this;
    if (!in->trackThread.get_config()->generate_face_feature)
    {
        pm.type = PipeMaterial::PMT_FRAME;
        pm.buffer = &(in->lastFrame);
        pm.buffSize = 0;
    }
    else
    {
        in->pmList[0].type = PipeMaterial::PMT_FRAME;
        in->pmList[0].buffer = &(in->lastFrame);
        in->pmList[0].buffSize = 0;
        in->pmList[0].former = this;
        in->pmList[1].type = PipeMaterial::PMT_PTR;
        in->pmList[1].buffer = &(in->faceFeatures);
        in->pmList[1].buffSize = 0;
        in->pmList[1].former = this;
        pm.type = PipeMaterial::PMT_PM_LIST;
        pm.buffer = in->pmList;
        pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
    }
    pm.former = this;
    return true;
}
void *PL_SensetimeFaceTrackMultiTrd::pay_thd(void *arg) {
RtspFace/PL_SensetimeFaceTrackMultiTrd.h
@@ -6,85 +6,7 @@
#include <vector>
#include <cmath>
struct SensetimeFaceFeature
{
    PLGH_Rect rect;
    int id;
    float score;
    /* Camera vision vector point to face
     *  * * *
     *  * * *
     *  * * *
     */
    float yaw;
    /* Camera vision vector point to face
     *  * * *
     *  * * *
     *  * * *
     */
    float pitch;
    /* Camera vision vector point to face
     *  * * *
     *  * * *
     *  * * *
     */
    float roll;
    float eyeDistance;
    PLGH_Path featurePoints;
    bool outOfFrame;
    SensetimeFaceFeature() :
        rect(), id(0), score(0.0), yaw(0.0), pitch(0.0), roll(0.0), eyeDistance(0.0), featurePoints(),
        outOfFrame(false)
    {}
    bool test_face_in_cone(float _yaw, float _pitch, float _roll) const
    {
        return  (std::abs(yaw) < _yaw && std::abs(pitch) < _pitch && std::abs(roll) < _roll);
    }
};
typedef std::vector<SensetimeFaceFeature> st_ff_vect_t;
struct SensetimeFaceTrackConfig
{
    int point_size; // 21 / 106
    int point_size_config; // CV_DETECT_ENABLE_ALIGN_21 / CV_DETECT_ENABLE_ALIGN_106
    int detect_face_cnt_limit; // -1
    bool draw_face_rect;
    bool draw_face_feature_point;
    bool generate_face_feature;
    bool generate_face_point;
    int explode_feature_rect_x;
    int explode_feature_rect_y;
    bool clamp_feature_rect; // clamp fr width and height
    int doTrackPerFrame;
    std::string license_file_path;
    std::string license_str;
    float visionConeAngle;
    bool evenWidthHeight;
    float score_min;
    SensetimeFaceTrackConfig() :
        point_size(21), point_size_config(-1), detect_face_cnt_limit(-1),
        draw_face_rect(true), draw_face_feature_point(true), generate_face_feature(false), generate_face_point(false),
        explode_feature_rect_x(0), explode_feature_rect_y(0),
        clamp_feature_rect(false), doTrackPerFrame(1),
        license_file_path(), license_str(),
        visionConeAngle(90.1), evenWidthHeight(true),
        score_min(0.0f)
    { }
};
#include "PL_SensetimeFaceTrack.h"
class PL_SensetimeFaceTrackMultiTrd : public PipeLineElem
{
@@ -97,14 +19,12 @@
    virtual bool pay(const PipeMaterial& pm);
    virtual bool gain(PipeMaterial& pm);
private:
    static bool pay_breaker_MBFT_YUV(const PipeMaterial* pm, void* args);
private:
    static void* pay_thd(void *arg);
    void* internal;
    pthread_mutex_t pay_mutex;
    pthread_mutex_t gain_mutex;
};
PipeLineElem* create_PL_SensetimeFaceTrackMultiTrd();