houxiao
2017-08-01 ab5f950eb26752a7a26ea746dd22a41a00b1074a
RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp
@@ -4,477 +4,529 @@
#include "MediaHelper.h"
#ifdef USE_OPENCV
#include <opencv2/opencv.hpp>
#endif
#include <cv_face.h>
class SensetimeFaceTrackThread {
class SensetimeFaceTrackThread
{
private:
   pthread_t track_thid;
   pthread_mutex_t thd_mutex;
   pthread_mutex_t res_mutex;
   mutable volatile bool thread_running;
   mutable volatile bool buffer_updated;
   mutable volatile bool res_updated;
   mutable volatile bool is_busy;
   unsigned char *image;
   cv_pixel_format pixel_format;
   int image_width;
   int image_height;
   int image_stride;
   int buffer_size;
   cv_face_orientation orientation;
   cv_face_t *p_faces;
   int faces_count;
   cv_result_t track_result;
   SensetimeFaceTrackConfig config;
   st_ff_vect_t faceFeatures;
   cv_handle_t tracker_handle;
public:
    SensetimeFaceTrackThread():
                                image_width(0), image_height(0),
                                image_stride(0),track_result(CV_OK),
                                thread_running(false) , buffer_updated(false),
                                image(nullptr),is_busy(true),config(),
                                buffer_size(0), res_updated(false)
    {
    }
   SensetimeFaceTrackThread() :
         image_width(0), image_height(0),
         image_stride(0), track_result(CV_OK),
         thread_running(false), buffer_updated(false),
         image(nullptr), is_busy(true), config(),
         buffer_size(0), res_updated(false)
   {
   }
    ~SensetimeFaceTrackThread() {
        thread_running = false;
        pthread_mutex_unlock(&thd_mutex);
        pthread_join(track_thid, nullptr);
        pthread_mutex_destroy(&thd_mutex);
        pthread_mutex_destroy(&res_mutex);
   ~SensetimeFaceTrackThread()
   {
      thread_running = false;
      pthread_mutex_unlock(&thd_mutex);
      pthread_join(track_thid, nullptr);
      pthread_mutex_destroy(&thd_mutex);
      pthread_mutex_destroy(&res_mutex);
        cv_face_destroy_tracker(tracker_handle);
      cv_face_destroy_tracker(tracker_handle);
        delete(image);
        image = nullptr;
    }
      delete (image);
      image = nullptr;
   }
    int initial(){
        int ret = pthread_create(&track_thid, NULL, track_thread, this);
        if (ret != 0) {
            LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        } else {
            thread_running = true;
        }
   int initial()
   {
      int ret = pthread_create(&track_thid, NULL, track_thread, this);
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
         thread_running = false;
         return ret;
      }
      else
      {
         thread_running = true;
      }
        ret = pthread_mutex_init(&thd_mutex, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_init thd_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
      ret = pthread_mutex_init(&thd_mutex, nullptr);
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_mutex_init thd_mutex: %s/n", strerror(ret));
         thread_running = false;
         return ret;
      }
        ret = pthread_mutex_init(&res_mutex, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_init res_mutex: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
      ret = pthread_mutex_init(&res_mutex, nullptr);
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_mutex_init res_mutex: %s/n", strerror(ret));
         thread_running = false;
         return ret;
      }
        ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config);
        if (ret != 0) {
            LOGP(ERROR, "cv_face_create_tracker: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        ret = cv_face_track_set_detect_face_cnt_limit(tracker_handle, config.detect_face_cnt_limit, nullptr);
        if (ret != 0) {
            LOGP(ERROR, "cv_face_track_set_detect_face_cnt_limit: %s/n", strerror(ret));
            thread_running = false;
            return ret;
        }
        return ret;
    }
      ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config);
      if (ret != 0)
      {
         LOGP(ERROR, "cv_face_create_tracker: %s/n", strerror(ret));
         thread_running = false;
         return ret;
      }
    void do_face_track(
            const unsigned char *image,
            cv_pixel_format pixel_format,
            int image_width,
            int image_height,
            int image_stride,
            cv_face_orientation orientation = CV_FACE_UP
    ) {
        if(is_busy)return;
        copy_image(image, image_height, image_stride);
        this->pixel_format = pixel_format;
        this->image_width = image_width;
        this->image_height = image_height;
        this->image_stride = image_stride;
        this->orientation = orientation;
        buffer_updated =true;
        int ret = pthread_mutex_unlock(&thd_mutex);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_unlock: %s/n", strerror(ret));
            thread_running = false;
        }
    }
      ret = cv_face_track_set_detect_face_cnt_limit(tracker_handle, config.detect_face_cnt_limit, nullptr);
      if (ret != 0)
      {
         LOGP(ERROR, "cv_face_track_set_detect_face_cnt_limit: %s/n", strerror(ret));
         thread_running = false;
         return ret;
      }
    int initial_license(char *path) {
        int res = 0;
        FILE *licFile = fopen(path, "rb");
        if (licFile != nullptr) {
            char licBuffer[1025 * 5] = {'\0'};
            size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
            fclose(licFile);
      return ret;
   }
            if (licSize > 0) {
                res = cv_face_init_license_config(licBuffer);
                LOG_INFO << "cv_face_init_license_config 1 ret=" << res << LOG_ENDL;
                return res;
            }
        } else {
            LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
            res = errno;
            return res;
        }
        return res;
    }
   void do_face_track(
         const unsigned char *image,
         cv_pixel_format pixel_format,
         int image_width,
         int image_height,
         int image_stride,
         cv_face_orientation orientation = CV_FACE_UP)
   {
      if (is_busy)
         return;
      copy_image(image, image_height, image_stride);
      this->pixel_format = pixel_format;
      this->image_width = image_width;
      this->image_height = image_height;
      this->image_stride = image_stride;
      this->orientation = orientation;
      buffer_updated = true;
      int ret = pthread_mutex_unlock(&thd_mutex);
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_mutex_unlock: %s/n", strerror(ret));
         thread_running = false;
      }
   }
    void set_config(SensetimeFaceTrackConfig& cfg){
        config = cfg;
    }
   int initial_license(char *path)
   {
      int res = 0;
      FILE *licFile = fopen(path, "rb");
      if (licFile != nullptr)
      {
         char licBuffer[1025 * 5] = {'\0'};
         size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
         fclose(licFile);
    const SensetimeFaceTrackConfig* get_config(){
        return &config;
    }
         if (licSize > 0)
         {
            res = cv_face_init_license_config(licBuffer);
            LOG_INFO << "cv_face_init_license_config 1 ret=" << res << LOG_ENDL;
            return res;
         }
      }
      else
      {
         LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
         res = errno;
         return res;
      }
      return res;
   }
   void set_config(SensetimeFaceTrackConfig &cfg)
   {
      config = cfg;
   }
   const SensetimeFaceTrackConfig *get_config()
   {
      return &config;
   }
   void get_face_features(st_ff_vect_t &res_vector)
   {
      if (!res_updated) return;
      int ret = pthread_mutex_lock(&res_mutex);
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
         thread_running = false;
      }
      res_vector = faceFeatures;
      ret = pthread_mutex_unlock(&res_mutex);
      res_updated = false;
      if (ret != 0)
      {
         LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
         thread_running = false;
      }
   }
private:
    pthread_t track_thid;
    pthread_mutex_t thd_mutex;
    pthread_mutex_t res_mutex;
    mutable volatile bool thread_running;
    mutable volatile bool buffer_updated;
    mutable volatile bool res_updated;
    mutable volatile bool is_busy;
private:
    static void *track_thread(void *Args) {
        SensetimeFaceTrackThread* tracker = (SensetimeFaceTrackThread*)Args;
        while(tracker->thread_running)
        {
            tracker->is_busy = false;
            int ret = pthread_mutex_lock(&tracker->thd_mutex);
            tracker->is_busy = true;
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_lock: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            if(!tracker->buffer_updated)
                continue;
            tracker->track_result = cv_face_track(tracker->tracker_handle , tracker->image,
                                                  tracker->pixel_format, tracker->image_width,
                                                  tracker->image_height, tracker->image_stride,
                                                  tracker->orientation, &tracker->p_faces,
                                                  &tracker->faces_count);
   static void *track_thread(void *Args)
   {
      SensetimeFaceTrackThread *tracker = (SensetimeFaceTrackThread *) Args;
      while (tracker->thread_running)
      {
         tracker->is_busy = false;
         int ret = pthread_mutex_lock(&tracker->thd_mutex);
         tracker->is_busy = true;
         if (ret != 0)
         {
            LOGP(ERROR, "pthread_mutex_lock: %s/n", strerror(ret));
            tracker->thread_running = false;
            break;
         }
         if (!tracker->buffer_updated)
            continue;
         tracker->track_result = cv_face_track(tracker->tracker_handle, tracker->image,
                                      tracker->pixel_format, tracker->image_width,
                                      tracker->image_height, tracker->image_stride,
                                      tracker->orientation, &tracker->p_faces,
                                      &tracker->faces_count);
         if (ret != 0)
         {
            LOGP(ERROR, "cv_face_track: %s/n", strerror(ret));
            tracker->thread_running = false;
            break;
         }
            if (ret != 0) {
                LOGP(ERROR, "cv_face_track: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
         ret = pthread_mutex_lock(&tracker->res_mutex);
         if (ret != 0)
         {
            LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
            tracker->thread_running = false;
            break;
         }
         tracker->extract_features();
         ret = pthread_mutex_unlock(&tracker->res_mutex);
         if (ret != 0)
         {
            LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
            tracker->thread_running = false;
            break;
         }
            ret = pthread_mutex_lock(&tracker->res_mutex);
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
            tracker->extract_features();
            ret = pthread_mutex_unlock(&tracker->res_mutex);
            if (ret != 0) {
                LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
                tracker->thread_running = false;
                break;
            }
         cv_face_release_tracker_result(tracker->p_faces, tracker->faces_count);
         tracker->buffer_updated = false;
      }
   }
            cv_face_release_tracker_result(tracker->p_faces, tracker->faces_count);
            tracker->buffer_updated = false;
        }
    }
   void copy_image(const unsigned char *src, int height, int stride)
   {
      int size = height * stride * 1.5;
      if (image_size() < size)
      {
         if (image != nullptr)
         {
            delete (image);
         }
         image = new unsigned char[size];
         buffer_size = size;
      }
      memcpy(image, src, size);
   }
    void copy_image(const unsigned char *src, int height, int stride){
        int size = height * stride * 1.5;
        if(image_size() < size){
            if(image != nullptr){
                delete(image);
            }
            image = new unsigned char[size];
            buffer_size = size;
        }
        memcpy(image, src, size);
    }
    int image_size(){
        return buffer_size;
    }
private:
    st_ff_vect_t faceFeatures;
    cv_handle_t tracker_handle;
    void extract_features(){
        faceFeatures.clear();
        for (int i = 0; i < faces_count; i++) {
            if (MH_F_LT(p_faces[i].score, config.score_min)) {
                continue;
            }
            SensetimeFaceFeature faceFeature;
            faceFeature.rect.leftTop.X = p_faces[i].rect.left;
            faceFeature.rect.leftTop.Y = p_faces[i].rect.top;
            faceFeature.rect.rightBottom.X = p_faces[i].rect.right;
            faceFeature.rect.rightBottom.Y = p_faces[i].rect.bottom;
            faceFeature.id = p_faces[i].ID;
            faceFeature.score = p_faces[i].score;
            faceFeature.yaw = p_faces[i].yaw;
            faceFeature.pitch = p_faces[i].pitch;
            faceFeature.roll = p_faces[i].roll;
            faceFeature.eyeDistance = p_faces[i].eye_dist;
   int image_size()
   {
      return buffer_size;
   }
            LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
                 p_faces[i].rect.left, p_faces[i].rect.top,
                 p_faces[i].rect.right, p_faces[i].rect.bottom, p_faces[i].ID);
   void extract_features()
   {
      faceFeatures.clear();
      for (int i = 0; i < faces_count; i++)
      {
         if (MH_F_LT(p_faces[i].score, config.score_min))
         {
            continue;
         }
         SensetimeFaceFeature faceFeature;
         faceFeature.rect.leftTop.X = p_faces[i].rect.left;
         faceFeature.rect.leftTop.Y = p_faces[i].rect.top;
         faceFeature.rect.rightBottom.X = p_faces[i].rect.right;
         faceFeature.rect.rightBottom.Y = p_faces[i].rect.bottom;
         faceFeature.id = p_faces[i].ID;
         faceFeature.score = p_faces[i].score;
         faceFeature.yaw = p_faces[i].yaw;
         faceFeature.pitch = p_faces[i].pitch;
         faceFeature.roll = p_faces[i].roll;
         faceFeature.eyeDistance = p_faces[i].eye_dist;
            LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
                 p_faces[i].yaw, p_faces[i].pitch, p_faces[i].roll, p_faces[i].eye_dist);
            for (int j = 0; j < p_faces[i].points_count; j++) {
                PLGH_Point featurePoint;
                featurePoint.X = p_faces[i].points_array[j].x;
                featurePoint.Y = p_faces[i].points_array[j].y;
                faceFeature.featurePoints.points.push_back(featurePoint);
            }
            if (config.generate_face_point) {
                if (faceFeature.rect.leftTop.X < 0 ||
                    faceFeature.rect.rightBottom.X > image_height ||
                    faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > image_width)
                    faceFeature.outOfFrame = true;
            }
            if (config.generate_face_feature) {
                if (config.evenWidthHeight) {
                    if (faceFeature.rect.leftTop.X % 2 != 0) faceFeature.rect.leftTop.X--;
                    if (faceFeature.rect.leftTop.Y % 2 != 0) faceFeature.rect.leftTop.Y--;
                    if (faceFeature.rect.rightBottom.X % 2 != 0) faceFeature.rect.rightBottom.X--;
                    if (faceFeature.rect.rightBottom.Y % 2 != 0) faceFeature.rect.rightBottom.Y--;
                }
         LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
             p_faces[i].rect.left, p_faces[i].rect.top,
             p_faces[i].rect.right, p_faces[i].rect.bottom, p_faces[i].ID);
                // explode the range
                if (config.explode_feature_rect_x != 0) {
                    faceFeature.rect.leftTop.X = clamp(
                            faceFeature.rect.leftTop.X - config.explode_feature_rect_x, 0,
                            faceFeature.rect.leftTop.X);
                    faceFeature.rect.rightBottom.X = clamp(
                            faceFeature.rect.rightBottom.X + config.explode_feature_rect_x,
                            faceFeature.rect.rightBottom.X, int(image_width - 1));
                }
         LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
             p_faces[i].yaw, p_faces[i].pitch, p_faces[i].roll, p_faces[i].eye_dist);
         for (int j = 0; j < p_faces[i].points_count; j++)
         {
            PLGH_Point featurePoint;
            featurePoint.X = p_faces[i].points_array[j].x;
            featurePoint.Y = p_faces[i].points_array[j].y;
            faceFeature.featurePoints.points.push_back(featurePoint);
         }
         if (config.generate_face_point)
         {
            if (faceFeature.rect.leftTop.X < 0 ||
               faceFeature.rect.rightBottom.X > image_height ||
               faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > image_width)
               faceFeature.outOfFrame = true;
         }
         if (config.generate_face_feature)
         {
            if (config.evenWidthHeight)
            {
               if (faceFeature.rect.leftTop.X % 2 != 0) faceFeature.rect.leftTop.X--;
               if (faceFeature.rect.leftTop.Y % 2 != 0) faceFeature.rect.leftTop.Y--;
               if (faceFeature.rect.rightBottom.X % 2 != 0) faceFeature.rect.rightBottom.X--;
               if (faceFeature.rect.rightBottom.Y % 2 != 0) faceFeature.rect.rightBottom.Y--;
            }
                if (config.explode_feature_rect_y != 0) {
                    faceFeature.rect.leftTop.Y = clamp(
                            faceFeature.rect.leftTop.Y - config.explode_feature_rect_y, 0,
                            faceFeature.rect.leftTop.Y);
                    faceFeature.rect.rightBottom.Y = clamp(
                            faceFeature.rect.rightBottom.Y + config.explode_feature_rect_y,
                            faceFeature.rect.rightBottom.Y, int(image_height - 1));
                }
                faceFeatures.push_back(faceFeature);
                LOG_ERROR<<"Feature id: "<<faceFeature.id <<LOG_ERROR;
            }
        }
        res_updated = true;
    }
public:
    void get_face_features(st_ff_vect_t& res_vector){
        if(!res_updated) return;
        int ret = pthread_mutex_lock(&res_mutex);
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_lock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
        res_vector = faceFeatures;
        ret = pthread_mutex_unlock(&res_mutex);
        res_updated = false;
        if (ret != 0) {
            LOGP(ERROR, "pthread_mutex_unlock res_mutex: %s/n", strerror(ret));
            thread_running = false;
        }
    }
private:
    unsigned char *image;
    cv_pixel_format pixel_format;
    int image_width;
    int image_height;
    int image_stride;
    int buffer_size;
    cv_face_orientation orientation;
    cv_face_t *p_faces;
    int faces_count;
    cv_result_t track_result;
    SensetimeFaceTrackConfig config;
            // explode the range
            if (config.explode_feature_rect_x != 0)
            {
               faceFeature.rect.leftTop.X = clamp(
                     faceFeature.rect.leftTop.X - config.explode_feature_rect_x, 0,
                     faceFeature.rect.leftTop.X);
               faceFeature.rect.rightBottom.X = clamp(
                     faceFeature.rect.rightBottom.X + config.explode_feature_rect_x,
                     faceFeature.rect.rightBottom.X, int(image_width - 1));
            }
            if (config.explode_feature_rect_y != 0)
            {
               faceFeature.rect.leftTop.Y = clamp(
                     faceFeature.rect.leftTop.Y - config.explode_feature_rect_y, 0,
                     faceFeature.rect.leftTop.Y);
               faceFeature.rect.rightBottom.Y = clamp(
                     faceFeature.rect.rightBottom.Y + config.explode_feature_rect_y,
                     faceFeature.rect.rightBottom.Y, int(image_height - 1));
            }
            faceFeatures.push_back(faceFeature);
            LOG_ERROR << "Feature id: " << faceFeature.id << LOG_ERROR;
         }
      }
      res_updated = true;
   }
};
struct PL_SensetimeFaceTrackMultiTrd_Internal {
    //uint8_t buffer[1920*1080*4];
    //size_t buffSize;
    //size_t buffSizeMax;
    MB_Frame lastFrame;
    PipeMaterial pmList[2];
    SensetimeFaceTrackThread trackThread;
    st_ff_vect_t faceFeatures;
    bool payError;
struct PL_SensetimeFaceTrackMultiTrd_Internal
{
   //uint8_t buffer[1920*1080*4];
   //size_t buffSize;
   //size_t buffSizeMax;
   MB_Frame lastFrame;
   PipeMaterial pmList[2];
   SensetimeFaceTrackThread trackThread;
   st_ff_vect_t faceFeatures;
   bool payError;
    size_t frameCount;
   size_t frameCount;
    PL_SensetimeFaceTrackMultiTrd_Internal() :
    //buffSize(0), buffSizeMax(sizeof(buffer)),
            lastFrame(), pmList(), frameCount(0) {
    }
   PL_SensetimeFaceTrackMultiTrd_Internal() :
   //buffSize(0), buffSizeMax(sizeof(buffer)),
         lastFrame(), pmList(), frameCount(0)
   {
   }
    ~PL_SensetimeFaceTrackMultiTrd_Internal() {
    }
   ~PL_SensetimeFaceTrackMultiTrd_Internal()
   {
   }
    void reset() {
        //buffSize = 0;
        payError = true;
   void reset()
   {
      //buffSize = 0;
      payError = true;
        MB_Frame _lastFrame;
        lastFrame = _lastFrame;
      MB_Frame _lastFrame;
      lastFrame = _lastFrame;
        PipeMaterial _pm;
        pmList[0] = _pm;
        pmList[1] = _pm;
        frameCount = 0;
    }
      PipeMaterial _pm;
      pmList[0] = _pm;
      pmList[1] = _pm;
      frameCount = 0;
   }
};
PipeLineElem *create_PL_SensetimeFaceTrackMultiTrd() {
    return new PL_SensetimeFaceTrackMultiTrd;
PipeLineElem *create_PL_SensetimeFaceTrackMultiTrd()
{
   return new PL_SensetimeFaceTrackMultiTrd;
}
// Allocates the internal state; real setup happens in init().
PL_SensetimeFaceTrackMultiTrd::PL_SensetimeFaceTrackMultiTrd() :
      internal(new PL_SensetimeFaceTrackMultiTrd_Internal)
{
}
// Releases the internal state; the tracker worker thread is joined by the
// SensetimeFaceTrackThread destructor inside `internal`.
// NOTE(review): an older revision also destroyed pay_mutex/gain_mutex here;
// those members are not visible in this file -- confirm against the header.
PL_SensetimeFaceTrackMultiTrd::~PL_SensetimeFaceTrackMultiTrd()
{
   delete (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
   internal = nullptr;
}
bool PL_SensetimeFaceTrackMultiTrd::init(void *args) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->reset();
    SensetimeFaceTrackConfig *config = (SensetimeFaceTrackConfig *) args;
    if (config->point_size == 21)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_21;
    else if (config->point_size == 106)
        config->point_size_config = CV_DETECT_ENABLE_ALIGN_106;
    else {
        LOG_ERROR << "alignment point size must be 21 or 106" << LOG_ENDL;
        return false;
    }
    int res = in->trackThread.initial_license("/data/license.lic");
    if(res!=0)return false;
    in->trackThread.set_config(*config);
    res = in->trackThread.initial();
    if(res!=0)return false;
    return true;
bool PL_SensetimeFaceTrackMultiTrd::init(void *args)
{
   PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
   in->reset();
   SensetimeFaceTrackConfig *config = (SensetimeFaceTrackConfig *) args;
   if (config->point_size == 21)
      config->point_size_config = CV_DETECT_ENABLE_ALIGN_21;
   else if (config->point_size == 106)
      config->point_size_config = CV_DETECT_ENABLE_ALIGN_106;
   else
   {
      LOG_ERROR << "alignment point size must be 21 or 106" << LOG_ENDL;
      return false;
   }
   int res = in->trackThread.initial_license("/data/license.lic");
   if (res != 0)return false;
   in->trackThread.set_config(*config);
   res = in->trackThread.initial();
   if (res != 0)return false;
   return true;
}
void PL_SensetimeFaceTrackMultiTrd::finit() {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
void PL_SensetimeFaceTrackMultiTrd::finit()
{
   PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
}
int doFaceTrack(PL_SensetimeFaceTrackMultiTrd_Internal *in,
                uint8_t *buffer, size_t width, size_t height, size_t stride,
                cv_pixel_format cvPixFmt) {
    PipeLineElemTimingDebugger td(nullptr);
    in->trackThread.do_face_track(buffer, cvPixFmt, width, height, stride);
    return 0;
            uint8_t *buffer, size_t width, size_t height, size_t stride,
            cv_pixel_format cvPixFmt)
{
   PipeLineElemTimingDebugger td(nullptr);
   in->trackThread.do_face_track(buffer, cvPixFmt, width, height, stride);
   return 0;
}
/*static*/ bool
PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV(const PipeMaterial *pm, void *args) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) args;
/*static*/ bool PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV(const PipeMaterial *pm, void *args)
{
   PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) args;
    if (pm->type != PipeMaterial::PMT_FRAME) {
        LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
        return false;
    }
   if (pm->type != PipeMaterial::PMT_FRAME)
   {
      LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
      return false;
   }
    if (pm->buffer == nullptr)
        return false;
   if (pm->buffer == nullptr)
      return false;
    MB_Frame *frame = (MB_Frame *) pm->buffer;
    if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12) {
        LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
        return false;
    }
   MB_Frame *frame = (MB_Frame *) pm->buffer;
   if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
   {
      LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
      return false;
   }
    int res = 0;
    if (frame->type == MB_Frame::MBFT_YUV420)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height,
                                 frame->width, CV_PIX_FMT_YUV420P);
    else if (frame->type == MB_Frame::MBFT_NV12)
        res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height,
                                 frame->width, CV_PIX_FMT_NV12);
   int res = 0;
   if (frame->type == MB_Frame::MBFT_YUV420)
      res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
   else if (frame->type == MB_Frame::MBFT_NV12)
      res = doFaceTrack(in, (uint8_t *) frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
    if (res < 0) {
        in->payError = true;
        return false;
    } else
        in->payError = false;
   if (res < 0)
   {
      in->payError = true;
      return false;
   }
   else
      in->payError = false;
    //in->buffer readly
   //in->buffer readly
    in->lastFrame.type = frame->type;
    in->lastFrame.buffer = frame->buffer;//#todo should copy
    in->lastFrame.buffSize = frame->buffSize;
    in->lastFrame.width = frame->width;
    in->lastFrame.height = frame->height;
    in->lastFrame.pts = frame->pts;
   in->lastFrame.type = frame->type;
   in->lastFrame.buffer = frame->buffer;//#todo should copy
   in->lastFrame.buffSize = frame->buffSize;
   in->lastFrame.width = frame->width;
   in->lastFrame.height = frame->height;
   in->lastFrame.pts = frame->pts;
    return false;
   return false;
}
bool PL_SensetimeFaceTrackMultiTrd::pay(const PipeMaterial &pm) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    //LOG_ERROR << "PL_SensetimeFaceTrackMultiTrd pay" << LOG_ENDL;
    in->payError = true;
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12,
                  PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
bool PL_SensetimeFaceTrackMultiTrd::pay(const PipeMaterial &pm)
{
   PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
   //LOG_ERROR << "PL_SensetimeFaceTrackMultiTrd pay" << LOG_ENDL;
   in->payError = true;
   if (in->payError)
      pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
   if (in->payError)
      pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
   if (in->payError)
      pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
   if (in->payError)
      pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrackMultiTrd::pay_breaker_MBFT_YUV, in);
    in->frameCount++;
    return !(in->payError);
   in->frameCount++;
   return !(in->payError);
}
bool PL_SensetimeFaceTrackMultiTrd::gain(PipeMaterial &pm) {
    PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
    in->trackThread.get_face_features(in->faceFeatures);
    if (in->payError) {
        pm.former = this;
        return false;
    }
bool PL_SensetimeFaceTrackMultiTrd::gain(PipeMaterial &pm)
{
   PL_SensetimeFaceTrackMultiTrd_Internal *in = (PL_SensetimeFaceTrackMultiTrd_Internal *) internal;
   in->trackThread.get_face_features(in->faceFeatures);
   if (in->payError)
   {
      pm.former = this;
      return false;
   }
    if (!in->trackThread.get_config()->generate_face_feature) {
        pm.type = PipeMaterial::PMT_FRAME;
        pm.buffer = &(in->lastFrame);
        pm.buffSize = 0;
    } else {
        in->pmList[0].type = PipeMaterial::PMT_FRAME;
        in->pmList[0].buffer = &(in->lastFrame);
        in->pmList[0].buffSize = 0;
        in->pmList[0].former = this;
   if (!in->trackThread.get_config()->generate_face_feature)
   {
      pm.type = PipeMaterial::PMT_FRAME;
      pm.buffer = &(in->lastFrame);
      pm.buffSize = 0;
   }
   else
   {
      in->pmList[0].type = PipeMaterial::PMT_FRAME;
      in->pmList[0].buffer = &(in->lastFrame);
      in->pmList[0].buffSize = 0;
      in->pmList[0].former = this;
        in->pmList[1].type = PipeMaterial::PMT_PTR;
        in->pmList[1].buffer = &(in->faceFeatures);
        in->pmList[1].buffSize = 0;
        in->pmList[1].former = this;
      in->pmList[1].type = PipeMaterial::PMT_PTR;
      in->pmList[1].buffer = &(in->faceFeatures);
      in->pmList[1].buffSize = 0;
      in->pmList[1].former = this;
        pm.type = PipeMaterial::PMT_PM_LIST;
        pm.buffer = in->pmList;
        pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
    }
      pm.type = PipeMaterial::PMT_PM_LIST;
      pm.buffer = in->pmList;
      pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
   }
    pm.former = this;
    return true;
}
void *PL_SensetimeFaceTrackMultiTrd::pay_thd(void *arg) {
   pm.former = this;
   return true;
}