houxiao
2017-07-13 b022b91c0c6fa807424b6c12cc92ac5946838083
RtspFace/PL_SensetimeFaceTrack.cpp
@@ -1,8 +1,12 @@
#include "PL_SensetimeFaceTrack.h"
#include "MaterialBuffer.h"
#include "logger.h"
#include "MediaHelper.h"
#ifdef USE_OPENCV
#include <opencv2/opencv.hpp>
#endif
#include <cv_face.h>
struct PL_SensetimeFaceTrack_Internal
@@ -19,10 +23,13 @@
   
   cv_handle_t handle_track;
   
   size_t frameCount;
   PL_SensetimeFaceTrack_Internal() : 
      //buffSize(0), buffSizeMax(sizeof(buffer)), 
      lastFrame(), pmList(), config(), faceFeatures(), payError(true), 
      handle_track(nullptr)
      handle_track(nullptr),
      frameCount(0)
   {
   }
   
@@ -46,6 +53,8 @@
      config = _config;
      
      handle_track = nullptr;
      frameCount = 0;
   }
};
@@ -69,6 +78,52 @@
   PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
   in->reset();
   
#ifdef __ANDROID__
   {
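      // On Android the SenseTime SDK needs an offline license before a tracker can be created:
      // prefer the license text in config.license_str, otherwise read it from
      // config.license_file_path (defaulting to /data/license.lic).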
      bool retLic = false;
        if (in->config.license_str.empty())
        {
            if (in->config.license_file_path.empty())
                in->config.license_file_path = "/data/license.lic";
            FILE * licFile = fopen (in->config.license_file_path.c_str(),"rb");
            if (licFile != nullptr)
            {
                char licBuffer[1025 * 5] = {'\0'};
                // read at most sizeof(licBuffer) - 1 bytes so the license text stays NUL-terminated
                size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer) - 1, licFile);
                fclose (licFile);
                if (licSize > 0)
                {
                    int ret = cv_face_init_license_config(licBuffer);
                    LOG_INFO << "cv_face_init_license_config 1 ret=" << ret << LOG_ENDL;
                    retLic = true;
                }
            }
            else
            {
                LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
            }
        }
        else
        {
            int ret = cv_face_init_license_config(in->config.license_str.c_str());
            LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
            retLic = true;
        }
        //int ret = cv_face_init_license_config(_lic);
        //LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
        //retLic = true;
      if (!retLic)
        {
            LOG_WARN << "stface for android no license" << LOG_ENDL;
            return false;
        }
   }
#endif
   SensetimeFaceTrackConfig* config = (SensetimeFaceTrackConfig*)args;
   in->config = *config;
   if (in->config.point_size == 21)
@@ -77,16 +132,24 @@
      in->config.point_size_config = CV_DETECT_ENABLE_ALIGN_106;
   else
   {
      LOG_ERROR << "alignment point size must be 21 or 106" << std::endl;
      LOG_ERROR << "alignment point size must be 21 or 106" << LOG_ENDL;
      return false;
   }
   //#test
   //in->config.point_size_config = CV_DETECT_ENABLE_ALIGN_106;
   //in->config.generate_face_point = true;
   //in->config.draw_face_feature_point = true;
    // If CV_FACE_TRACKING_TWO_THREAD is not set, the SenseTime SDK runs a full detection every 20 frames
    // and optical-flow tracking in between; per-detection cost on RK3288: ~800ms at 1920-wide, ~200ms at 640-wide.
    // With CV_FACE_TRACKING_TWO_THREAD it drops to ~10ms at 1920-wide.
   // init handle
   cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr, 
                        in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD);
                        in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
   if (cv_result != CV_OK)
   {
      LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << std::endl;
      LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << LOG_ENDL;
      return false;
   }
@@ -94,11 +157,11 @@
   cv_result = cv_face_track_set_detect_face_cnt_limit(in->handle_track, in->config.detect_face_cnt_limit, &val);
   if (cv_result != CV_OK)
   {
      LOG_ERROR << "cv_face_track_set_detect_face_cnt_limit failed, error : " << cv_result << std::endl;
      LOG_ERROR << "cv_face_track_set_detect_face_cnt_limit failed, error : " << cv_result << LOG_ENDL;
      return false;
   }
   else
      LOG_ERROR << "detect face count limit : " << val << std::endl;
      LOG_ERROR << "detect face count limit : " << val << LOG_ENDL;
   
   return true;
}
@@ -112,9 +175,47 @@
   in->handle_track = nullptr;
}
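// Debug helper: appends every tracked face (rect, score, landmark count, pose angles,
// eye distance, track ID and landmark coordinates) as one tab-separated line to a dump file.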
static void test_dump_feature(cv_face_t* p_face, int face_count)
{
    static std::fstream dumpfile("/data/temp/dump-106-photo2.txt", std::ios_base::out | std::ios_base::trunc);
    for (int i = 0; i < face_count; i++)
    {
        dumpfile << p_face[i].rect.left << "\t"
                 << p_face[i].rect.top << "\t"
                 << p_face[i].rect.right << "\t"
                 << p_face[i].rect.bottom << "\t";
        dumpfile << p_face[i].score << "\t"
                << p_face[i].points_count << "\t"
                << p_face[i].yaw << "\t"
                << p_face[i].pitch << "\t"
                << p_face[i].roll << "\t"
                << p_face[i].eye_dist << "\t"
                << p_face[i].ID << "\t";
        cv_pointf_t points_array[256];
        for (int j = 0; j < p_face[i].points_count; j++)
        {
            dumpfile << p_face[i].points_array[j].x << "\t"
                    << p_face[i].points_array[j].y << "\t";
        }
        dumpfile << std::endl;
    }
}
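// Runs one SenseTime tracking pass over a single frame and converts the results into
// SensetimeFaceFeature entries in in->faceFeatures; returns the number of faces found,
// 0 when the frame is skipped, or -1 on SDK error.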
int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, 
            uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
{
    //PipeLineElemTimingDebugger td(nullptr);
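    // doTrackPerFrame == 0 disables tracking entirely; N > 0 runs the tracker only on every N-th frame.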
    if (in->config.doTrackPerFrame == 0)
      return 0;
   if (in->frameCount % in->config.doTrackPerFrame != 0)
      return 0;
   //resize(bgr_frame, bgr_frame, Size(frame_width, frame_height), 0, 0, INTER_LINEAR);
   int face_count = 0;
@@ -127,27 +228,38 @@
                     CV_FACE_UP, &p_face, &face_count);
   if (cv_result != CV_OK)
   {
      LOG_ERROR << "cv_face_track failed, error : " << cv_result << std::endl;
      LOG_ERROR << "cv_face_track failed, error : " << cv_result << LOG_ENDL;
      cv_face_release_tracker_result(p_face, face_count);
      return -1;
   }
    //#test
    //test_dump_feature(p_face, face_count);
#ifdef USE_OPENCV
   // draw debug overlays onto the video frame
   //cv::Mat yuvMat(cv::Size(width,height), CV_8UC3, buffer);
   cv::Mat yMat(cv::Size(width,height), CV_8UC1, buffer);
#endif
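   // Convert each SDK result that meets the score threshold into a SensetimeFaceFeature,
   // optionally drawing the rect and landmarks onto the Y plane when OpenCV is available.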
   for (int i = 0; i < face_count; i++)
   {
      if (MH_F_LT(p_face[i].score, in->config.score_min))
      {
         continue;
      }
      SensetimeFaceFeature faceFeature;
      faceFeature.rect.leftTop.x = p_face[i].rect.left;
      faceFeature.rect.leftTop.y = p_face[i].rect.top;
      faceFeature.rect.rightBottom.x = p_face[i].rect.right;
      faceFeature.rect.rightBottom.y = p_face[i].rect.bottom;
      faceFeature.rect.leftTop.X = p_face[i].rect.left;
      faceFeature.rect.leftTop.Y = p_face[i].rect.top;
      faceFeature.rect.rightBottom.X = p_face[i].rect.right;
      faceFeature.rect.rightBottom.Y = p_face[i].rect.bottom;
      faceFeature.id = p_face[i].ID;
      faceFeature.score = p_face[i].score;
      faceFeature.yaw = p_face[i].yaw;
      faceFeature.pitch = p_face[i].pitch;
      faceFeature.roll = p_face[i].roll;
      faceFeature.eyeDistance = p_face[i].eye_dist;
      LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
         p_face[i].rect.left, p_face[i].rect.top,
         p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
@@ -156,6 +268,7 @@
         p_face[i].yaw,
         p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
#ifdef USE_OPENCV
      if (in->config.draw_face_rect)
      {
         cv::Scalar scalar_color = CV_RGB(p_face[i].ID * 53 % 256,
@@ -170,66 +283,157 @@
            cv::Point2f(static_cast<float>(p_face[i].rect.right),
            static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
      }
#endif
        if (in->config.generate_face_point)
        {
            for (int j = 0; j < p_face[i].points_count; j++)
            {
                PLGH_Point featurePoint;
                featurePoint.X = p_face[i].points_array[j].x;
                featurePoint.Y = p_face[i].points_array[j].y;
                faceFeature.featurePoints.points.push_back(featurePoint);
      for (int j = 0; j < p_face[i].points_count; j++)
      {
         FacePoint featurePoint;
         featurePoint.x = p_face[i].points_array[j].x;
         featurePoint.y = p_face[i].points_array[j].y;
         faceFeature.featurePoints.push_back(featurePoint);
         if (in->config.draw_face_feature_point)
         {
            cv::circle(yMat, cv::Point2f(p_face[i].points_array[j].x,
               p_face[i].points_array[j].y), 1, cv::Scalar(255, 255, 255));
         }
      }
#ifdef USE_OPENCV
                if (in->config.draw_face_feature_point)
                {
                    cv::circle(yMat, cv::Point2f(p_face[i].points_array[j].x, p_face[i].points_array[j].y), 1, cv::Scalar(255, 255, 255));
                }
#endif
            }
            //int p46x = p_face[i].points_array[46].x;
            //int p46y = p_face[i].points_array[46].y;
            //int p6x = p_face[i].points_array[6].x;
            //int p6y = p_face[i].points_array[6].y;
            //double dist1 = std::sqrt((p46x-p6x)*(p46x-p6x)+(p46y-p6y)*(p46y-p6y));
            //int p43x = p_face[i].points_array[43].x;
            //int p43y = p_face[i].points_array[43].y;
            //int p2x = p_face[i].points_array[2].x;
            //int p2y = p_face[i].points_array[2].y;
            //double dist2 = std::sqrt((p43x-p2x)*(p43x-p2x)+(p43y-p2y)*(p43y-p2y));
            //LOGP(ERROR, "dist46_6/dist43_2=%f", dist1 / dist2);
        }
        if (faceFeature.rect.leftTop.X < 0 || faceFeature.rect.rightBottom.X > width ||
                faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
            faceFeature.outOfFrame = true;
      if (in->config.generate_face_feature)
         in->faceFeatures.push_back(faceFeature);
      {
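         // evenWidthHeight presumably keeps rect corners on even coordinates so later YUV 4:2:0
         // cropping stays aligned with the 2x2 chroma subsampling grid (assumption, not stated in the code).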
         if (in->config.evenWidthHeight)
         {
            if (faceFeature.rect.leftTop.X % 2 != 0) faceFeature.rect.leftTop.X--;
            if (faceFeature.rect.leftTop.Y % 2 != 0) faceFeature.rect.leftTop.Y--;
            if (faceFeature.rect.rightBottom.X % 2 != 0) faceFeature.rect.rightBottom.X--;
            if (faceFeature.rect.rightBottom.Y % 2 != 0) faceFeature.rect.rightBottom.Y--;
         }
         // expand ("explode") the feature rect by the configured margins
         if (in->config.explode_feature_rect_x != 0)
         {
            faceFeature.rect.leftTop.X =     clamp(faceFeature.rect.leftTop.X - in->config.explode_feature_rect_x, 0,  faceFeature.rect.leftTop.X);
            faceFeature.rect.rightBottom.X = clamp(faceFeature.rect.rightBottom.X + in->config.explode_feature_rect_x, faceFeature.rect.rightBottom.X, int(width - 1));
         }
         if (in->config.explode_feature_rect_y != 0)
         {
            faceFeature.rect.leftTop.Y = clamp(faceFeature.rect.leftTop.Y - in->config.explode_feature_rect_y, 0, faceFeature.rect.leftTop.Y);
            faceFeature.rect.rightBottom.Y = clamp(faceFeature.rect.rightBottom.Y + in->config.explode_feature_rect_y, faceFeature.rect.rightBottom.Y, int(height - 1));
         }
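         // clamp() grows the rect by explode_feature_rect_x/y but keeps it inside
         // [0, width-1] x [0, height-1] and never smaller than the original edges.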
            //if (in->config.clamp_feature_rect)
            //{
            //   int dW = width - faceFeature.rect.width();
            //   if (dW < 0)
            //   {
            //       dW = std::abs(dW) / 2 + 1;
            //       faceFeature.rect.leftTop.x += dW;
            //       faceFeature.rect.rightBottom.x -= dW;
            //       faceFeature.rectClamp = true;
            //   }
            //   int dH = height - faceFeature.rect.height();
            //   if (dH < 0)
            //   {
            //       dH = std::abs(dH) / 2 + 1;
            //       faceFeature.rect.leftTop.y += dH;
            //       faceFeature.rect.rightBottom.y -= dH;
            //       faceFeature.rectClamp = true;
            //   }
            //}
            in->faceFeatures.push_back(faceFeature);
        }
   }
   //if (face_count > 0)
   //{
   //   static size_t f=0;
   //   char fname[50];
   //   sprintf(fname, "face-%u.yuv420", ++f);
   //   sprintf(fname, "/sdcard/face-%u.nv12", ++f);
   //   FILE * pFile = fopen (fname,"wb");
   //   fwrite (yuvMat.data , sizeof(char), 1920*1080*1.5, pFile);
   //   fwrite (yMat.data , sizeof(char), 1920*1080*1.5, pFile);
   //   printf("write face file %s\n", fname);
   //   fclose(pFile);
    //  if (f>20)exit(0);
   //}
   // release the tracker results allocated by the SDK
   cv_face_release_tracker_result(p_face, face_count);
    //#debug
    //if (face_count == 0)
    //{
    //    face_count = 2;
    //
    //    SensetimeFaceFeature faceFeature;
    //    faceFeature.rect.leftTop.x = 50;
    //    faceFeature.rect.leftTop.y = 50;
    //    faceFeature.rect.rightBottom.x = 50+128;
    //    faceFeature.rect.rightBottom.y = 50+128;
    //    in->faceFeatures.push_back(faceFeature);
   //
   //   faceFeature.rect.leftTop.x = 300;
    //    faceFeature.rect.leftTop.y = 400;
    //    faceFeature.rect.rightBottom.x = 300+50;
    //    faceFeature.rect.rightBottom.y = 400+60;
    //    in->faceFeatures.push_back(faceFeature);
   //   LOG_WARN << "PL_SensetimeFaceTrack doFaceTrack add test data" << LOG_ENDL;
    //}
   return face_count;
}
bool PL_SensetimeFaceTrack::pay(const PipeMaterial& pm)
/*static*/ bool PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV(const PipeMaterial* pm, void* args)
{
   PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
   PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)args;
   if (pm.type != PipeMaterial::PMT_FRAME)
   if (pm->type != PipeMaterial::PMT_FRAME)
   {
      LOG_ERROR << "Only support PMT_FRAME" << std::endl;
      LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
      return false;
   }
   
   if (pm.buffer == nullptr)
   if (pm->buffer == nullptr)
      return false;
   
   MB_Frame* frame = (MB_Frame*)pm.buffer;
   if (frame->type != MB_Frame::MBFT_YUV420)
   MB_Frame* frame = (MB_Frame*)pm->buffer;
   if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
   {
      LOG_ERROR << "Only support MBFT_YUV420" << std::endl;
      LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
      return false;
   }
   in->faceFeatures.clear();
   int face_count = doFaceTrack(
                  in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
   int face_count = 0;
   if (frame->type == MB_Frame::MBFT_YUV420)
      face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
   else if (frame->type == MB_Frame::MBFT_NV12)
      face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
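   // YUV420P and NV12 share the same Y-plane layout, so both reuse the same tracking path;
   // only the pixel-format flag passed to the SDK differs.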
   if (face_count < 0)
   {
      in->payError = true;
@@ -240,14 +444,34 @@
   
   //in->buffer ready
   in->lastFrame.type = MB_Frame::MBFT_YUV420;
   in->lastFrame.type = frame->type;
   in->lastFrame.buffer = frame->buffer;//#todo should copy
   in->lastFrame.buffSize = frame->buffSize;
   in->lastFrame.width = frame->width;
   in->lastFrame.height = frame->height;
   in->lastFrame.pts = frame->pts;
   return false;
}
   return true;
bool PL_SensetimeFaceTrack::pay(const PipeMaterial& pm)
{
   PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
   //LOG_ERROR << "PL_SensetimeFaceTrack pay" << LOG_ENDL;
   in->payError = true;
    if (in->payError)
       pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
    if (in->payError)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
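   // Try frame-list materials first, then single frames, for YUV420 and NV12 in turn;
   // each attempt only runs while payError is still set from the previous one.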
   in->frameCount++;
   return !(in->payError);
}
bool PL_SensetimeFaceTrack::gain(PipeMaterial& pm)
@@ -273,7 +497,7 @@
      in->pmList[0].buffSize = 0;
      in->pmList[0].former = this;
      
      in->pmList[1].type = PipeMaterial::PMT_BYTES;
      in->pmList[1].type = PipeMaterial::PMT_PTR;
      in->pmList[1].buffer = &(in->faceFeatures);
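      // pmList[1] now exposes a raw pointer to the internal faceFeatures vector (PMT_PTR),
      // so consumers must read it before the next successful pay() clears the vector.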
      in->pmList[1].buffSize = 0;
      in->pmList[1].former = this;