xuxiuxi
2017-05-11 109ffe9a777658936a38d0c146579a67c60a0d17
RtspFace/PL_SensetimeFaceTrack.cpp
@@ -11,7 +11,9 @@
   //size_t buffSize;
   //size_t buffSizeMax;
   MB_Frame lastFrame;
+   PipeMaterial pmList[2];
   SensetimeFaceTrackConfig config;
+   st_ff_vect_t faceFeatures;
   bool payError;
   
@@ -19,7 +21,7 @@
   
   PL_SensetimeFaceTrack_Internal() : 
      //buffSize(0), buffSizeMax(sizeof(buffer)), 
-      lastFrame(), config(), payError(true),
+      lastFrame(), pmList(), config(), faceFeatures(), payError(true),
      handle_track(nullptr)
   {
   }
@@ -35,6 +37,11 @@
      
      MB_Frame _lastFrame;
      lastFrame = _lastFrame;
+      PipeMaterial _pm;
+      pmList[0] = _pm;
+      pmList[1] = _pm;
      SensetimeFaceTrackConfig _config;
      config = _config;
      
@@ -70,7 +77,7 @@
      in->config.point_size_config = CV_DETECT_ENABLE_ALIGN_106;
   else
   {
-      LOG_ERROR << "alignment point size must be 21 or 106";
+      LOG_ERROR << "alignment point size must be 21 or 106" << std::endl;
      return false;
   }
@@ -79,7 +86,7 @@
                        in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD);
   if (cv_result != CV_OK)
   {
-      LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result;
+      LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << std::endl;
      return false;
   }
@@ -87,11 +94,11 @@
   cv_result = cv_face_track_set_detect_face_cnt_limit(in->handle_track, in->config.detect_face_cnt_limit, &val);
   if (cv_result != CV_OK)
   {
-      LOG_ERROR << "cv_face_track_set_detect_face_cnt_limit failed, error : " << cv_result;
+      LOG_ERROR << "cv_face_track_set_detect_face_cnt_limit failed, error : " << cv_result << std::endl;
      return false;
   }
   else
-      LOG_ERROR << "detect face count limit : " << val;
+      LOG_ERROR << "detect face count limit : " << val << std::endl;
   
   return true;
}
@@ -120,7 +127,7 @@
                     CV_FACE_UP, &p_face, &face_count);
   if (cv_result != CV_OK)
   {
-      LOG_ERROR << "cv_face_track failed, error : " << cv_result;
+      LOG_ERROR << "cv_face_track failed, error : " << cv_result << std::endl;
      cv_face_release_tracker_result(p_face, face_count);
      return -1;
   }
@@ -130,6 +137,17 @@
   cv::Mat yMat(cv::Size(width,height), CV_8UC1, buffer);
   for (int i = 0; i < face_count; i++)
   {
+      SensetimeFaceFeature faceFeature;
+      faceFeature.rect.leftTop.x = p_face[i].rect.left;
+      faceFeature.rect.leftTop.y = p_face[i].rect.top;
+      faceFeature.rect.rightBottom.x = p_face[i].rect.right;
+      faceFeature.rect.rightBottom.y = p_face[i].rect.bottom;
+      faceFeature.id = p_face[i].ID;
+      faceFeature.yaw = p_face[i].yaw;
+      faceFeature.pitch = p_face[i].pitch;
+      faceFeature.roll = p_face[i].roll;
+      faceFeature.eyeDistance = p_face[i].eye_dist;
      LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
         p_face[i].rect.left, p_face[i].rect.top,
         p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
@@ -156,7 +174,10 @@
      for (int j = 0; j < p_face[i].points_count; j++)
      {
+         FacePoint featurePoint;
+         featurePoint.x = p_face[i].points_array[j].x;
+         featurePoint.y = p_face[i].points_array[j].y;
+         faceFeature.featurePoints.push_back(featurePoint);
         
         if (in->config.draw_face_feature_point)
         {
@@ -164,6 +185,9 @@
               p_face[i].points_array[j].y), 1, cv::Scalar(255, 255, 255));
         }
      }
+      if (in->config.generate_face_feature)
+         in->faceFeatures.push_back(faceFeature);
   }
   //if (face_count > 0)
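
SensetimeFaceFeature, FacePoint and st_ff_vect_t are introduced by this commit but declared in the accompanying header rather than in this file. A minimal sketch of the shape those declarations presumably have, inferred only from the member accesses in the hunks above; all field types are assumptions:

// Sketch only -- reconstructed from usage in this diff, not taken from the real header.
#include <vector>

struct FacePoint
{
   int x;   // assumed int; the header may use float
   int y;
};

struct FaceRect
{
   FacePoint leftTop;
   FacePoint rightBottom;
};

struct SensetimeFaceFeature
{
   FaceRect rect;
   int id;
   float yaw;
   float pitch;
   float roll;
   float eyeDistance;
   std::vector<FacePoint> featurePoints;
};

typedef std::vector<SensetimeFaceFeature> st_ff_vect_t;
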
@@ -189,7 +213,7 @@
   if (pm.type != PipeMaterial::PMT_FRAME)
   {
-      LOG_ERROR << "PL_H264Encoder::pay only support PMT_FRAME";
+      LOG_ERROR << "Only support PMT_FRAME" << std::endl;
      return false;
   }
   
@@ -199,10 +223,11 @@
   MB_Frame* frame = (MB_Frame*)pm.buffer;
   if (frame->type != MB_Frame::MBFT_YUV420)
   {
-      LOG_ERROR << "PL_H264Encoder::pay only support MBFT_YUV420";
+      LOG_ERROR << "Only support MBFT_YUV420" << std::endl;
      return false;
   }
+   in->faceFeatures.clear();
   int face_count = doFaceTrack(
                  in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
   if (face_count < 0)
@@ -229,13 +254,35 @@
{
   PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
-   if (!in->payError)
+   if (in->payError)
   {
+      pm.former = this;
+      return false;
+   }
+   if (!in->config.generate_face_feature)
+   {
      pm.type = PipeMaterial::PMT_FRAME;
      pm.buffer = &(in->lastFrame);
      pm.buffSize = 0;
      pm.former = this;
   }
+   else
+   {
+      in->pmList[0].type = PipeMaterial::PMT_FRAME;
+      in->pmList[0].buffer = &(in->lastFrame);
+      in->pmList[0].buffSize = 0;
+      in->pmList[0].former = this;
+      in->pmList[1].type = PipeMaterial::PMT_BYTES;
+      in->pmList[1].buffer = &(in->faceFeatures);
+      in->pmList[1].buffSize = 0;
+      in->pmList[1].former = this;
+      pm.type = PipeMaterial::PMT_PM_LIST;
+      pm.buffer = in->pmList;
+      pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
+   }
   pm.former = this;
-   return !in->payError;
+   return true;
}
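
With generate_face_feature enabled, gain() now emits a PMT_PM_LIST whose first entry carries the tracked frame and whose second entry points at the st_ff_vect_t filled during pay(); buffSize is left at 0 for both entries, so a consumer has to rely on the agreed payload types rather than the size field. A minimal sketch of how a downstream element might unpack this, assuming the PipeMaterial, MB_Frame and LOGP definitions used above (the consumer itself is hypothetical, not part of the commit):

// Hypothetical downstream consumer -- illustrative only.
bool PL_FaceFeatureConsumer_pay(const PipeMaterial& pm)
{
   if (pm.type != PipeMaterial::PMT_PM_LIST)
      return false;

   PipeMaterial* pmList = (PipeMaterial*)pm.buffer;
   // pm.buffSize holds the entry count (2 in this commit)
   MB_Frame* frame = (MB_Frame*)pmList[0].buffer;          // PMT_FRAME: the YUV420 frame that was tracked
   st_ff_vect_t* faces = (st_ff_vect_t*)pmList[1].buffer;  // PMT_BYTES: features collected for that frame

   for (size_t i = 0; i < faces->size(); i++)
   {
      const SensetimeFaceFeature& ff = (*faces)[i];
      LOGP(DEBUG, "face id=%d rect=[%d, %d, %d, %d] on %dx%d frame",
         ff.id, ff.rect.leftTop.x, ff.rect.leftTop.y,
         ff.rect.rightBottom.x, ff.rect.rightBottom.y,
         frame->width, frame->height);
   }
   return true;
}
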