From 5e9814a090f20c2b9c39d6efdc9a732b6097ee7d Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Mon, 24 Jul 2017 20:12:53 +0800
Subject: [PATCH] PL_SensetimeFaceTrack: pass MB_Frame into doFaceTrack and clean up whitespace

---
 RtspFace/PL_SensetimeFaceTrack.cpp |  140 ++++++++++++++++++++++++++--------------------
 1 file changed, 80 insertions(+), 60 deletions(-)

diff --git a/RtspFace/PL_SensetimeFaceTrack.cpp b/RtspFace/PL_SensetimeFaceTrack.cpp
index 8bb4c37..6335e63 100644
--- a/RtspFace/PL_SensetimeFaceTrack.cpp
+++ b/RtspFace/PL_SensetimeFaceTrack.cpp
@@ -20,40 +20,40 @@
 	st_ff_vect_t faceFeatures;
 	bool payError;
-	
+
 	cv_handle_t handle_track;
-	
+
 	size_t frameCount;
-	
-	PL_SensetimeFaceTrack_Internal() : 
-		//buffSize(0), buffSizeMax(sizeof(buffer)), 
-		lastFrame(), pmList(), config(), faceFeatures(), payError(true), 
-		handle_track(nullptr), 
-		frameCount(0)
+
+	PL_SensetimeFaceTrack_Internal() :
+		//buffSize(0), buffSizeMax(sizeof(buffer)),
+		lastFrame(), pmList(), config(), faceFeatures(), payError(true),
+		handle_track(nullptr),
+		frameCount(0)
 	{
 	}
-	
+
 	~PL_SensetimeFaceTrack_Internal()
 	{
 	}
-	
+
 	void reset()
 	{
 		//buffSize = 0;
 		payError = true;
-		
+
 		MB_Frame _lastFrame;
 		lastFrame = _lastFrame;
-		
+
 		PipeMaterial _pm;
 		pmList[0] = _pm;
 		pmList[1] = _pm;
-		
+
 		SensetimeFaceTrackConfig _config;
 		config = _config;
-		
+
 		handle_track = nullptr;
-		
+
 		frameCount = 0;
 	}
 };
@@ -77,7 +77,7 @@
 {
 	PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
 	in->reset();
-	
+
 #ifdef __ANDROID__
 	{
 		bool retLic = false;
@@ -123,7 +123,7 @@
 		}
 	}
 #endif
-	
+
 	SensetimeFaceTrackConfig* config = (SensetimeFaceTrackConfig*)args;
 	in->config = *config;
 	if (in->config.point_size == 21)
@@ -145,8 +145,8 @@
 
 	// per detect in RK3288: 800ms@1920w,200ms@640w; with CV_FACE_TRACKING_TWO_THREAD 10ms@1920w
 	// init handle
-	cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr, 
-								in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
+	cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr,
+		in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
 	if (cv_result != CV_OK)
 	{
 		LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << LOG_ENDL;
@@ -162,14 +162,14 @@
 	}
 	else
 		LOG_ERROR << "detect face count limit : " << val << LOG_ENDL;
-	
+
 	return true;
 }
 
 void PL_SensetimeFaceTrack::finit()
 {
 	PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
-	
+
 	// destroy track handle
 	cv_face_destroy_tracker(in->handle_track);
 	in->handle_track = nullptr;
@@ -189,27 +189,26 @@
 					<< p_face[i].rect.bottom << "\t";
 
 		dumpfile << p_face[i].score << "\t"
-					<< p_face[i].points_count << "\t"
-					<< p_face[i].yaw << "\t"
-					<< p_face[i].pitch << "\t"
-					<< p_face[i].roll << "\t"
-					<< p_face[i].eye_dist << "\t"
-					<< p_face[i].ID << "\t";
+			<< p_face[i].points_count << "\t"
+			<< p_face[i].yaw << "\t"
+			<< p_face[i].pitch << "\t"
+			<< p_face[i].roll << "\t"
+			<< p_face[i].eye_dist << "\t"
+			<< p_face[i].ID << "\t";
 
 		cv_pointf_t points_array[256];
 		for (int j = 0; j < p_face[i].points_count; j++)
 		{
 			dumpfile << p_face[i].points_array[j].x << "\t"
-						<< p_face[i].points_array[j].y << "\t";
+				<< p_face[i].points_array[j].y << "\t";
 		}
 		dumpfile << std::endl;
 	}
 }
 
-int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, 
-				uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
+int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, MB_Frame* frame)
 {
 	//PipeLineElemTimingDebugger td(nullptr);
@@ -218,16 +217,39 @@
 
 	if (in->frameCount % in->config.doTrackPerFrame != 0)
 		return 0;
 
+	//if (true)
+	//{
+	//	struct timeval now;
+	//	gettimeofday(&now, nullptr);
+	//	const int fps = 20;
+	//	const int ft = 1000 / fps; // ms
+	//	if (now.tv_usec - frame->pts.tv_usec > 0.5 * ft * 1000)
+	//		return 0;
+	//}
+	//resize(bgr_frame, bgr_frame, Size(frame_width, frame_height), 0, 0, INTER_LINEAR);
+
+	uint8_t* buffer = (uint8_t*)frame->buffer;
+	const size_t width = frame->width;
+	const size_t height = frame->height;
+	const size_t stride = frame->width;
+	cv_pixel_format cvPixFmt;
+	if (frame->type == MB_Frame::MBFT_YUV420)
+		cvPixFmt = CV_PIX_FMT_YUV420P;
+	else if (frame->type == MB_Frame::MBFT_NV12)
+		cvPixFmt = CV_PIX_FMT_NV12;
+	else
+		return -1;
 	int face_count = 0;
 	cv_result_t cv_result = CV_OK;
 	cv_face_t* p_face = nullptr;
-	
+
+	//#test
+	//cvPixFmt = CV_PIX_FMT_GRAY8;
+
 	// realtime track
-	cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt,
-						width, height, stride,
-						CV_FACE_UP, &p_face, &face_count);
+	cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt, width, height, stride, CV_FACE_UP, &p_face, &face_count);
 	if (cv_result != CV_OK)
 	{
 		LOG_ERROR << "cv_face_track failed, error : " << cv_result << LOG_ENDL;
@@ -263,27 +285,27 @@
 		faceFeature.eyeDistance = p_face[i].eye_dist;
 
 		LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
-				p_face[i].rect.left, p_face[i].rect.top,
-				p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
-		
+			p_face[i].rect.left, p_face[i].rect.top,
+			p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
+
 		LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
-				p_face[i].yaw,
-				p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
+			p_face[i].yaw,
+			p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
 
 #ifdef USE_OPENCV
 		if (in->config.draw_face_rect)
 		{
 			cv::Scalar scalar_color = CV_RGB(p_face[i].ID * 53 % 256,
-					p_face[i].ID * 93 % 256,
-					p_face[i].ID * 143 % 256);
-			
+				p_face[i].ID * 93 % 256,
+				p_face[i].ID * 143 % 256);
+
 			//cv::rectangle(yMat, cv::Point2f(0, 0), cv::Point2f(50, 50), scalar_color, 2);
 			//cv::rectangle(yMat, cv::Point2f(500, 500), cv::Point2f(550, 550), scalar_color, 2);
-			
+
 			cv::rectangle(yMat, cv::Point2f(static_cast<float>(p_face[i].rect.left),
-					static_cast<float>(p_face[i].rect.top)),
-					cv::Point2f(static_cast<float>(p_face[i].rect.right),
-					static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
+				static_cast<float>(p_face[i].rect.top)),
+				cv::Point2f(static_cast<float>(p_face[i].rect.right),
+				static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
 		}
 #endif
@@ -320,7 +342,7 @@
 		}
 
 		if (faceFeature.rect.leftTop.X < 0 || faceFeature.rect.rightBottom.X > width ||
-				faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
+			faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
 			faceFeature.outOfFrame = true;
 
 		if (in->config.generate_face_feature)
@@ -397,7 +419,7 @@
 //	faceFeature.rect.rightBottom.x = 50+128;
 //	faceFeature.rect.rightBottom.y = 50+128;
 //	in->faceFeatures.push_back(faceFeature);
-//	
+//
 //	faceFeature.rect.leftTop.x = 300;
 //	faceFeature.rect.leftTop.y = 400;
 //	faceFeature.rect.rightBottom.x = 300+50;
@@ -418,10 +440,10 @@
 		LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
 		return false;
 	}
-	
+
 	if (pm->buffer == nullptr)
 		return false;
-	
+
 	MB_Frame* frame = (MB_Frame*)pm->buffer;
 	if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
 	{
@@ -431,10 +453,8 @@
 	in->faceFeatures.clear();
 	int face_count = 0;
-	if (frame->type == MB_Frame::MBFT_YUV420)
-		face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
-	else if (frame->type == MB_Frame::MBFT_NV12)
-		face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
+	if (frame->type == MB_Frame::MBFT_YUV420 || frame->type == MB_Frame::MBFT_NV12)
+		face_count = doFaceTrack(in, frame);
 
 	if (face_count < 0)
 	{
@@ -443,7 +463,7 @@
 	}
 	else
 		in->payError = false;
-	
+
 	//in->buffer readly
 	in->lastFrame.type = frame->type;
@@ -452,7 +472,7 @@
 	in->lastFrame.width = frame->width;
 	in->lastFrame.height = frame->height;
 	in->lastFrame.pts = frame->pts;
-	
+
 	return false;
 }
 
@@ -472,7 +492,7 @@
 	pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
 
 	in->frameCount++;
-	
+
 	return !(in->payError);
 }
 
@@ -498,17 +518,17 @@
 		in->pmList[0].buffer = &(in->lastFrame);
 		in->pmList[0].buffSize = 0;
 		in->pmList[0].former = this;
-		
+
 		in->pmList[1].type = PipeMaterial::PMT_PTR;
 		in->pmList[1].buffer = &(in->faceFeatures);
 		in->pmList[1].buffSize = 0;
 		in->pmList[1].former = this;
-		
+
 		pm.type = PipeMaterial::PMT_PM_LIST;
 		pm.buffer = in->pmList;
 		pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
 	}
-	
+
 	pm.former = this;
 	return true;
 }
-- 
Gitblit v1.8.0
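
Note on the change above (a sketch, not part of the commit): the behavioural core of this patch is that doFaceTrack() now receives the whole MB_Frame and derives the buffer, width, height, stride and SenseTime pixel format from it, instead of the caller passing them separately for each supported format. The helper below mirrors that mapping in isolation. The header names (MaterialBuffer.h, cv_face.h) and their presence on the include path are assumptions; the cv_pixel_format type, the CV_PIX_FMT_* constants and the MB_Frame::MBFT_* values are the ones already used in the hunks above.

// Sketch only: mirrors the pixel-format selection introduced in doFaceTrack() above.
// Header names are assumed; MB_Frame comes from this repository, cv_face.h from the SenseTime SDK.
#include "MaterialBuffer.h" // MB_Frame (assumed header name)
#include "cv_face.h"        // cv_pixel_format, CV_PIX_FMT_YUV420P, CV_PIX_FMT_NV12

// Map an MB_Frame pixel type to the SDK pixel format expected by cv_face_track().
// Returns false for frame types the tracker is not fed (doFaceTrack() returns -1 in that case).
static bool mapToCvPixelFormat(const MB_Frame* frame, cv_pixel_format& cvPixFmt)
{
	switch (frame->type)
	{
	case MB_Frame::MBFT_YUV420:
		cvPixFmt = CV_PIX_FMT_YUV420P; // planar I420
		return true;
	case MB_Frame::MBFT_NV12:
		cvPixFmt = CV_PIX_FMT_NV12;    // semi-planar NV12
		return true;
	default:
		return false;                  // unsupported input, skip tracking
	}
}

With such a helper, the body of doFaceTrack() reduces to the single cv_face_track(in->handle_track, buffer, cvPixFmt, width, height, stride, CV_FACE_UP, &p_face, &face_count) call shown in the @@ -218,16 +217,39 @@ hunk. Note that the stride is taken to be frame->width, which assumes a tightly packed luma plane.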