From 035ea5433e1e5a7688c5ca7cb229aaded0626775 Mon Sep 17 00:00:00 2001
From: pans <pans@454eff88-639b-444f-9e54-f578c98de674>
Date: Wed, 28 Dec 2016 09:52:32 +0800
Subject: [PATCH] Add sample_face_track camera demo and fix faceDB initialization in faceAPI.cpp
---
RtspFace/demo/faceAPI.cpp | 2
RtspFace/demo/新建文本文档.txt | 104
RtspFace/demo/sample_face_track.cpp | 121 +++++++++++++++++++++++++++++++++++++++
3 files changed, 226 insertions(+), 1 deletion(-)
diff --git a/RtspFace/demo/faceAPI.cpp b/RtspFace/demo/faceAPI.cpp
index 128c124..c49a42c 100644
--- a/RtspFace/demo/faceAPI.cpp
+++ b/RtspFace/demo/faceAPI.cpp
@@ -1,7 +1,7 @@
#include "faceAPI.h"
#include "faceDB.h"
-faceDB fdb = faceDB::faceDB();
+faceDB fdb = faceDB();
faceAPI::faceAPI(){
// Create the static face recognition handles and the status variable
cv_result_t cv_result = 0;
diff --git a/RtspFace/demo/sample_face_track.cpp b/RtspFace/demo/sample_face_track.cpp
new file mode 100644
index 0000000..69781fe
--- /dev/null
+++ b/RtspFace/demo/sample_face_track.cpp
@@ -0,0 +1,121 @@
+#include <vector>
+#include <stdio.h>
+#include <cv_face.h>
+
+#include <opencv2/opencv.hpp>
+
+using namespace std;
+using namespace cv;
+
+int main(int argc, char *argv[]) {
+ if (argc < 2) {
+ fprintf(stderr, "test_sample_face_track [alignment point size(21 or 106)] [detect face cont limit]\n");
+ fprintf(stderr, "for example: \"test_sample_face_track 21 1\"\n");
+ return -1;
+ }
+
+ VideoCapture capture;
+ capture.open(0); // open the camera
+ if (!capture.isOpened()) {
+ fprintf(stderr, "can not open camera!\n");
+ return -1;
+ }
+ namedWindow("TrackingTest");
+ int frame_width = capture.get(CV_CAP_PROP_FRAME_WIDTH);
+ int frame_height = capture.get(CV_CAP_PROP_FRAME_HEIGHT);
+
+ int point_size = atoi(argv[1]);
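+ // map the requested landmark count to the SDK alignment configuration flag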
+ int config;
+ if (point_size == 21) {
+ config = CV_DETECT_ENABLE_ALIGN_21;
+ }
+ else if (point_size == 106) {
+ config = CV_DETECT_ENABLE_ALIGN_106;
+ }
+ else {
+ fprintf(stderr, "alignment point size must be 21 or 106\n");
+ return -1;
+ }
+
+ cv_handle_t handle_track = NULL;
+ cv_result_t cv_result = CV_OK;
+ do {
+ // create the face tracker handle
+ cv_result = cv_face_create_tracker(&handle_track, NULL, config | CV_FACE_TRACKING_TWO_THREAD);
+ if (cv_result != CV_OK) {
+ fprintf(stderr, "cv_face_create_tracker failed, error code %d\n", cv_result);
+ break;
+ }
+
+ if (argc == 3) {
+ int detect_face_cnt_limit = atoi(argv[2]);
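+ // clamp values below -1 to -1 before passing the limit to the tracker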
+ if (detect_face_cnt_limit < -1) {
+ detect_face_cnt_limit = -1;
+ }
+ int val = 0;
+ cv_result = cv_face_track_set_detect_face_cnt_limit(handle_track, detect_face_cnt_limit, &val);
+ if (cv_result != CV_OK) {
+ fprintf(stderr, "cv_face_track_set_detect_face_cnt_limit failed, error : %d\n", cv_result);
+ break;
+ } else {
+ fprintf(stderr, "detect face count limit : %d\n", val);
+ }
+ }
+
+ Mat bgr_frame;
+ cv_face_t *p_face = NULL;
+ int face_count = 0;
+ while (capture.read(bgr_frame)) { // CV_PIX_FMT_BGR888
+ resize(bgr_frame, bgr_frame, Size(frame_width, frame_height), 0, 0,
+ INTER_LINEAR);
+ // track faces in the current BGR frame
+ face_count = 0;
+ cv_result = cv_face_track(handle_track, bgr_frame.data, CV_PIX_FMT_BGR888,
+ bgr_frame.cols, bgr_frame.rows, bgr_frame.step,
+ CV_FACE_UP, &p_face, &face_count);
+ if (cv_result != CV_OK) {
+ fprintf(stderr, "cv_face_track failed, error : %d\n", cv_result);
+ cv_face_release_tracker_result(p_face, face_count);
+ break;
+ }
+
+ for (int i = 0; i < face_count; i++) {
+ fprintf(stderr, "face: %d-----[%d, %d, %d, %d]-----id: %d\n", i,
+ p_face[i].rect.left, p_face[i].rect.top,
+ p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
+ fprintf(stderr, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]\n",
+ p_face[i].yaw,
+ p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
+
+ // draw the face rectangle and landmark points on the frame
+ Scalar scalar_color = CV_RGB(p_face[i].ID * 53 % 256,
+ p_face[i].ID * 93 % 256,
+ p_face[i].ID * 143 % 256);
+ rectangle(bgr_frame, Point2f(static_cast<float>(p_face[i].rect.left),
+ static_cast<float>(p_face[i].rect.top)),
+ Point2f(static_cast<float>(p_face[i].rect.right),
+ static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
+ for (int j = 0; j < p_face[i].points_count; j++) {
+ circle(bgr_frame, Point2f(p_face[i].points_array[j].x,
+ p_face[i].points_array[j].y), 1, Scalar(0, 255, 0));
+ }
+ }
+
+ // release the memory of the face tracking result
+ cv_face_release_tracker_result(p_face, face_count);
+ imshow("TrackingTest", bgr_frame);
+ if (waitKey(1) != -1)
+ break;
+ }
+
+ } while (0);
+
+ // destroy track handle
+ cv_face_destroy_tracker(handle_track);
+
+ return 0;
+}
+
diff --git "a/RtspFace/demo/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt" "b/RtspFace/demo/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
new file mode 100644
index 0000000..3f608a3
--- /dev/null
+++ "b/RtspFace/demo/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
@@ -0,0 +1,104 @@
+#include "faceAPI.h"
+#include "faceDB.h"
+
+faceDB fdb = faceDB::faceDB();
+faceAPI::faceAPI(){
+ // Create the static face recognition handles and the status variable
+ cv_result_t cv_result = 0;
+ cv_face_create_detector(&handle_detect, NULL, CV_DETECT_ENABLE_ALIGN_21);
+ cv_verify_create_handle(&handle_verify1, "verify.model");
+ //faceDB* f_db = faceDB.GetInstance();
+}
+
+faceAPI::~faceAPI(){
+
+}
+
+int faceAPI::do_reasch(cv::Mat image){
+ if (!image.data) {
+ fprintf(stderr, "fail to read img\n");
+ return -1;
+ }else
+ fprintf(stderr, "read img\n");
+ p_f = extract_feature(image);
+ if (!p_f)
+ {
+ fprintf(stderr, "p_f is null\n");
+ return -2;
+ }
+ return p_img_search(p_f);
+}
+
+int faceAPI::do_reasch(char* s_feature){
+ get_feature(s_feature,p_f);
+ return p_img_search(p_f);
+}
+
+int faceAPI::do_register(cv::Mat image){
+ p_f = extract_feature(image);
+ //db add
+ int indx = 1;
+ return indx;
+}
+
+
+int faceAPI::p_img_search(cv_feature_t *p_feature){
+ std::cout<<"p img search"<<std::endl;
+
+ return fdb.search_db(p_feature);
+}
+
+cv_feature_t* faceAPI::extract_feature(const cv::Mat image_color) {
+
+ if (handle_verify1 != NULL)
+ {
+ fprintf(stderr, "handle_verify1 is not null!\n");
+ }
+ else
+ {
+ fprintf(stderr, "handle_verify1 is null!\n");
+ }
+
+ cv_feature_t *p_feature = NULL;
+ cv_face_t *p_face = NULL;
+ int face_count = 0;
+ cv_result_t st_result = CV_OK;
+ st_result = cv_face_detect(handle_detect, image_color.data, CV_PIX_FMT_BGR888,
+ image_color.cols, image_color.rows, image_color.step,
+ CV_FACE_UP, &p_face, &face_count);
+ if (face_count >= 1) {
+ st_result = cv_verify_get_feature(handle_verify1,
+ (unsigned char *)image_color.data, CV_PIX_FMT_BGR888,
+ image_color.cols, image_color.rows, image_color.step,
+ p_face, &p_feature, NULL);
+ if (st_result != CV_OK) {
+ fprintf(stderr, "cv_verify_get_feature failed, error code %d\n", st_result);
+ }
+ } else {
+ fprintf(stderr, "can't find face in ");
+ }
+ // release the memory of face
+ cv_face_release_detector_result(p_face, face_count);
+ return p_feature;
+}
+
+
+//@brief Convert a feature to a string
+//@param the feature
+//@return the feature in string form
+int faceAPI::get_char(cv_feature_t *p_feature,char* feature_str){
+
+ return cv_verify_serialize_feature(p_feature,feature_str);
+}
+
+//@brief Convert a string back to a feature
+//@param the feature in string form
+//@return the feature
+int faceAPI::get_feature(char *feature_str,cv_feature_t *p_feature){
+
+ p_feature = cv_verify_deserialize_feature(feature_str);
+ if(p_feature != NULL){
+ return 0;
+ }else
+ return -1;
+}
\ No newline at end of file
--
Gitblit v1.8.0