QiaoJiaSystem/FaceDetectServer/FaceDetectServerI.cpp
QiaoJiaSystem/StructureApp/CMakeLists.txt
QiaoJiaSystem/StructureApp/FaceDefine.h
QiaoJiaSystem/StructureApp/FaceRpcElement.cpp
QiaoJiaSystem/StructureApp/FaceRpcElement.h
QiaoJiaSystem/StructureApp/FaceTrackingWrapper.cpp
QiaoJiaSystem/StructureApp/FaceTrackingWrapper.h
QiaoJiaSystem/StructureApp/TrackingTrigger.h
QiaoJiaSystem/VideoAnalysFromHC/CMakeLists.txt
QiaoJiaSystem/VideoAnalysFromHC/RtspAnalysElement.cpp
QiaoJiaSystem/build/DataWebServer
QiaoJiaSystem/FaceDetectServer/FaceDetectServerI.cpp
@@ -58,6 +58,7 @@
            memcpy(&face, &pos, sizeof(pos) - sizeof(pos.pFacialData));
            face.pFacialData.resize(sizeof(pos.pFacialData));
            memcpy(face.pFacialData.data(), pos.pFacialData, sizeof(pos.pFacialData));
            face.pfaceId = -1;
            // DBG(face.fAngle.confidence);
            faces.push_back(face);
        }
QiaoJiaSystem/StructureApp/CMakeLists.txt
@@ -118,9 +118,3 @@
        ${LIBS}
        )

#add_executable(AppPipeControllerTest
#        AppPipeControllerTest.cpp
#        ${SOURCES})
#target_link_libraries(AppPipeControllerTest
#        ${LIBS}
#        )
QiaoJiaSystem/StructureApp/FaceDefine.h
New file
@@ -0,0 +1,85 @@
//
// Created by ps on 18-12-18.
//

#ifndef TESTCODE_FACEDEFINE_H
#define TESTCODE_FACEDEFINE_H

#include <vector>
#include <string>

//using namespace std;
using std::vector;
using std::string;

namespace BasicFace {

    typedef vector<unsigned char> Feature;

    struct InitParam {
        int nDeviceID;               // GPU device id, e.g. 0, 1, 2, 3, ...
        int nImageWidth;             // image width of the video
        int nImageHeight;            // image height of the video
        int nMaxFaceNum;             // max number of faces to track
        int nSampleSize;             // down-sample size for face detection
        int nDetectionIntervalFrame; // frame interval between detections while tracking

        InitParam() {
            nMaxFaceNum = 100;
            nSampleSize = 640;
            nDeviceID = 0;
            nDetectionIntervalFrame = 5;
        }
    };

    struct FaceFeatureResult {
        Feature feature;
        float score;
    };

    struct FaceDetectResult {
        FaceDetectResult() : attributes(256), trackingId(-1) {}

        int id;
        int left;
        int top;
        int width;
        int height;
        float score;
        float yaw;   // horizontal rotation, negative left / positive right, in degrees
        float pitch; // pitch, negative up / positive down, in degrees
        float roll;  // in-plane rotation, negative left / positive right, in degrees
        float angle; // sqrt(yaw*yaw/3 + pitch*pitch/3 + roll*roll/3)
        vector<char> attributes;
        long trackingId;
    };

    struct DbSearchResult {
        int index;
        float confidence;
        string dbId;
    };

    struct FaceSearchResult {
        int index;
        int left;
        int top;
        int width;
        int height;
        float score;
        float confidence;
        string dbId;
    };

    struct FaceImage {
        int width;
        int height;
        int stride;
        unsigned char *data;
    };
}

#endif //TESTCODE_FACEDEFINE_H
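The FaceDetectResult::angle field folds yaw, pitch and roll into a single pose magnitude. As an illustration only (the helper names keepFrontalFaces / makeParamFor1080p and the 30-degree cut-off are assumptions, not part of this patch), the structs above could be used like this:

#include <vector>
#include "FaceDefine.h"

// Sketch: keep only roughly frontal detections using the combined-angle convention above.
static std::vector<BasicFace::FaceDetectResult>
keepFrontalFaces(const std::vector<BasicFace::FaceDetectResult> &in, float maxAngle = 30.0f) {
    std::vector<BasicFace::FaceDetectResult> out;
    for (const auto &r : in) {
        // angle == sqrt(yaw^2/3 + pitch^2/3 + roll^2/3); small values mean near-frontal
        if (r.angle <= maxAngle)
            out.push_back(r);
    }
    return out;
}

// Sketch: per-channel init parameters for a 1080p stream; other fields keep their defaults
// (nMaxFaceNum = 100, nSampleSize = 640, nDetectionIntervalFrame = 5).
static BasicFace::InitParam makeParamFor1080p() {
    BasicFace::InitParam p;
    p.nImageWidth = 1920;
    p.nImageHeight = 1080;
    return p;
}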
QiaoJiaSystem/StructureApp/FaceRpcElement.cpp
@@ -4,6 +4,7 @@
#include <QtCore/QString>
#include <basic/timer_counter/Clocktimer.h>
#include <basic/util/opencv/CvUtil.h>
#include "FaceTrackingWrapper.h"

#define GETSCORE(IDENT) appPref.getFloatData(IDENT) == -1 ? 95 : appPref.getFloatData(IDENT);

@@ -37,7 +38,9 @@
        sharedMemory(nullptr), trackingTrigger(nullptr) {
    sharedMemory = new QSharedMemory(QString(shareMemoryName.c_str()));
    if (!sharedMemory->create(4608 * 2592 * 4)) {
        sharedMemory->attach();
        sharedMemory-> attach();
        DBG("size is " << sharedMemory->size());
    }

@@ -45,7 +48,8 @@
    //#todo
    string t_camIdex;
    if (shareMemoryName.find("/")) {
        string_replace(shareMemoryName, "//", "/");
        auto dev_pos = shareMemoryName.find("/cut/") + 5;
        auto ch_pos = shareMemoryName.find("/", dev_pos) + 1;
        auto str_device_id = shareMemoryName.substr(dev_pos, ch_pos - dev_pos - 1);

@@ -62,6 +66,7 @@
    t_score = t_score / 100;
    trackingTrigger = new TrackingTrigger(t_score);

    m_trackingRet = appPref.getIntData("FaceTrackingRet");
}

FaceRpcElement::~FaceRpcElement() {

@@ -86,9 +91,15 @@
    try {
        auto server = rpcClient.getServer();
        if (!server) ERR("server is null");
        if (m_trackingRet) {
            // #todo xxxx.detectFace
            faces = faceTrackingFunc(m_channel, image);
        } else {
            faces = server->faceDetect(image.cols, image.rows, sharedMemory->key().toStdString());
        }
        // DBG("faces.size " << faces.size());
        for (auto face: faces) {
        for (auto &face: faces) {
            ::FaceDetect::RECT &rect = face.rcFace;
            ScoredRect scoredRect;
            int x = face.rcFace.left;

@@ -98,7 +109,9 @@
            scoredRect.rect = {x, y, w, h};
            scoredRect.score = (float) face.fAngle.confidence;
            if (trackingTrigger->triggerOnce(scoredRect)) {
            bool newFaceRet = m_trackingRet ? trackingTrigger->triggerOnce(scoredRect, face.pfaceId)
                                            : trackingTrigger->triggerOnce(scoredRect);
            if (newFaceRet) {
                auto property = server->faceProperty(image.cols, image.rows, face, sharedMemory->key().toStdString());
                trackingTrigger->getLastRect().properties["id"] = to_string(scoredRect.id);
                trackingTrigger->getLastRect().properties["age"] = to_string(property.age);
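For reference, the device-id slicing in the constructor works as follows on a shared-memory name of the form "/cut/<device>/<channel>/..."; the concrete name below is hypothetical, only the layout is assumed:

#include <iostream>
#include <string>

int main() {
    std::string shareMemoryName = "/video/cut/34020000001310000001/1/0";
    auto dev_pos = shareMemoryName.find("/cut/") + 5;       // index just past "/cut/"
    auto ch_pos = shareMemoryName.find("/", dev_pos) + 1;   // index just past the device id's slash
    auto str_device_id = shareMemoryName.substr(dev_pos, ch_pos - dev_pos - 1);
    std::cout << str_device_id << std::endl;                // prints 34020000001310000001
    return 0;
}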
QiaoJiaSystem/StructureApp/FaceRpcElement.h
@@ -42,11 +42,18 @@
    QSharedMemory *sharedMemory;
    ::FaceDetect::Faces faces;
    TrackingTrigger *trackingTrigger;

    // faces newly detected in the current frame?
    ::FaceDetect::Faces triggerFaces;
    // images of faces newly detected in the current frame?
    std::vector<cv::Mat> triggerMats;
    // positions of faces newly detected in the current frame?
    std::vector<ScoredRect> triggerScoredRects;

    // recording trigger
    TriggerElement m_triggerElement;

    // whether to use the SDK's built-in tracking
    bool m_trackingRet;
};

#endif // FACERPCELEMENT_H
QiaoJiaSystem/StructureApp/FaceTrackingWrapper.cpp
New file
@@ -0,0 +1,140 @@
//
// Created by ps on 18-12-18.
//

#include "FaceTrackingWrapper.h"
#include "Debug.h"
#include <FaceDetectServer/rpc/FaceServer.h>
#include <opencv2/opencv.hpp>

static FaceTrackingWrapper g_faceTrackingWrapper;
static std::map<std::string, int> g_channelCache;

static ::FaceDetect::Faces faceTrackingFunc(int channel, cv::Mat &image) {
    FaceDetect::Faces faces;
    BasicFace::FaceImage faceImage{image.cols, image.rows, (int) image.step, image.data};
    THFT_FaceInfo facePos[MAX_DETECT_FACE];
    int faceNum = THFT_FaceTracking(channel, image.data, facePos);
    if (faceNum > 0) {
        for (int i = 0; i < faceNum; i++) {
            FaceDetect::FacePos face;
            auto &pos = facePos[i];
            // copy the plain-old-data part of the SDK struct, excluding the trailing
            // facial-data buffer and face id, which are handled explicitly below
            memcpy(&face, &pos, sizeof(pos) - sizeof(pos.pFacialData) - sizeof(pos.nFaceID));
            face.pFacialData.resize(sizeof(pos.pFacialData));
            memcpy(face.pFacialData.data(), pos.pFacialData, sizeof(pos.pFacialData));
            face.pfaceId = pos.nFaceID;
            // DBG(face.fAngle.confidence);
            faces.push_back(face);
        }
    } else {
        DBG("Face num is 0");
    }
    return faces;
}

FaceTrackingWrapper::FaceTrackingWrapper() {
}

FaceTrackingWrapper::~FaceTrackingWrapper() {
}

void FaceTrackingWrapper::setChannelParam(int channel, const BasicFace::InitParam &initParam) {
    m_mapParam.insert(std::make_pair(channel, initParam));
}

bool FaceTrackingWrapper::initHandle() {
    // todo add gpu support
    int size = m_mapParam.size();
    THFT_Param *param = new THFT_Param[size];
    for (auto &item : m_mapParam) {
        int pos = item.first;
        auto t_param = item.second;
        param[pos].nDeviceID = t_param.nDeviceID;
        param[pos].nImageWidth = t_param.nImageWidth;
        param[pos].nImageHeight = t_param.nImageHeight;
        param[pos].nMaxFaceNum = t_param.nMaxFaceNum;
        param[pos].nSampleSize = t_param.nSampleSize;
        param[pos].nDetectionIntervalFrame = t_param.nDetectionIntervalFrame;
    }
    int nNum = -1;
    nNum = THFT_Create(size, param);
    delete[] param;
    return (nNum > 0);
}

/***
 * @todo
 *
 * @param image
 * @return
 */
std::vector<BasicFace::FaceDetectResult> FaceTrackingWrapper::detectFace(const BasicFace::FaceImage &image) {
    return vector<BasicFace::FaceDetectResult>();
}

std::vector<BasicFace::FaceDetectResult>
FaceTrackingWrapper::trackingFace(int channel, const BasicFace::FaceImage &image) {
    vector<BasicFace::FaceDetectResult> results;
    // ClockTimer ct("CasiaFaceWapper::detectFace");
    if (channel == -1) {
        ERR("invalid face channel, face detect failed");
        return results;
    }

    THFT_FaceInfo facePos[MAX_DETECT_FACE];
    int faceNum = THFT_FaceTracking(channel, image.data, facePos);
    //int faceNum = THFI_DetectFace(channel, image.data, 24, image.width, image.height, facePos, MAX_DETECT_FACE);
    if (faceNum < 0) {
        ERR("THFT_FaceTracking return " << faceNum);
    } else {
        results.resize(faceNum);
        for (int i = 0; i < faceNum; i++) {
            BasicFace::FaceDetectResult &result = results[i];
            THFT_FaceInfo &face = facePos[i];
            result.angle = sqrt(face.fAngle.pitch * face.fAngle.pitch / 3 +
                                face.fAngle.roll * face.fAngle.roll / 3 +
                                face.fAngle.yaw * face.fAngle.yaw / 3);
            result.yaw = face.fAngle.yaw;
            result.pitch = face.fAngle.pitch;
            result.roll = face.fAngle.roll;
            result.left = face.rcFace.left;
            result.top = face.rcFace.top;
            result.width = face.rcFace.right - face.rcFace.left;
            result.height = face.rcFace.bottom - face.rcFace.top;
            // result.score = face.nQuality / 100.0f;
            result.score = face.fAngle.confidence;
            result.trackingId = face.nFaceID;
        }
    }
    return results;
}

/***
 * @todo
 *
 * @param image
 * @return
 */
vector<BasicFace::FaceFeatureResult> FaceTrackingWrapper::extractFace(const BasicFace::FaceImage &image) {
    return vector<BasicFace::FaceFeatureResult>();
}

/***
 * @todo
 * @param feature1
 * @param feature2
 * @return
 */
float FaceTrackingWrapper::compareFeature(BasicFace::Feature &feature1, BasicFace::Feature &feature2) {
    return 0;
}
QiaoJiaSystem/StructureApp/FaceTrackingWrapper.h
New file
@@ -0,0 +1,56 @@
//
// Created by ps on 18-12-18.
//

#ifndef TESTCODE_FACETRACKINGWRAPPER_H
#define TESTCODE_FACETRACKINGWRAPPER_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <time.h>
#include <list>
#include <map>

#include <opencv2/opencv.hpp>

#include <FiStdDefEx.h>
#include <THFaceTracking_i.h>

#include "FaceDefine.h"

//typedef std::list<FaceTrackingInfo> ObjectList;

#define MAX_DETECT_FACE 50

class FaceTrackingWrapper {
public:
    explicit FaceTrackingWrapper();

    virtual ~FaceTrackingWrapper();

    void setChannelParam(int channel, const BasicFace::InitParam &);

    bool initHandle();

    std::vector<BasicFace::FaceDetectResult> detectFace(const BasicFace::FaceImage &image);

    std::vector<BasicFace::FaceDetectResult> trackingFace(int channel, const BasicFace::FaceImage &image);

    vector<BasicFace::FaceFeatureResult> extractFace(const BasicFace::FaceImage &image);

    static float compareFeature(BasicFace::Feature &feature1, BasicFace::Feature &feature2);

private:
    int nGPUDeviceID = 0;

    // SDK initialization parameters, one entry per channel
    std::map<int, BasicFace::InitParam> m_mapParam;

    // targets kept from the previous tracking pass
    // std::map<int, ObjectList> m_objListCache;
};

#endif //TESTCODE_FACETRACKINGWRAPPER_H
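A rough per-frame usage sketch of the new wrapper (assumed flow, not taken from the patch); note that initHandle() uses the channel id directly as an index into the THFT_Param array, so channel ids should be contiguous and start at 0:

#include <opencv2/opencv.hpp>
#include "FaceTrackingWrapper.h"

void runTrackingOnce(cv::Mat &frame) {
    FaceTrackingWrapper wrapper;

    BasicFace::InitParam param;          // defaults from FaceDefine.h
    param.nImageWidth = frame.cols;
    param.nImageHeight = frame.rows;
    wrapper.setChannelParam(0, param);   // configure channel 0

    if (!wrapper.initHandle())           // wraps THFT_Create for all configured channels
        return;

    BasicFace::FaceImage image{frame.cols, frame.rows, (int) frame.step, frame.data};
    auto results = wrapper.trackingFace(0, image);
    for (const auto &r : results) {
        // trackingId stays stable across frames for the same face
        cv::rectangle(frame, cv::Rect(r.left, r.top, r.width, r.height), cv::Scalar(0, 255, 0));
    }
}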
QiaoJiaSystem/StructureApp/TrackingTrigger.h
@@ -10,6 +10,7 @@
struct ScoredRect {
    ScoredRect() : id(-1), isMask(false) {}

    bool isMask;
    float score;
    cv::Rect rect;

@@ -22,7 +23,50 @@
    TrackingTrigger(float threshold) :
            threshold(threshold), faceTrackingId(0) {}

    bool triggerOnce(ScoredRect &rect) {
    bool triggerOnce(ScoredRect &rect, long faceId = -1) {
        if (faceId < 0) {
            return triggerOnce(rect, false);
        } else {
            bool found = false;
            for (auto lastRect: lastScoreRects) {
                if (lastRect.id >= 0 && lastRect.id == faceId) {
                    found = true;
                    rect.id = faceId;
                    rect.properties = lastRect.properties;
                    tempScoreRects.push_back(rect);
                    break;
                }
            }
            if (!found) {
                if (rect.score < threshold) {
                    // tempScoreRects.push_back(rect);
                    return false;
                } else {
                    rect.id = faceId;
                    tempScoreRects.push_back(rect);
                    return true;
                }
            }
            return false;
        }
    }

    void triggerLine() {
        lastScoreRects.swap(tempScoreRects);// = tempScoreRects;
        tempScoreRects.clear();
    }

    ScoredRect &getLastRect() {
        return tempScoreRects[tempScoreRects.size() - 1];
    }

    std::vector<ScoredRect> getLastScoreRects() const {
        return lastScoreRects;
    }

private:
    bool triggerOnce(ScoredRect &rect, bool) {
        bool found = false;
        for (auto lastRect: lastScoreRects) {
            if (lastRect.id >= 0 && (rect.rect & lastRect.rect).area() > lastRect.rect.area() * 0.4) {

@@ -46,23 +90,12 @@
        return false;
    }

    void triggerLine() {
        lastScoreRects = tempScoreRects;
        tempScoreRects.clear();
    }

    ScoredRect &getLastRect() {
        return tempScoreRects[tempScoreRects.size() - 1];
    }

    std::vector<ScoredRect> getLastScoreRects() const {
        return lastScoreRects;
    }

private:
    float threshold;
    std::vector<ScoredRect> lastScoreRects;
    std::vector<ScoredRect> tempScoreRects;
    std::vector<int> lastScoreInts;
    std::vector<int> tempScoreInts;
    std::atomic<long> faceTrackingId;
};
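The per-frame flow implied by the trigger, as a minimal sketch only; the 0.95 threshold and the "detections" pairs of ScoredRect plus SDK face id are placeholders, not project values:

#include <string>
#include <utility>
#include <vector>
#include "TrackingTrigger.h"

static TrackingTrigger trigger(0.95f);

void onFrame(const std::vector<std::pair<ScoredRect, long>> &detections) {
    for (const auto &det : detections) {
        ScoredRect sr = det.first;
        // With a valid SDK face id, triggerOnce() matches against last frame's ids and
        // returns true only for faces that are not being tracked yet.
        if (trigger.triggerOnce(sr, det.second)) {
            trigger.getLastRect().properties["id"] = std::to_string(sr.id);
            // ... fetch attributes / save a crop for this newly seen face ...
        }
    }
    // Promote this frame's rectangles to "last frame" and clear the working buffer.
    trigger.triggerLine();
}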
QiaoJiaSystem/VideoAnalysFromHC/CMakeLists.txt
@@ -52,6 +52,7 @@
        ../StructureApp/RecordVideoElement.cpp
        ../StructureApp/JudgmentRetrogradeTool.cpp
        ../StructureApp/PerimeterElement.cpp
        ../StructureApp/FaceTrackingWrapper.cpp

        ../StructureApp/NewRecordVideoElement.cpp
QiaoJiaSystem/VideoAnalysFromHC/RtspAnalysElement.cpp
@@ -68,6 +68,7 @@
}

void RtspAnalysElement::init() {
    appPref.setIntData("FaceTrackingRet", 1);
    auto lst = m_lDBTool->searchCamDevTableAll();
    auto lst_dev = m_lDBTool->searchConfigTableWithinServerInfo();

@@ -116,6 +117,7 @@
            SETSCORE(sdkDetCoVec, i, t_camIdex + "face.det");
            SETSCORE(sdkComCoVec, i, t_camIdex + "face.cmp");
            DBG(" TESTCODE " << sdkDetCoVec[i] << " " << sdkComCoVec[i]);
            // #todo add param
            break;
        }

@@ -266,11 +268,10 @@
    }
    return result;
}

void RtspAnalysElement::setDataByType(int type,Json::Value& json,int camId) {
void RtspAnalysElement::setDataByType(int type, Json::Value &json, int camId) {
    switch (type) {
        case 4: {
            auto rule=m_lDBTool->searchPerimeterRuleByCamId(camId);
            json["perimeter.area"]=rule.strAreas.toStdString();
            json["perimeter.num"]=rule.nAlarmPeopleNum;

@@ -278,8 +279,7 @@
            json["perimeter.tolerance"]=rule.nTriggertolerance;
            break;
        }
        case 5: {
            auto rule=m_lDBTool->searchCrowdRuleByCamId(camId);
            json["crowd.area"]=rule.strAreas.toStdString();
            json["crowd.num"]=rule.nAlarmPeopleNum;

@@ -287,8 +287,7 @@
            json["crowd.tolerance"]=rule.nTriggertolerance;
            break;
        }
        case 6: {
            auto rule=m_lDBTool->searchActRuleByCamId(camId);
            json["keepRight.leftArea"]=rule.strAreas.toStdString();
            json["keepRight.leftLine"]=rule.strLine.toStdString();

QiaoJiaSystem/build/DataWebServer
Binary files differ