//
// Created by Scheaven on 2020/3/23.
//
#include "ari_manager.h"
#include "../additional/fall_run_wander.h"

AriManager::AriManager()
{
    Config config;
    if(m_staticStruct::type == 2)
        config.net_type = SMALL;
    else
        config.net_type = COMMON;
    this->detector = std::shared_ptr<Detector>(new Detector());
    this->detector->init(config);
}

AriManager::~AriManager()
{
}

void AriManager::release()
{
    DetecterManager::getInstance()->release();
}

void AriManager::init_load_model()
{
    DEBUG("::loading detecter model!");
    DetecterManager::getInstance();
}

bool AriManager::add_cam(const int cam_id)
{
    if (cam_id == (this->CAMERAS_VCT.size() + 1))
    {
        // NOTE: the template argument was lost in the source; a DeepSORT-style
        // tracker type is assumed here.
        auto cam_tracker = std::make_shared<tracker>(max_cosine_distance, nn_budget);
        this->CAMERAS_VCT.push_back(cam_tracker);
        return true;
    }
    else if (cam_id <= this->CAMERAS_VCT.size())
    {
        WARN("The camera ID " + to_string(cam_id) + " is occupied!");
        return false;
    }
    else
    {
        WARN("Camera ID " + to_string(cam_id) + " is discontinuous!");
        return false;
    }
}

void AriManager::single_SDK(const int cam_id, const void *img, TResult *t_result, char* img_time, const char* mode)
{
    TImage *frame_img = (TImage*)img;
    // DEBUG((boost::format("%f, %f")%frame_img->width %frame_img->height).str());
    cv::Mat frame(Size(frame_img->width, frame_img->height), CV_8UC3); // cv::Size is (w,h); cv::Mat is (rows=h, cols=w)
    frame.data = (uchar*)frame_img->data; // note: must not be written as (uchar*)pFrameBGR->data
    cv::Mat draw_frame = frame.clone();

    Timer s_timer;
    s_timer.reset();

    DETECTIONS detections;
    DetecterManager::getInstance()->detecter_main(draw_frame, detections);
    s_timer.out("eve detecter_main");

    std::string mode_type = mode;
    DEBUG("detections human size::" + to_string(detections.size()));
    if(detections.size() > 0)
    {
        if (mode_type == "video")
        {
            // if(HUMAN_STRUCT::reid_Extractor->featsEncoder(frame, detections) == false)
            // {
            //     WARN("Encoder human feature failed!");
            // }
            // s_timer.out(to_string(detections.size()) + " :eve reid_Extractor");
            single_tracker(cam_id, detections, img_time);
            s_timer.out("eve single_tracker");
            switch_SDK_TResult(cam_id, frame, detections, t_result);
            s_timer.out("eve switch_SDK_TResult");
        }
        // (handling of non-"video" modes could not be recovered from the source)
    }
}
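/* Usage sketch (illustrative only, not part of the SDK): a minimal single_SDK
 * call on one BGR frame. Assumptions not confirmed by this file: the TImage
 * field names/types, the timestamp format, and that the camera id passed here
 * lines up with the slot created by add_cam().
 *
 *     AriManager manager;
 *     manager.init_load_model();
 *     manager.add_cam(1);                        // first camera slot
 *
 *     cv::Mat bgr = cv::imread("frame.jpg");     // any BGR image
 *     TImage timg;
 *     timg.width  = bgr.cols;
 *     timg.height = bgr.rows;
 *     timg.data   = (char*)bgr.data;
 *
 *     TResult result;
 *     char ts[] = "2020-03-23 12:00:00:000";     // timestamp format assumed
 *     manager.single_SDK(1, &timg, &result, ts, "video");
 */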
// Detection-only path for a single image (no tracking). NOTE: the original
// signature of this function was lost in the source; the name and parameters
// used here are assumptions based on how the body uses them.
void AriManager::single_image(const int cam_id, const void *img, TResult *t_result)
{
    TImage *frame_img = (TImage*)img;
    cv::Mat frame(Size(frame_img->width, frame_img->height), CV_8UC3);
    frame.data = (uchar*)frame_img->data;

    // NOTE: the element types below were lost in the source; they are assumed
    // to follow the yolo-tensorrt style Detector API used by this->detector.
    std::vector<Result> result_vec;
    std::vector<BatchResult> batch_res;
    std::vector<cv::Mat> batch_img;
    // cv::Mat image0 = cv::imread("/data/disk1/project/03_tensorRT/yolo-tensorrt/configs/dog.jpg", cv::IMREAD_UNCHANGED);
    // cv::Mat image1 = cv::imread("/data/disk1/project/03_tensorRT/yolo-tensorrt/configs/person.jpg", cv::IMREAD_UNCHANGED);
    batch_img.push_back(frame.clone());
    // batch_img.push_back(image0.clone());
    // batch_img.push_back(image1.clone());
    // cv::imshow("img", image0);
    // cv::waitKey(0);

    this->detector->detect(batch_img, batch_res);

    t_result->targets = (Target*)malloc(sizeof(Target) * batch_res[0].size());
    int w_count = 0;
    for (const auto &result_box : batch_res[0])
    {
        if(result_box.id == 1) // only keep boxes of class id 1
        {
            Target target;
            init_target(&target);
            target.rect.left = result_box.rect.x;
            target.rect.top = result_box.rect.y;
            target.rect.right = result_box.rect.x + result_box.rect.width;
            target.rect.bottom = result_box.rect.y + result_box.rect.height;
            target.confidence = result_box.prob * 100;
            // target.id = 1;
            // target.attribute can be filled in as needed; the output must be JSON-formatted
            float mv_velocity = 0;
            int runScore = 0;
            string attribute_json = "{";
            if (m_staticStruct::fall_rate != 0)
            {
                // detect_fall(tmpbox, track);
                attribute_json += "\"fallScore\":" + to_string(0) + ",";
            }
            if (m_staticStruct::mv_velocity != 0)
            {
                attribute_json += "\"runScore\":" + to_string(0) + ",";
            }
            attribute_json += "\"hatScore\":" + to_string(0) + ",";
            attribute_json += "\"helmetScore\":" + to_string(0) + ",";
            attribute_json += "\"headScore\":" + to_string(0) + ",";
            attribute_json += "\"maskScore\":" + to_string(0) + ",";
            attribute_json += "\"smokingScore\":" + to_string(0) + ",";
            DEBUG("image attribute_json:: " + attribute_json);
            if(attribute_json.length() > 2)
            {
                // convert to the output JSON format, e.g. {"fallScore":100,"runScore":15.8,"wanderScore":10}
                attribute_json = attribute_json.substr(0, attribute_json.length() - 1) + "}";
                target.attribute = new char[strlen(attribute_json.c_str()) + 1];
                target.attribute_size = strlen(attribute_json.c_str());
                strcpy(target.attribute, attribute_json.c_str());
            }
            t_result->targets[w_count] = target;
            w_count++;
        }
    }
    std::cout << "eve batch_res size:: " << batch_res[0].size() << " w_count: " << w_count << std::endl;
    t_result->count = w_count;
    // draw_SDK_result(cam_id, frame, t_result);
}

void AriManager::switch_SDK_TResult(DETECTIONS detections, TResult *t_result)
{
    t_result->targets = (Target*)malloc(sizeof(Target) * detections.size());
    int w_count = 0;
    for(auto& detection : detections)
    {
        Target target;
        init_target(&target);
        // RESULT_STRUCT result;
        DETECTBOX tmpbox = detection.tlwh;
        istringstream iss(random_int(6));
        iss >> target.id;
        target.rect.left = tmpbox(0);
        target.rect.top = tmpbox(1);
        target.rect.right = tmpbox(0) + tmpbox(2);
        target.rect.bottom = tmpbox(1) + tmpbox(3);
        target.confidence = detection.confidence * 100;

        float mv_velocity = 0;
        int runScore = 0;
        string attribute_json = "{";
        if (m_staticStruct::fall_rate != 0)
        {
            // detect_fall(tmpbox, track);
            attribute_json += "\"fallScore\":" + to_string(detection.fallScore) + ",";
        }
        if (m_staticStruct::mv_velocity != 0)
        {
            attribute_json += "\"runScore\":" + to_string(detection.runScore) + ",";
        }
        attribute_json += "\"hatScore\":" + to_string(detection.hatScore) + ",";
        attribute_json += "\"helmetScore\":" + to_string(detection.helmetScore) + ",";
        attribute_json += "\"headScore\":" + to_string(detection.headScore) + ",";
        attribute_json += "\"maskScore\":" + to_string(detection.maskScore) + ",";
        attribute_json += "\"smokingScore\":" + to_string(detection.smokeScore) + ",";
        DEBUG("image attribute_json:: " + attribute_json);
        if(attribute_json.length() > 2)
        {
            // convert to the output JSON format, e.g. {"fallScore":100,"runScore":15.8,"wanderScore":10}
            attribute_json = attribute_json.substr(0, attribute_json.length() - 1) + "}";
            target.attribute = new char[strlen(attribute_json.c_str()) + 1];
            target.attribute_size = strlen(attribute_json.c_str());
            strcpy(target.attribute, attribute_json.c_str());
        }
        t_result->targets[w_count] = target;
        w_count++;
    }
    t_result->count = w_count;
}
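/* For reference, the attribute string built above serializes per-target scores
 * as flat JSON. With all analyses enabled, one target's attribute looks roughly
 * like this (values illustrative):
 *
 *     {"fallScore":0,"runScore":0,"hatScore":0,"helmetScore":0,
 *      "headScore":0,"maskScore":0,"smokingScore":0}
 *
 * The tracker-based overload below additionally emits "fallTime" and
 * "wanderTime" fields.
 */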
// Convert the tracker state into the result format the SDK expects.
void AriManager::switch_SDK_TResult(int cam_id, cv::Mat img, DETECTIONS detections, TResult *t_result)
{
    t_result->targets = (Target*)malloc(sizeof(Target) * this->CAMERAS_VCT[cam_id]->tracks.size());
    int w_count = 0;
    for(auto& track : this->CAMERAS_VCT[cam_id]->tracks)
    {
        // if(not track->isCurrent) // skip tracks that are not in the current frame
        //     continue;
        Target target;
        init_target(&target);
        // RESULT_STRUCT result;
        if(!track->is_confirmed() || track->time_since_update >= 1) continue;
        // if(track->is_confirmed() && track->time_since_update >= 1) continue;
        /*
         * Algorithm idea: while tracking, can a box predicted at the wrong
         * position be force-updated to the new box without changing its id?
         */
        // DETECTBOX tmpbox = track->to_tlwh();
        DETECTBOX tmpbox = track->to_xywh();
        target.rect.left = tmpbox(0);
        target.rect.top = tmpbox(1);
        target.rect.right = tmpbox(0) + tmpbox(2);
        target.rect.bottom = tmpbox(1) + tmpbox(3);
        // DEBUG((boost::format("%s:%d, %s:%d, %s:%d, %s:%d") %"id:" %track->track_id %"conf:" %track->confidence %", to tmpbox y:" %tmpbox(1) %", tmpbox(3) h:" %tmpbox(3)).str());
        target.id = track->track_id;
        target.confidence = track->confidence;

        float mv_velocity = 0;
        int runScore = 0;
        string attribute_json = "{";
        if (m_staticStruct::fall_rate != 0)
        {
            // detect_fall(tmpbox, track);
            attribute_json += "\"fallScore\":" + to_string(track->rateScale) + ","
                            + "\"fallTime\":" + to_string(track->fall_total_time) + ",";
        }
        if (m_staticStruct::mv_velocity != 0)
        {
            if(track->isRuning)
            {
                mv_velocity = detect_runing(tmpbox, track, img);
                float run_velocity = mv_velocity;
                if(tmpbox(2)/tmpbox(3) < 0)
                {
                    run_velocity *= (img.cols*img.rows)/(tmpbox(2)*tmpbox(3));
                }
                else
                {
                    run_velocity *= 10;
                }
                // std::cout << mv_velocity << "mv_velocity::::::::::" << run_velocity << "::::::::::::" << std::endl;
                runScore = (track->confidence + 2) * int(std::min(4 * run_velocity, 100.0f));
                attribute_json += "\"runScore\":" + to_string(runScore) + ",";
            }
            else if(track->is_hat)
            {
                mv_velocity = detect_runing(tmpbox, track, img);
                float run_velocity = mv_velocity;
                if(tmpbox(2)/tmpbox(3) < 0)
                {
                    run_velocity *= (img.cols*img.rows)/(tmpbox(2)*tmpbox(3));
                }
                else
                {
                    run_velocity *= 15;
                }
                // std::cout << mv_velocity << "mv_velocity::::::::::" << run_velocity << "::::::::::::" << std::endl;
                // NOTE: the run-score computation of this branch was lost in the
                // source; it is assumed to mirror the branch above.
                runScore = (track->confidence + 2) * int(std::min(4 * run_velocity, 100.0f));
                attribute_json += "\"runScore\":" + to_string(runScore) + ",";
            }
            // std::cout << track->track_id << "-- human wander last time:" << track->last_time << ":" << m_staticStruct::wander_time << std::endl;
            if(track->last_time > m_staticStruct::wander_time)
                track->isWander = true;
            else
                track->isWander = false;
            attribute_json += "\"wanderTime\":" + to_string(track->last_time) + ",";
        }
        if(track->is_hat)
        {
            attribute_json += "\"hatScore\":" + to_string(track->hatScore) + ",";
        }
        else
        {
            attribute_json += "\"hatScore\":" + to_string(10) + ",";
        }
        if(track->is_mask)
        {
            attribute_json += "\"maskScore\":" + to_string(track->maskScore) + ",";
        }
        else
        {
            attribute_json += "\"maskScore\":" + to_string(10) + ",";
        }
        if(track->is_smoke)
        {
            attribute_json += "\"smokingScore\":" + to_string(track->smokeScore) + ",";
        }
        else
        {
            attribute_json += "\"smokingScore\":" + to_string(10) + ",";
        }
        // cout << ":::::::::::::" << attribute_json << endl;
        if(attribute_json.length() > 2)
        {
            // convert to the output JSON format, e.g. {"fallScore":100,"runScore":15.8,"wanderScore":10}
            attribute_json = attribute_json.substr(0, attribute_json.length() - 1) + "}";
            target.attribute = new char[strlen(attribute_json.c_str()) + 1];
            target.attribute_size = strlen(attribute_json.c_str());
            strcpy(target.attribute, attribute_json.c_str());
        }
        t_result->targets[w_count] = target;
        w_count++;
    }
    t_result->count = w_count;
    // printf("================================%d\n\n\n\n", t_result->count);
#ifdef S_DEBUG
    draw_SDK_result(cam_id, img, t_result);
#endif
}
void AriManager::single_tracker(int cam_id, DETECTIONS& detections, char* img_time)
{
    // DETECTIONS detections = deal_features(boxes, feats_vec); // enable this when tracking on externally supplied features
    // printf("cam_id::::%d", cam_id);
    std::string str2 = img_time;
    img_time[str2.find_last_of(":")] = '.'; // turn the last ':' into '.' (presumably the separator before milliseconds)
    // DEBUG(img_time);
    auto cam_tracker = this->CAMERAS_VCT[cam_id];
    DEBUG("single_tracker start:: ");
    cam_tracker->predict();
    cam_tracker->update(CAMERAS_VCT, detections, cam_id, img_time);
    DEBUG("single_tracker::");
    this->CAMERAS_VCT[cam_id] = cam_tracker;
}

void AriManager::init_target(Target *t)
{
    t->attribute = NULL;
    t->feature = NULL;
    t->id = 0;
    t->rect = TRect{0, 0, 0, 0};
    t->confidence = 0;
    t->attribute_size = 0;
    t->feature_size = 0;
}
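/* Cleanup sketch (illustrative only; this file does not show who owns the
 * returned memory): t_result->targets is allocated with malloc() and each
 * target.attribute with new char[], so a caller that owns the TResult would
 * presumably release it along these lines:
 *
 *     for (int i = 0; i < result.count; ++i)
 *         delete[] result.targets[i].attribute;  // NULL for targets without attributes
 *     free(result.targets);
 */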