From 168af40fe9a3cc81c6ee16b3e81f154780c36bdb Mon Sep 17 00:00:00 2001
From: Scheaven <xuepengqiang>
Date: Thu, 03 Jun 2021 15:03:27 +0800
Subject: [PATCH] up new v4

---
 main.cpp | 113 ++++++++++++++++++++++++++++++++------------------------
 1 files changed, 64 insertions(+), 49 deletions(-)

diff --git a/main.cpp b/main.cpp
index 812f0d7..2127ec4 100644
--- a/main.cpp
+++ b/main.cpp
@@ -1,14 +1,11 @@
 //
 // Created by Scheaven on 2019/11/18.
 //
-#include "lib/core/tracker_manager.h"
-#include <opencv2/opencv.hpp>
-#include "opencv2/core/core.hpp"
-#include "opencv2/highgui/highgui.hpp"
-#include "tensorflow/core/public/session.h"
-#include "tensorflow/core/protobuf/meta_graph.pb.h"
-#include "tensorflow/core/graph/default_device.h"
-#include "tensorflow/core/platform/env.h"
+#include "h_interface.h"
+#include "std_target.h"
+#include "time.h"
+
+
 #ifdef OPENCV
 #include <opencv2/opencv.hpp>   // C++
 #pragma comment(lib, "opencv_core249.lib")
@@ -17,62 +14,80 @@
 #endif    // OPENCV
 using namespace std;
-std::vector<cv::Rect> boxes; // stores detection boxes
-
+using namespace cv;
+void* handle;
 int main(int argc, char *argv[])
 {
-    // video stream info
+//    // video stream info
     VideoCapture cap;
     VideoCapture cap2;
-    int cam_id = 1;
-    int cam_id2 = 2;
+    int cam_id = 0;
+    int cam_id2 = 1;
 
-    try {
-        cap.open("../data/cs01.avi");
-        cap2.open("../data/cs02.avi");
-    }catch(exception){
-        cout<<"input video error"<<endl;
-        return 0;
-    }
     Mat frame, blob;
     Mat frame2, blob2;
+    const char *conf = "../config.json";
+    int *max_chan;
+    int x = 5;
+    max_chan=&x;
+    handle = create(conf, max_chan);
 
-    TrackerManager TM;
-    TM.add_cam(cam_id);
-    TM.add_cam(cam_id2);
-    while (true) {
-        std::vector<int> cam_ids;
-        std::vector<cv::Mat> frame_vec;
-        std::vector<FRAME_RESULT> results_vec;
-        cam_ids.push_back(cam_id);
-        cam_ids.push_back(cam_id2);
-        // read video frame
+    void *sr;
+    void *sr2;
+    TImage *img = new TImage();
+    TImage *img2 = new TImage();
+    int i = 0;
+    cap = VideoCapture("/data/disk2/01_dataset/03_human/34983128.mp4");
+
+    // string path="/data/disk1/workspace/07_CPP/build/re";
+    // std::vector<string> video_path_vec;
+
+    // globFile(path, video_path_vec, "jpg");
+
+    // for (auto& file:video_path_vec)
+    // {
+    while(1){ /* code */
+        // printf("----start--%s\n", file);
+        // cout<<"----------------\n\n"<<file<<endl;
+        // frame = imread(file);
         cap >> frame;
-        cap2 >> frame2;
-        frame_vec.push_back(frame);
-        frame_vec.push_back(frame2);
-        cout << "start--" << endl;
-        if(!frame.empty()){
-            FRAME_RESULT result;
-            FRAME_RESULT result2;
-            results_vec.push_back(result);
-            results_vec.push_back(result2);
-//            TM.single_detect_tracking(cam_id, frame, result);
-            TM.mul_detect_tracking(cam_ids, frame_vec, results_vec);
-//            cout<< "result: "<< result[0] << endl;
+        // frame = imread("/data/disk1/project/data/04_human/error/4.png");
+        clock_t t_strat2 = clock();
+        if(!frame.empty())
+        {
+            img->width = frame.cols;
+            img->height = frame.rows;
+            img->channel = frame.channels();
+            img->data = frame.data;
+            sr = get_result(handle, img, 0);
+
+            TResult * t_result = (TResult*) sr;
+
+            cout << "=======t_result->count==" << t_result->count << endl;
+            for (int i=0; i<t_result->count; i++)
+                cout << "====1111111111===confidence:" << t_result->targets[i].confidence << endl;
+
+            // if (t_result->count>0)
+            // {
+            //     imwrite("/data/disk2/01_dataset/03_human/02_person/positive/"+file.substr(file.find_last_of("/")), frame);
+            // }else
+            // {
+            //     imwrite("/data/disk2/01_dataset/03_human/02_person/negate/"+file.substr(file.find_last_of("/")), frame);
+            // }
+
         }else{
             cout << "-----------------------over--" << endl;
+            release(handle);
+            release_result(sr);
             break;
         }
-//        // compute efficiency
-//        std::vector<double> layersTimes;
-//        double freq = cv::getTickFrequency() / 1000;
-//        double t = net.getPerfProfile(layersTimes) / freq;
-//        std::string label = cv::format("Inference time for a frame : %.2f ms", t);
-//        putText(frame, label, cv::Point(0, 15), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255));
-//        waitKey(1);
+
+        clock_t t_strat3 = clock();
+        cout << "rps---"<< CLOCKS_PER_SEC/(t_strat3 - t_strat2) <<endl;
     }
     return 0;
 }
+
--
Gitblit v1.8.0