//
// Created by Scheaven on 2019/11/18.
//
#include "lib/core/tracker_manager.h"
#include <iostream>
#include <vector>
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "tensorflow/core/public/session.h"
#include "tensorflow/core/protobuf/meta_graph.pb.h"
#include "tensorflow/core/graph/default_device.h"
#include "tensorflow/core/platform/env.h"

#ifdef OPENCV
#include <opencv2/opencv.hpp> // C++
#pragma comment(lib, "opencv_core249.lib")
#pragma comment(lib, "opencv_imgproc249.lib")
#pragma comment(lib, "opencv_highgui249.lib")
#endif // OPENCV

using namespace std;
using namespace cv;

std::vector<cv::Rect> boxes; // stores boxes (element type assumed; the original template argument was lost, and boxes is unused below)

int main(int argc, char *argv[])
{
    // video stream setup
    VideoCapture cap;
    VideoCapture cap2;
    int cam_id = 1;
    int cam_id2 = 2;
    try
    {
        cap.open("../data/cs01.avi");
        cap2.open("../data/cs02.avi");
    }catch(exception &){
        cout << "input video error" << endl;
    }

    // tracker manager instance; the type name is assumed from lib/core/tracker_manager.h
    TrackerManager TM;
    Mat frame, frame2;

    while(true)
    {
        std::vector<int> cam_ids;
        std::vector<cv::Mat> frame_vec;
        std::vector<FRAME_RESULT> results_vec;
        cam_ids.push_back(cam_id);
        cam_ids.push_back(cam_id2);

        // read one frame from each video
        cap >> frame;
        cap2 >> frame2;
        frame_vec.push_back(frame);
        frame_vec.push_back(frame2);

        cout << "start--" << endl;
        if(!frame.empty()){
            FRAME_RESULT result;
            FRAME_RESULT result2;
            results_vec.push_back(result);
            results_vec.push_back(result2);
            // TM.single_detect_tracking(cam_id, frame, result);
            TM.mul_detect_tracking(cam_ids, frame_vec, results_vec);
            // cout << "result: " << result[0] << endl;
        }else{
            cout << "-----------------------over--" << endl;
            break;
        }

        // // measure inference time per frame
        // std::vector<double> layersTimes;
        // double freq = cv::getTickFrequency() / 1000;
        // double t = net.getPerfProfile(layersTimes) / freq;
        // std::string label = cv::format("Inference time for a frame : %.2f ms", t);
        // putText(frame, label, cv::Point(0, 15), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255));
        // waitKey(1);
    }
    return 0;
}