pansen
2018-12-19 ae87bf6bbddb781ff7316d032f8dc2302fb608ec
Merge QiaojiaoSystem Code (svn version is r3345)
8 files changed
404 lines changed
QiaoJiaSystem/DataManagerServer/http_configserver.cpp    43
QiaoJiaSystem/DataManagerServer/http_configserver.h        2
QiaoJiaSystem/RapidStructureApp/TriggerElement.cpp         2
QiaoJiaSystem/StructureApp/AppPipeController.cpp         167
QiaoJiaSystem/StructureApp/AppPipeController.h            18
QiaoJiaSystem/StructureApp/FaceRpcElement.cpp              2
QiaoJiaSystem/StructureApp/NewRecordVideoElement.cpp     139
QiaoJiaSystem/StructureApp/NewRecordVideoElement.h        31
QiaoJiaSystem/DataManagerServer/http_configserver.cpp
@@ -2336,7 +2336,7 @@
            // sub = sub > 3000 ? sub - 3000 : 0000;
            //shijian yuejie buchang
            // sub = sub < 0 ? 0000 : sub;
            int haomiao = sub % 1000;
            int haomiao = (sub % 1000) > 50 ? ((sub % 1000) - 50) : 0;
            sub /= 1000;
            int m = sub;
            int shi, fen, miao;
@@ -2345,10 +2345,10 @@
                shi = m / 3600;
                fen = m / 60 % 60;
                miao = m % 60;
                sprintf(ch_time, "%02d:%02d:%02d.%03d", shi, fen, miao, haomiao);
                cout << ch_time << endl;
            } else printf("输入数值太大");
                sprintf(ch_time, "%02d:%02d:%02d.%02d", shi, fen, miao, haomiao / 10);
            } else printf("输入数值太大");
            DBG("ch_time" << ch_time);
            std::string str_time(ch_time);
            //#todo 按照时间命名
            std::string str_imgName(AppUtil::getTimeString() + ".jpg");// = "test.jpg";
@@ -2360,7 +2360,7 @@
            if (str_time > str_tmpTime) {
                ERR("{\"error\":\"Time error\"}" << str_time << " str_tmpTime " << str_tmpTime);
                str_time = str_tmpTime.size() > 0 ? str_tmpTime.substr(0, str_tmpTime.rfind(".")) : str_time;
                str_time = str_tmpTime.size() > 0 ? str_tmpTime : str_time;
//            return "{\"error\":\"Time error\"}";
            }
@@ -2541,16 +2541,19 @@
                DBG("path=" << vec[i]);
                t_FilePath += vec[i] + ".mp4";
                sub = t - t1;
                find = true;
                find = sub < getVideoTime(t_FilePath);
                break;
            }
        } else {
            sub = t - t1;
            if (t >= t1 && sub <= 3000) {
            if (t >= t1) {
                DBG("path=" << vec[i]);
                t_FilePath += vec[i] + ".mp4";
                find = true;
                find = sub < getVideoTime(t_FilePath);
                DBG("find:" << find);
                break;
            }
        }
@@ -2565,6 +2568,30 @@
}
qint64 devHttpServer_c::getVideoTime(const std::string &videoPath) {
    std::string cmd_getVideoFileTime(
        " ffmpeg -i '" + videoPath + "' 2>&1 | grep 'Duration' | cut -d ' ' -f 4 | sed s/,//");
    std::string str_tmpTime = AppUtil::getCmdResult(cmd_getVideoFileTime);
    DBG("str_tmpTime:" << str_tmpTime);//00:00:06.89
    qint64 len_ms = 0;
    QStringList list = QString::fromStdString(str_tmpTime).split(":");
    if (list.size() == 3) {
        len_ms = list[0].toInt() * 60 * 60 * 1000;
        len_ms += list[1].toInt() * 60 * 1000;
        QStringList s = list[2].split(".");
        if (s.size() == 2) {
            len_ms += s[0].toInt() * 1000;
            len_ms += s[1].toInt() * 10;
        }
    }
    DBG("len_ms:" << len_ms);
    return len_ms;
}
std::vector<std::string> devHttpServer_c::forEachFile(const std::string &dir_name) {
    std::vector<std::string> v;
    auto dir = opendir(dir_name.data());
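Not part of the commit, but useful when reading it: the new getVideoTime() helper shells out to ffmpeg, grabs the Duration field (e.g. 00:00:06.89) and converts it to milliseconds, so the lookup above can test sub < getVideoTime(t_FilePath) against the real clip length instead of the old hard-coded 3000 ms window. A minimal standalone sketch of the same conversion follows; durationToMs() is an illustrative name, and like the original it assumes a two-digit (centisecond) fraction.

// Sketch only: the same arithmetic as getVideoTime(), minus the ffmpeg call.
// Assumes "HH:MM:SS.cc" with a two-digit (centisecond) fraction, as in the
// ffmpeg Duration output shown in the DBG line above.
#include <QString>
#include <QStringList>
#include <QtGlobal>
#include <iostream>

qint64 durationToMs(const QString &duration) {
    qint64 ms = 0;
    const QStringList parts = duration.trimmed().split(":");
    if (parts.size() == 3) {
        ms += parts[0].toInt() * 60 * 60 * 1000;   // hours
        ms += parts[1].toInt() * 60 * 1000;        // minutes
        const QStringList sec = parts[2].split(".");
        if (sec.size() == 2) {
            ms += sec[0].toInt() * 1000;           // seconds
            ms += sec[1].toInt() * 10;             // centiseconds -> ms
        }
    }
    return ms;
}

int main() {
    std::cout << durationToMs("00:00:06.89") << std::endl;   // prints 6890
    return 0;
}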
QiaoJiaSystem/DataManagerServer/http_configserver.h
@@ -173,6 +173,8 @@
    std::string getVideoPathByTime(const std::string& time,const std::string& camId,qint64& sub);
    std::vector<std::string> forEachFile(const std::string &dir_name);
    qint64 getVideoTime(const std::string& videoPath);
};
#endif
QiaoJiaSystem/RapidStructureApp/TriggerElement.cpp
@@ -70,7 +70,7 @@
}
bool TriggerElement::getTriggerState() const {
    return triggerState;
    return state;
}
void TriggerElement::threadInitial() {
QiaoJiaSystem/StructureApp/AppPipeController.cpp
@@ -36,17 +36,16 @@
    faceRpcElement(camId + "faceRpc"),
    m_json(json),
    faceExtractElement(camId + "faceExtract"),
   // peTriggerElement(json["perimeter.tolerance"].asInt(), json["perimeter.delay"].asInt()),
    // peTriggerElement(json["perimeter.tolerance"].asInt(), json["perimeter.delay"].asInt()),
//    leftTriggerElement(json["keepRight.tolerance"].asInt(), json["keepRight.delay"].asInt()),
//    rightTriggerElement(json["keepRight.tolerance"].asInt(), json["keepRight.delay"].asInt()),
   // crowdTriggerElement(json["crowd.tolerance"].asInt(), json["crowd.delay"].asInt()),
    // crowdTriggerElement(json["crowd.tolerance"].asInt(), json["crowd.delay"].asInt()),
    triggerElement(25, 4),
    recordVideoElement(camId, json["rtsp"].asString()),
    newRecordVideoElement(camId),
    perimeterElement(1),
    crowdElement(json["crowd.num"].asInt()),
    bRecordVideoEnable(RecordVideoEnable)
{
    bRecordVideoEnable(RecordVideoEnable) {
    init();
    initPerimeter();
    initCrowd();
@@ -54,10 +53,7 @@
}
AppPipeController::~AppPipeController() {
      if(recordStatus==RECORD_DOING)
      {
          newRecordVideoElement.endRecord();
      }
}
//************************************
@@ -155,23 +151,33 @@
    }
   //#todo
  //  int max=appPref.getIntData("n_cut_max_duration");
    recordInit(20,100);
    //#todo
    //  int max=appPref.getIntData("n_cut_max_duration");
    // recordInit(40,100);
    videoCaptureElement.registerConnector([&] {
        cv::Mat imageTemp = videoCaptureElement.getImage();
        std::string strNewTime;
        strNewTime = AppUtil::getTimeUSecString();
//        cv::putText(imageTemp, strNewTime, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN, 5,
//                    cv::Scalar(255, 255, 0), 2);
        if (m_camId.size() > 0) {
            strNewTime = AppUtil::getTimeUSecString();
            if(bRecordVideoEnable)
            {
                newRecordVideoElement.pushImgBuf(strNewTime,videoCaptureElement.getImage());
                setSdkTrigger();
                doRecord();
            if (bRecordVideoEnable) {
                newRecordVideoElement.pushImgBuf(strNewTime, imageTemp);
                newRecordVideoElement.setSdkTrigger(faceRpcElement.getTriggerState() ||//TODO
                                                    yoloRpcElement.getTrigger() ||
                                                    leftJudgment.getTriggerState() ||
                                                    rightJudgment.getTriggerState() ||
                                                    perimeterElement.getTriggerState() ||
                                                    crowdElement.getTriggerState());
                if (!newRecordVideoElement.isBusy()) {
                    newRecordVideoElement.submit();
                }
            }
        } else {
@@ -193,7 +199,7 @@
            //#todo
            faceRpcElement.setProperty("time", strNewTime);
//            faceRpcElement.setProperty("time", uuid);
            faceRpcElement.setImage(videoCaptureElement.getImage());
            faceRpcElement.setImage(imageTemp);
            faceRpcElement.submit();
        }
@@ -201,12 +207,12 @@
//            yoloRpcElement.setProperty("uuid", uuid);
            //#todo
            yoloRpcElement.setProperty("time", strNewTime);
            yoloRpcElement.setImage(videoCaptureElement.getImage());
            yoloRpcElement.setImage(imageTemp);
            yoloRpcElement.submit();
        }
        if (!imageDrawElement.isBusy()) {
            imageDrawElement.setImage(videoCaptureElement.getImage());
            imageDrawElement.setImage(imageTemp);
            imageDrawElement.submit();
        }
@@ -243,6 +249,18 @@
            ImageShowElement::showImage(to_string(this->m_index), *imageDrawElement.getImage());
        }
    });
    newRecordVideoElement.registerConnector([&] {
        if (faceRpcElement.getTriggerState() ||//TODO
            yoloRpcElement.getTrigger() ||
            leftJudgment.getTriggerState() ||
            rightJudgment.getTriggerState() ||
            perimeterElement.getTriggerState() ||
            crowdElement.getTriggerState()
            ) {
        }
    });
    registerElement(videoCaptureElement);
    if (m_json["yolo.enable"].asString() == "1") {
@@ -253,8 +271,8 @@
        registerElement(faceExtractElement);
    }
    registerElement(imageDrawElement);
    videoCaptureElement.setOutPutInterval(3);
    registerElement(newRecordVideoElement);
    videoCaptureElement.setOutPutInterval(5);
    faceExtractElement.setProperty("index", to_string(m_index));
    //#todo setProperty
//    faceExtractElement.setProperty("index", to_string(m_index));
@@ -319,7 +337,7 @@
    //area="[{\"x\":100,\"y\":6},{\"x\":100,\"y\":1200},{\"x\":1800,\"y\":1200},{\"x\":1800,\"y\":6}]";
    perimeterElement.setMask(area);
    perimeterElement.setFdfs(fdfsClient);
    perimeterElement.setProperty("sdkType","perimeter");
    perimeterElement.setProperty("sdkType", "perimeter");
    yoloRpcElement.registerConnector([&] {
        if (!perimeterElement.isBusy()) {
@@ -356,7 +374,7 @@
    //area="[{\"x\":100,\"y\":6},{\"x\":100,\"y\":1200},{\"x\":1800,\"y\":1200},{\"x\":1800,\"y\":6}]";
    crowdElement.setMask(area);
    crowdElement.setFdfs(fdfsClient);
    crowdElement.setProperty("sdkType","crowd");
    crowdElement.setProperty("sdkType", "crowd");
    yoloRpcElement.registerConnector([&] {
        if (!crowdElement.isBusy()) {
            crowdElement.setObjsResults(yoloRpcElement.getObjects());
@@ -405,104 +423,5 @@
    registerElement(leftJudgment);
    registerElement(rightJudgment);
}
//================
void AppPipeController::recordInit(int videoMin, int videoMax) {
    sdkTrigger = false;
    fileMin = videoMin;
    fileMax = videoMax;
    triggerDelay = fileMin/2;
    recordStatus = RECORD_STOP;
    videoLength = 0;
    recordDelay = 0;
}
void AppPipeController::setSdkTrigger() {
    if( faceRpcElement.getTriggerState() ||//TODO
        yoloRpcElement.getTrigger() ||
        leftJudgment.getTriggerState()||
        rightJudgment.getTriggerState() ||
        perimeterElement.getTriggerState() ||
        crowdElement.getTriggerState()
            ){
        triggerDelay = 0;
        sdkTrigger = true;
    } else{
        if(triggerDelay++ >= fileMin/2) {
            sdkTrigger = false;
        }
        else{
            sdkTrigger = true;
        }
    }
}
void AppPipeController::doRecord() {
    switch(recordStatus)
    {
        case RECORD_STOP:
//        DBG("recordDelay:" << recordDelay);
//        DBG("videoLength:" << videoLength);
//        DBG("sdkTrigger:" << sdkTrigger);
            videoLength = 0;
            recordDelay = 0;
            if(sdkTrigger){
                recordStatus = RECORD_DOING;
                newRecordVideoElement.startRecord();
            }
            else
            {
                ImgInfo info;
                newRecordVideoElement.getImg(info);
            }
            break;
        case RECORD_DOING:
            videoLength++;
            if(sdkTrigger){
                if(videoLength < fileMax){
                    newRecordVideoElement.doRecord();
                }
                else{
                    recordStatus = RECORD_STOP;
                    newRecordVideoElement.endRecord();
                }
            }else{
                recordStatus = RECORD_ENDING;
                newRecordVideoElement.doRecord();
            }
            break;
        case RECORD_ENDING:
            DBG("recordDelay:" << recordDelay);
            DBG("videoLength:" << videoLength);
            DBG("sdkTrigger:" << sdkTrigger);
            recordDelay++;
            videoLength++;
            if(sdkTrigger){
                if( (recordDelay < fileMin/4) &&
                    (videoLength < fileMax)){
                    newRecordVideoElement.doRecord();
                }else{
                    recordStatus = RECORD_STOP;
                    newRecordVideoElement.endRecord();
                }
            }else{
                if( (recordDelay < fileMin/2) &&
                    (videoLength < fileMax)){
                    newRecordVideoElement.doRecord();
                }else{
                    recordStatus = RECORD_STOP;
                    newRecordVideoElement.endRecord();
                }
            }
            break;
        default:
            break;
    }
}
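The setSdkTrigger()/doRecord() state machine deleted here from AppPipeController reappears, essentially verbatim, as NewRecordVideoElement::setSdkTrigger()/Record() further down. A self-contained sketch of that behaviour, not part of the commit: the enum, the Recorder struct and main() are illustrative, while the transition rules and the 40/100 limits follow the diff (recordInit(40, 100) in NewRecordVideoElement::threadInitial() below).

// Self-contained sketch of the recording state machine removed above and
// re-added in NewRecordVideoElement::Record() below (not part of the commit).
#include <iostream>

enum RecordState { RECORD_STOP, RECORD_DOING, RECORD_ENDING };

struct Recorder {
    int fileMin = 40, fileMax = 100;      // recordInit(40, 100)
    bool sdkTrigger = false;
    int triggerDelay = fileMin / 2;
    RecordState state = RECORD_STOP;
    int videoLength = 0, recordDelay = 0;

    // Trigger latch: once a detector fires, sdkTrigger stays high for another
    // fileMin/2 frames after the last detection.
    void setSdkTrigger(bool isTrigger) {
        if (isTrigger) { triggerDelay = 0; sdkTrigger = true; }
        else sdkTrigger = (triggerDelay++ < fileMin / 2);
    }

    void step() {                         // one call per buffered frame
        switch (state) {
        case RECORD_STOP:
            videoLength = recordDelay = 0;
            if (sdkTrigger) { state = RECORD_DOING; std::cout << "start\n"; }
            break;                        // else the frame is simply dropped
        case RECORD_DOING:
            ++videoLength;
            if (!sdkTrigger) state = RECORD_ENDING;
            else if (videoLength >= fileMax) { state = RECORD_STOP; std::cout << "end (max)\n"; }
            break;
        case RECORD_ENDING: {
            // Note: a re-trigger while ENDING does not return to DOING; it
            // only shortens the allowed tail from fileMin/2 to fileMin/4.
            ++recordDelay; ++videoLength;
            int tail = sdkTrigger ? fileMin / 4 : fileMin / 2;
            if (recordDelay >= tail || videoLength >= fileMax) {
                state = RECORD_STOP; std::cout << "end (tail)\n";
            }
            break;
        }
        }
    }
};

int main() {
    Recorder r;
    for (int frame = 0; frame < 200; ++frame) {
        bool detection = (frame >= 30 && frame < 60);   // one 30-frame event
        r.setSdkTrigger(detection);
        r.step();
    }
    return 0;
}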
QiaoJiaSystem/StructureApp/AppPipeController.h
@@ -93,24 +93,6 @@
    std::string getFullFileName();
    void recordInit(int videoMin, int videoMax);
    void setSdkTrigger();
    void doRecord();
    int fileMin;
    int fileMax;
    bool sdkTrigger;
    int triggerDelay;
#define RECORD_STOP (0)
#define RECORD_DOING (1)
#define RECORD_ENDING (2)
    int recordStatus;
    int videoLength;
    int recordDelay;
};
#endif // APPPIPECONTROLLER_H
QiaoJiaSystem/StructureApp/FaceRpcElement.cpp
@@ -32,7 +32,7 @@
FaceRpcElement::FaceRpcElement(string shareMemoryName) :
//#todo
    rpcClient(appPref.getStringData("faceDete.proxy"), appPref.getStringData("faceDete.ip"),
              appPref.getIntData("faceDete.port"), "tcp"), m_triggerElement(0, 50),
              appPref.getIntData("faceDete.port"), "tcp"), m_triggerElement(0, 0),
//    rpcClient("faceServer","",10002,"tcp"),
    sharedMemory(nullptr), trackingTrigger(nullptr) {
    sharedMemory = new QSharedMemory(QString(shareMemoryName.c_str()));
QiaoJiaSystem/StructureApp/NewRecordVideoElement.cpp
@@ -3,7 +3,7 @@
//#todo index int -> string
NewRecordVideoElement::NewRecordVideoElement(std::string camid) :
videoEncoderElement(cv::Size(1920, 1080), 9, 0),
videoEncoderElement(cv::Size(1920, 1080), 5, 0),
camID(camid)
{
@@ -12,7 +12,10 @@
}
NewRecordVideoElement::~NewRecordVideoElement() {
m_imgBufQue.clear();
    queue<ImgInfo> empty;
    empty.swap(m_imgBufQue);
//m_imgBufQue.clear();
}
std::string NewRecordVideoElement::startRecord() {
@@ -22,7 +25,7 @@
     getImg(info);
    std::string srcPath= getFileName(info.time);
//    DBG("fisrt fps time"<<info.time);
    try {
        videoEncoderElement.threadInitial(srcPath, info.img);
    }
@@ -33,18 +36,20 @@
}
void NewRecordVideoElement::endRecord() {
    ImgInfo info;
    getImg(info);
    doRecord();
    doRecord();
    doRecord();
    videoEncoderElement.threadClosing();
}
void NewRecordVideoElement::doRecord() {
//    ImgInfo info=m_HiredisTool.getImage(camID);
  //  ImgInfo info=m_HiredisTool.getImage(camID);
     ImgInfo info;
     getImg(info);
     DBG(" time="<<info.time);
//    DBG(" time="<<info.time);
    videoEncoderElement.doFunc(info.img);
}
@@ -84,29 +89,131 @@
    return t_FilePath;
}
void NewRecordVideoElement::pushImgBuf(const std::string& time,cv::Mat img)
void NewRecordVideoElement::pushImgBuf(const std::string& time,cv::Mat& img)
{
     ImgInfo info;
     info.img=img;
     img.copyTo(info.img);
//     info.img=img;
     info.time=time;
     m_imgBufQue.push_front(info);
     m_imgBufQue.push(info);
//     int size=m_imgBufQue.size();
//     DBG("m_imgBufQue size="<<size);
}
 void NewRecordVideoElement::getImg(ImgInfo& info)
 {
        //todo
        int len=10;
         info=m_imgBufQue.back();
        int len=20;
         info=m_imgBufQue.front();
        int size=m_imgBufQue.size();
        //DBG("m_imgBufQue size="<<size<<" time="<<info.time);
//        DBG("m_imgBufQue size="<<size<<" time="<<info.time);
        if(size>len)
        {
             m_imgBufQue.pop_back();
             m_imgBufQue.pop();
        }
 }
 void NewRecordVideoElement::threadFunc()
 {
        Record();
 }
 void NewRecordVideoElement::threadInitial()
 {
     recordInit(40,100);
 }
 void NewRecordVideoElement::Record() {
     switch(recordStatus)
     {
         case RECORD_STOP:
 //        DBG("recordDelay:" << recordDelay);
 //        DBG("videoLength:" << videoLength);
 //        DBG("sdkTrigger:" << sdkTrigger);
             videoLength = 0;
             recordDelay = 0;
             if(sdkTrigger){
                 recordStatus = RECORD_DOING;
                 startRecord();
             }
             else
             {
                 ImgInfo info;
                 getImg(info);
             }
             break;
         case RECORD_DOING:
             videoLength++;
             if(sdkTrigger){
                 if(videoLength < fileMax){
                     doRecord();
                 }
                 else
                 {
                     recordStatus = RECORD_STOP;
                     endRecord();
                 }
             }
             else
             {
                 recordStatus = RECORD_ENDING;
                 doRecord();
             }
             break;
         case RECORD_ENDING:
//             DBG("recordDelay:" << recordDelay);
//             DBG("videoLength:" << videoLength);
//             DBG("sdkTrigger:" << sdkTrigger);
             recordDelay++;
             videoLength++;
             if(sdkTrigger){
                 if( (recordDelay < fileMin/4) &&
                     (videoLength < fileMax)){
                     doRecord();
                 }else{
                     recordStatus = RECORD_STOP;
                     endRecord();
                 }
             }else{
                 if( (recordDelay < fileMin/2) &&
                     (videoLength < fileMax)){
                    doRecord();
                 }else{
                     recordStatus = RECORD_STOP;
                     endRecord();
                 }
             }
             break;
         default:
             break;
     }
 }
 void NewRecordVideoElement::setSdkTrigger(bool isTrigger) {
     if(isTrigger)
     {
         triggerDelay = 0;
         sdkTrigger = true;
     }
     else
     {
         if(triggerDelay++ >= fileMin/2) {
             sdkTrigger = false;
         }
         else{
             sdkTrigger = true;
         }
     }
 }
 void NewRecordVideoElement::recordInit(int videoMin, int videoMax) {
     sdkTrigger = false;
     fileMin = videoMin;
     fileMax = videoMax;
     triggerDelay = fileMin/2;
     recordStatus = RECORD_STOP;
     videoLength = 0;
     recordDelay = 0;
 }
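For orientation (not part of the commit): the frame buffer moves from a std::list driven with push_front()/back() and a depth of 10 to a plain FIFO std::queue with a depth of 20. Frames are pushed on every capture tick, getImg() always reads the oldest frame, and a frame is only popped once the queue holds more than 20 entries, so a clip started on a trigger begins roughly 20 frames before the event. A minimal sketch of that bounded FIFO, with cv::Mat replaced by a string so it compiles standalone:

// Minimal sketch of the bounded FIFO behind pushImgBuf()/getImg() (not part
// of the commit). ImgInfo is simplified so the sketch compiles standalone;
// the depth of 20 and the front()/pop() order follow the diff.
#include <iostream>
#include <queue>
#include <string>

struct ImgInfo {
    std::string time;
    std::string img;              // stands in for cv::Mat in this sketch
};

class FrameBuffer {
public:
    void pushImgBuf(const std::string &time, const std::string &img) {
        m_imgBufQue.push({time, img});          // newest frame at the back
    }
    void getImg(ImgInfo &info) {
        int len = 20;                           // ~20 frames of pre-trigger history
        info = m_imgBufQue.front();             // oldest frame first (FIFO)
        if ((int) m_imgBufQue.size() > len)
            m_imgBufQue.pop();                  // keep the backlog bounded
        // Like the original, this assumes the queue is never read while empty.
    }
private:
    std::queue<ImgInfo> m_imgBufQue;
};

int main() {
    FrameBuffer buf;
    ImgInfo info;
    // Push and read alternate once per frame, as in the capture connector,
    // so reads lag the newest frame by roughly the queue depth.
    for (int i = 0; i < 100; ++i) {
        buf.pushImgBuf("t" + std::to_string(i), "frame");
        buf.getImg(info);
    }
    std::cout << info.time << std::endl;        // t79: about 20 frames behind t99
    return 0;
}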
QiaoJiaSystem/StructureApp/NewRecordVideoElement.h
@@ -1,13 +1,13 @@
#ifndef NEWRECORDVIDEOELEMENT_H
#define NEWRECORDVIDEOELEMENT_H
#include <basic/pipe/PipeElement.h>
#include <basic/pipe_element/ffmpeg/FfmpegElement.h>
struct ImgInfo
{
    std::string time;
    cv::Mat img;
};
class NewRecordVideoElement {
class NewRecordVideoElement : public basic::PipeElement{
public:
@@ -17,14 +17,35 @@
    ~NewRecordVideoElement();
    std::string startRecord();
    void setSdkTrigger(bool isTrigger);
    void doRecord();
    void endRecord();
    void pushImgBuf(const std::string& time,cv::Mat img);
    void pushImgBuf(const std::string& time,cv::Mat& img);
    void getImg(ImgInfo& info);
private:
    virtual void threadFunc() override;
    virtual void threadInitial() override;
private:
    void recordInit(int videoMin, int videoMax);
    void Record();
    int fileMin;
    int fileMax;
    bool sdkTrigger;
    int triggerDelay;
#define RECORD_STOP (0)
#define RECORD_DOING (1)
#define RECORD_ENDING (2)
    int recordStatus;
    int videoLength;
    int recordDelay;
private:
    std::string getFileName(std::string timeStamp);
@@ -32,7 +53,7 @@
    ffmpeg::VideoEncodeElement videoEncoderElement;
    std::string camID;
    std::string m_cutPath;
    std::list<ImgInfo> m_imgBufQue;
    std::queue<ImgInfo> m_imgBufQue;
};
#endif // RECORDVIDEOELEMENT_H
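A rough sketch of how the reworked element is driven, based on the AppPipeController diff above (not part of the commit): each frame the controller buffers the image, latches the OR of the detector trigger states, and calls submit() when the element is idle; the basic::PipeElement base class presumably then runs threadFunc(), and with it Record(), on the element's own worker thread. PipeElementStub and RecorderStub below are illustrative stand-ins, and the stub runs threadFunc() inline rather than on a thread.

// Illustrative stand-ins only: PipeElementStub is not the real
// basic::PipeElement, and the stub runs threadFunc() inline instead of on a
// worker thread. The call pattern in main() mirrors the capture connector
// added in AppPipeController.cpp.
#include <iostream>
#include <string>

class PipeElementStub {
public:
    virtual ~PipeElementStub() = default;
    bool isBusy() const { return busy; }
    void submit() { busy = true; threadFunc(); busy = false; }   // inline stand-in
protected:
    virtual void threadFunc() = 0;
private:
    bool busy = false;
};

class RecorderStub : public PipeElementStub {
public:
    void pushImgBuf(const std::string &time) { lastTime = time; }
    void setSdkTrigger(bool isTrigger) { trigger = isTrigger; }
protected:
    void threadFunc() override {               // plays the role of Record()
        std::cout << lastTime << (trigger ? ": record" : ": idle") << std::endl;
    }
private:
    std::string lastTime;
    bool trigger = false;
};

int main() {
    RecorderStub rec;
    for (int frame = 0; frame < 3; ++frame) {
        bool anyTrigger = (frame == 1);        // OR of the detector trigger states
        rec.pushImgBuf("frame " + std::to_string(frame));
        rec.setSdkTrigger(anyTrigger);
        if (!rec.isBusy()) rec.submit();
    }
    return 0;
}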