pansen
2019-03-03 3d4c378b6f65f03f9fab19bdee5decb2be989b92
添加GB28181解码模块
10个文件已添加
1个文件已修改
1613 ■■■■■ 已修改文件
QiaoJiaSystem/CMakeLists.txt 7 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/CMakeLists.txt 101 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/FFmpegDecoderJPG.cpp 536 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/FFmpegDecoderJPG.h 248 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/GB28181Server.cpp 56 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/GB28181Server.h 93 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/GlobalSignalWaitLock.hpp 115 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/SpinLock.hpp 30 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/VideoCaptureElementWithRtp.cpp 210 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/VideoCaptureElementWithRtp.h 94 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/GB28181DecoderModel/main.cpp 123 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
QiaoJiaSystem/CMakeLists.txt
@@ -12,8 +12,14 @@
    ./
    ../../BasicPlatForm/
    ../../BasicPlatForm/libs/jsoncpp/include
    ../../BasicPlatForm/libs/glog/include
    ../../BasicPlatForm/libs/opencv/include
    ../../BasicPlatForm/libs/libuuid/include
)
link_directories(
    ../../BasicPlatForm/libs/glog/lib
)
#add_subdirectory(StructureApp)
@@ -28,5 +34,6 @@
#add_subdirectory(VideoToImage)
add_subdirectory(UnitTest)
add_subdirectory(VideoToImageMulth)
add_subdirectory(GB28181DecoderModel)
#add_subdirectory(FaceSearchDbWithImg)
QiaoJiaSystem/GB28181DecoderModel/CMakeLists.txt
New file
@@ -0,0 +1,101 @@
# Build configuration for the standalone GB28181 decoder module executable.
cmake_minimum_required(VERSION 3.5)
# The project is named after the directory containing this file.
STRING(REGEX REPLACE ".*/(.*)" "\\1" CURRENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR})
project(${CURRENT_FOLDER})
set(CMAKE_CXX_STANDARD 11)
# NOTE(review): canonical value is "Debug" (capitalised); lowercase "debug"
# builds, but the CMAKE_CXX_FLAGS_DEBUG flag set will not be applied — confirm.
set(CMAKE_BUILD_TYPE debug)
add_compile_options(-fPIC)
add_definitions(-DGLOG)
#add_definitions(-DTestCode)
# NOTE(review): -fpermissive is a compiler flag, not a macro definition; it
# still reaches the compiler through add_definitions, but belongs in
# add_compile_options.
add_definitions(-DDEBUG_ERR -DDEBUG_INFO -fpermissive)
set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/../build)
set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/../build)
# packages
#find_package(CUDA)
#include_directories ("${PROJECT_SOURCE_DIR}")
# nvcc flags
#set(CUDA_NVCC_FLAGS -O3;-G;-g)
#set(CUDA_NVCC_FLAGS -gencode arch=compute_20,code=sm_20;-G;-g)
#set(CUDA_NVCC_FLAGS -gencode arch=compute_52,code=sm_52;-G;-g)
#file(GLOB_RECURSE CURRENT_HEADERS *.h *.hpp *.cuh)
#file(GLOB CURRENT_SOURCES *.cpp *.cu)
#source_group("Include" FILES ${CURRENT_HEADERS})
#source_group("Source" FILES ${CURRENT_SOURCES})
#cuda_add_executable(${PROJECT_NAME} ${CURRENT_HEADERS} ${CURRENT_SOURCES})
#cuda_add_library(${PROJECT_NAME} SHARED ${CURRENT_HEADERS} ${CURRENT_SOURCES})
#cuda_add_library(${PROJECT_NAME} STATIC ${CURRENT_HEADERS} ${CURRENT_SOURCES})
SET(SOURCES
    GlobalSignalWaitLock.hpp
    )
# Link dependencies: ffmpeg, Qt5Core, OpenCV, the GB28181 SDK, glog, mysql.
SET(LIBS
    glog
    avformat
    avcodec
    swresample
    swscale
    avutil
    bz2 dl z
    Qt5Core
    opencv_world
    28181sdk
    mysqlclient
    StreamParse
    pthread
    )
include_directories(
    #glog
    ../../../BasicPlatForm/libs/glog/include
    ../../../BasicPlatForm/libs/GB28181/include
    ../../../BasicPlatForm/libs/opencv/include
    ../../../BasicPlatForm/libs/ffmpeg/include
    ../../../BasicPlatForm/basic/util/opencv/
    ../../../BasicPlatForm/basic/debug/
    ../../../BasicPlatForm/
    ../../../BasicPlatForm/basic/pipe_element/ffmpeg/
    /usr/include/x86_64-linux-gnu/qt5
)
link_directories(
    #glog
    /usr/local/lib/
    #glog
    ../../../BasicPlatForm/libs/glog/lib
    ../../../BasicPlatForm/libs/GB28181/libs
    ../../../BasicPlatForm/libs/opencv/lib
    ../../../BasicPlatForm/libs/ffmpeg/lib
)
add_executable(${PROJECT_NAME}
    #    testmain.cpp
    main.cpp
    ./GB28181Server.cpp
    ./FFmpegDecoderJPG.cpp
    VideoCaptureElementWithRtp.cpp
    ../../../BasicPlatForm/basic/timer_counter/Clocktimer.cpp
    ${SOURCES}
    )
target_link_libraries(${PROJECT_NAME}
    ${LIBS}
    )
QiaoJiaSystem/GB28181DecoderModel/FFmpegDecoderJPG.cpp
New file
@@ -0,0 +1,536 @@
//
// Created by ps on 19-1-11.
//
#include <zconf.h>
#include <opencv2/opencv.hpp>
#include "FFmpegDecoderJPG.h"
// One-time global FFmpeg initialisation (registration is required before any
// demux/decode call on the pre-4.0 FFmpeg API used throughout this file).
void BASICGB28181::initFFmpeg() {
    av_register_all();
    avformat_network_init();
}
// Force initFFmpeg() to run during static initialisation of this translation
// unit, so users of the decoder never have to call it explicitly.
static bool initFFmpegRet = (BASICGB28181::initFFmpeg(), true);
//MyQueue<BASICGB28181::frameBuffInfo *>  BASICGB28181::FFmpegDecoderJPG::m_rtpQueue;
//cv::Mat BASICGB28181::FFmpegDecoderJPG::m_image;
cv::Mat BASICGB28181::avframe_to_cvmat(AVFrame *frame) {
    AVFrame dst;
    memset(&dst, 0, sizeof(dst));
    int w = frame->width, h = frame->height;
    cv::Mat m = std::move(cv::Mat(h, w, CV_8UC3));
    dst.data[0] = (uint8_t *) m.data;
    avpicture_fill((AVPicture *) &dst, dst.data[0], AV_PIX_FMT_BGR24, w, h);
    struct SwsContext *convert_ctx = NULL;
//    PixelFormat src_pixfmt = (enum PixelFormat) frame->format;
//    PixelFormat dst_pixfmt = AV_PIX_FMT_BGR24;
    convert_ctx = sws_getContext(w, h, frame->format, w, h, AV_PIX_FMT_BGR24,
                                 SWS_FAST_BILINEAR, NULL, NULL, NULL);
    sws_scale(convert_ctx, frame->data, frame->linesize, 0, h,
              dst.data, dst.linesize);
    sws_freeContext(convert_ctx);
    DBG("m.size is " << m.size());
    return m;
}
// Default-construct the decoder: 32 KiB AVIO probe/read buffer, all state
// flags cleared, empty RTP queue, frame counters reset.
BASICGB28181::FFmpegDecoderJPG::FFmpegDecoderJPG() : m_buf_size(32768), m_running(false), m_PackageState(false),
                                                     m_readData(false), m_rtpQueue(), frame_number(0),
                                                     first_frame_number(-1) {
}
/***
 * Drain any payloads still queued at destruction time.
 * BUG FIX: each queued frameBuffInfo (and its payload buffer) was
 * heap-allocated in pushInfo(); the original loop discarded the pointer
 * returned by popNotWait(), leaking every entry still queued at shutdown.
 */
BASICGB28181::FFmpegDecoderJPG::~FFmpegDecoderJPG() {
    while (m_rtpQueue.count_queue()) {
        // Queue is non-empty (checked above), so pop() will not block here.
        frameBuffInfo *pending = m_rtpQueue.pop();
        if (pending != nullptr) {
            delete[] pending->buff;
            delete pending;
        }
    }
}
/***
 * Deep-copy one raw stream payload into the decoder's queue; read_data()
 * later consumes and frees it. Fails fast when the decode thread is not
 * running so callers do not queue into a dead decoder.
 * @param data payload bytes (copied; caller keeps ownership of `data`)
 * @param datalen payload length in bytes
 * @param camIdx camera id (stored alongside the payload, used for logging)
 * @return false when the decoder is not running or an exception was caught,
 *         true otherwise
 */
bool BASICGB28181::FFmpegDecoderJPG::pushInfo(unsigned char *data, int datalen, const std::string &camIdx) {
    TryCath(
        if (!m_running) {
            ERR(" m_running is false");
            return false;
        }
#ifdef TestCode
        DBG(camIdx << " dataLen is " << datalen);
#endif
        // Take a private copy of the payload; ownership passes to the queue
        // and is released by the consumer (read_data) or the destructor.
        frameBuffInfo *info = new frameBuffInfo();
        info->buff = new unsigned char[datalen];
        info->buffLen = datalen;
        info->camIdx = camIdx;
        memcpy(info->buff, data, datalen);
#ifdef TestCode
        DBG(" m_rtpQueue.push before ");
#endif
        m_rtpQueue.push(info);
#ifdef TestCode
        DBG(" m_rtpQueue.push after ");
#endif
    );
    return true;
}
/***
 * AVIOContext read callback: fill `buf` with exactly `bufsize` bytes taken
 * from the RTP queue. A queued packet larger than the remaining space is
 * split and its unread tail re-queued at the queue FRONT (preserving byte
 * order); when the queue runs short the remainder is zero-padded and handed
 * to the decoder immediately. Presumably m_rtpQueue.pop() blocks until data
 * arrives — confirm against MyQueue.
 * @param opaque the owning FFmpegDecoderJPG instance
 * @param buf destination buffer supplied by FFmpeg
 * @param bufsize capacity of buf
 * @return always bufsize (the buffer is always fully filled or padded)
 */
int BASICGB28181::FFmpegDecoderJPG::read_data(void *opaque, uint8_t *buf, int bufsize) {
#ifdef TestCode
    ClockTimer cl("read_data");
#endif
    FFmpegDecoderJPG *fFmpegDecoderJPG = (FFmpegDecoderJPG *) opaque;
    int len = bufsize;
    int diff = 0;
    do {
//        DBG(" m_rtpQueue.pop before ");
        frameBuffInfo *buffinfo = fFmpegDecoderJPG->m_rtpQueue.pop();
//        DBG(" m_rtpQueue.pop after ");
        diff = len - buffinfo->buffLen;
//        printf("bufsize is :%ld,len is :%ld, datalen:%d \n", bufsize, len, buffinfo->buffLen);
        // Packet longer than the space left in buf: copy what fits, push the
        // tail back to the front of the queue for the next read.
        if (diff < 0) {
//            DBG(diff);
            memcpy(buf + bufsize - len, buffinfo->buff, len);
            frameBuffInfo *info = new frameBuffInfo();
            info->buffLen = buffinfo->buffLen - len;
            info->buff = new uint8_t[buffinfo->buffLen - len]{};
            memcpy(info->buff, buffinfo->buff + len, buffinfo->buffLen - len);
            fFmpegDecoderJPG->m_rtpQueue.push_front_one(info);
            fFmpegDecoderJPG->m_PackageState = true;
        } else if (diff == 0) {
            // Packet fits exactly.
            memcpy(buf + bufsize - len, buffinfo->buff, buffinfo->buffLen);
            fFmpegDecoderJPG->m_PackageState = false;
        } else if (diff > 0) {
            // Packet shorter than the space left in buf.
            memcpy(buf + bufsize - len, buffinfo->buff, buffinfo->buffLen);
            len = len - buffinfo->buffLen;   // bytes still missing
            memset(buf + bufsize - len, 0, len);
//            if (fFmpegDecoderJPG->m_PackageState) {
            // Do not wait for more data: hand the zero-padded buffer to the
            // decoder right away.
            diff = 0;
            fFmpegDecoderJPG->m_PackageState = false;
//            }
        }
        delete[] buffinfo->buff;
        delete buffinfo;
    } while (diff > 0);
    // #todo trigger signal
//    DBG("emitSigal(\"read_dataOk\") begin");
//    gSignalLock.emitSigal("read_dataOk");
    fFmpegDecoderJPG->m_readData = true;
//    DBG("emitSigal(\"read_dataOk\") after");
    return bufsize;
}
/***
 * Decode-thread entry point (detached by startThd). Probes the in-memory
 * stream through the read_data() callback, decodes frames in a loop, caches
 * every packet for on-demand clip saving (SaveToPacketVector/CheckSave) and
 * publishes each decoded image via gSignalLock. Runs until p_this->m_running
 * is cleared by stopThd().
 * @param p_this the owning decoder instance
 *
 * NOTE(review): err/err1..err4 and `vi` are never checked — a stream without
 * a video track leaves vi == -1 and `ic->streams[vi]` is then an invalid
 * access; a failed probe/open would be equally fatal. Also, only `frame` is
 * released on exit: ic / avio / iobuffer / ctx are never freed, so every
 * start/stop cycle leaks them. Worth fixing if decoders are restarted often.
 */
void BASICGB28181::FFmpegDecoderJPG::BareFlowDecoderThd(FFmpegDecoderJPG *p_this) {
    DBG(p_this->m_camIdx << "  BareFlowDecoderThd ok ... gpuIdx is " << p_this->m_gpuIdx);
    p_this->m_running = true;
    av_register_all();
    avformat_network_init();
    // Custom IO: FFmpeg pulls raw bytes out of the RTP queue via read_data().
    AVFormatContext *ic = avformat_alloc_context();
    unsigned char *iobuffer = (unsigned char *) av_malloc(p_this->m_buf_size);
    AVIOContext *avio = avio_alloc_context(iobuffer, p_this->m_buf_size, 0, p_this, p_this->read_data, NULL, NULL);
    ic->pb = avio;
    int err = av_probe_input_buffer(ic->pb, &ic->iformat, nullptr, nullptr, 0, p_this->m_buf_size);
    int err1 = avformat_open_input(&ic, "", NULL, NULL);
    int err2 = avformat_find_stream_info(ic, nullptr);
    // Locate the first video stream.
    int vi = -1;
    for (int i = 0; i < ic->nb_streams; ++i) {
        if (ic->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            vi = i;
            break;
        }
    }
    AVStream *stream = ic->streams[vi];
    p_this->video_st = stream;
    AVCodecContext *ctx = avcodec_alloc_context3(nullptr);
    int err3 = avcodec_parameters_to_context(ctx, stream->codecpar);
    AVCodec *codec = avcodec_find_decoder(ctx->codec_id);
    // Use GPU decoding?
    if (p_this->m_gpuIdx >= 0) {
        if (codec != NULL) {
            char cuvidName[40] = {0};
            sprintf(cuvidName, "%s_cuvid", codec->name);
            if (!strcmp(codec->name, "h264") || !strcmp(codec->name, "h265") || !strcmp(codec->name, "hevc")) {
                // Prefer the NVIDIA cuvid decoder; fall back to the software
                // codec when it is not available in this FFmpeg build.
                AVCodec *codec_cuvid = avcodec_find_decoder_by_name(cuvidName);
                if (codec_cuvid != NULL) {
                    codec = codec_cuvid;
                } else {
//                    return false;
                    ERR("codec_cuvid is NULL");
                }
            }
        }
    }
    int err4 = avcodec_open2(ctx, codec, nullptr);
    AVPacket pkt;
    av_init_packet(&pkt);
    AVFrame *frame = av_frame_alloc();
    // Pace the loop slightly faster than the nominal frame interval
    // (12 ms headroom for the decode work itself).
    unsigned int usleepTime = (1000 / p_this->m_fps) - 12;
    usleepTime *= 1000;
    DBG(" before while <<usleepTime is " << usleepTime);
    while (p_this->m_running) {
#ifdef TestCode
        ClockTimer Test("while time");
#endif
        int err5 = av_read_frame(ic, &pkt);
        //# todo save package
        p_this->frame_number++;
        //DBG("GotPicture "<<m_camId<<":"<<frame_number);
        // Cache the packet here: caching before the read produced incomplete
        // frames.
        p_this->SaveToPacketVector(pkt);
        p_this->CheckSave();
        int err6 = avcodec_send_packet(ctx, &pkt);
        av_packet_unref(&pkt);
        int err7 = avcodec_receive_frame(ctx, frame);
        if ((err7 == AVERROR(EAGAIN)) || (err5 < 0) || (err6 < 0)) {
            ERR(" error << err7:" << err7 << "  err5: " << err5 << " err6: " << err6);
            usleep(40000);
            continue;
        }
//        BASICGB28181::avframe_to_cvmat(frame).copyTo(p_this->m_image);
        p_this->m_image = std::move(BASICGB28181::avframe_to_cvmat(frame));
#ifdef TestCode
        {
//            TestCode
            ClockTimer cl("TestCode");
            std::string strNewTime2 = AppUtil::getTimeUSecString();
            cv::putText(p_this->m_image, strNewTime2, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN,
                        5, cv::Scalar(255, 255, 0), 2);
            std::thread test([&](cv::Mat img, std::string strThing) {
                try {
                    std::string strNewTime = "tmpDec/";
                    strNewTime.append(p_this->m_camIdx + "_").append(strThing).append(".jpg");
//                    cv::imwrite(strNewTime, p_this->m_image);
                } catch (std::exception ex) {
                    ERR(ex.what());
                }
            }, p_this->m_image, strNewTime2);
            test.detach();
        }
#endif
        // Notify consumers that a fresh frame for this camera is available.
#ifdef TestCode
        DBG("emitSigal(\"DecoderImageOK\") begin");
#endif
        gSignalLock.emitSigal(p_this->m_camIdx + "DecoderImageOK");
//#ifdef TestCode
//        DBG("emitSigal(\"DecoderImageOK\") after");
//#endif
        DBG("emitSigal(\"DecoderImageOK\") after");
        DBG("p_this->m_camIdx is " << p_this->m_camIdx << " queue size is " << p_this->m_rtpQueue.count_queue());
#ifdef TestCode
        {
            ClockTimer cl("waitTime");
            int loop = 0;
            //#TODO
//            while ((loop++ < 3000) && !(p_this->m_readData)) {
//                usleep(10);
//            }
            usleep(30000);
            DBG("p_this->m_readData is " << p_this->m_readData << "  loop is " << loop << " queue size is "
                                         << p_this->m_rtpQueue.count_queue());
            p_this->m_readData = false;
//        usleep(12000);
        }
#else
        usleep(usleepTime);
#endif
    }
    DBG(" after while ");
    av_frame_free(&frame);
}
/***
 * Spawn (and detach) the decode thread for camera `camIdx`.
 * @param camIdx camera id used to tag signals and log lines
 * @param fps expected stream frame rate, used to pace the decode loop
 * @param gpuIdx GPU to decode on; negative means CPU decoding
 * @return true unless an exception escaped thread creation
 * NOTE(review): setenv(..., overwrite=0) does NOT override an existing
 * CUDA_VISIBLE_DEVICES value — confirm that is the intended behaviour.
 */
bool BASICGB28181::FFmpegDecoderJPG::startThd(const std::string &camIdx, const int &fps, const int &gpuIdx) {
    TryCath(
        DBG(camIdx << "  FFmpegDecoderJPG startThd ... gpuIdx is " << gpuIdx);
        m_gpuIdx = gpuIdx;
        m_fps = fps;
        if (gpuIdx >= 0) {
            setenv("CUDA_VISIBLE_DEVICES", std::to_string(gpuIdx).c_str(), 0);
        }
        m_camIdx = camIdx;
        std::thread t_BareFlowDecoder(BareFlowDecoderThd, this);
        t_BareFlowDecoder.detach();
    );
    return true;
}
/***
 * Ask the detached decode thread to exit by clearing m_running.
 * Returns immediately; it does not wait for the thread to finish its
 * current loop iteration.
 */
bool BASICGB28181::FFmpegDecoderJPG::stopThd() {
    TryCath(
        DBG(m_camIdx << "  FFmpegDecoderJPG stopThd ... ");
        m_running = false;
    );
    return true;
}
bool BASICGB28181::FFmpegDecoderJPG::getRunning() {
    return m_running;
}
// Hand out the most recently decoded frame. cv::Mat copies are shallow, so
// the returned Mat shares the underlying pixel buffer with m_image.
cv::Mat BASICGB28181::FFmpegDecoderJPG::getImage() {
    cv::Mat latest = m_image;
    return latest;
}
std::string BASICGB28181::FFmpegDecoderJPG::GetImageName() {
    ImageName_s_t st;
    st.m_camId = this->m_camIdx;
    st.m_frameId = this->m_frameIndex;
    st.m_timeStamp = AppUtil::GetTimeWithHyphen();
    return st.toString();
}
/***
 * Request that the clip containing `imageName` be written under `strPath`.
 * Parses the frame id out of the image name, switches the recorder into
 * RECORDING_VIDEO on the first request and extends the requested end frame
 * on later ones; the actual writing happens asynchronously in
 * CheckSave()/HandleSave() as more frames arrive.
 * @param strPath directory the clip will be saved into
 * @param imageName name previously produced by GetImageName()
 * @return false when imageName cannot be parsed; true otherwise (a frame id
 *         already evicted from the cache only logs an error)
 */
bool BASICGB28181::FFmpegDecoderJPG::SaveVideoByImageName(const std::string &strPath, const std::string &imageName) {
    DBG(" strPath is " << strPath << "  imageName " << imageName);
    ImageName_s_t imgName_s = ImageName_s_t::fromString(imageName);
    if (!imgName_s.Valid()) {
        ERR("Image Name Valid  " << imageName);
        return false;
    }
    m_videoPath = strPath;
    if (m_recordState == STOP_RECORD) {
        // First request: open a recording window at this frame.
        m_recordState = RECORDING_VIDEO;
        m_startFrameId = m_endFrameId = imgName_s.m_frameId;
    } else {
        // Already recording: only push the window's end forward.
        if (imgName_s.m_frameId > m_endFrameId) {
            m_endFrameId = imgName_s.m_frameId;
        }
    }
    if (!m_packetsVec.empty()) {
        // Requested frame predates the oldest cached packet: cannot be saved.
        if (imgName_s.m_frameId < m_packetsVec[0].m_frameId) {
            ERR("Save Video Failed: PackageFirstID: " << m_packetsVec[0].m_frameId << " ImageId: "
                                                      << imgName_s.m_frameId);
        }
    }
    return true;
}
bool BASICGB28181::FFmpegDecoderJPG::SetMinMaxVideoSeconds(const int minSeconds, const int maxSecond) {
    if (minSeconds < 0 || maxSecond < 0 && minSeconds >= maxSecond) {
        return false;
    } else {
        m_minVideoFrameCount = minSeconds * 25;
        m_maxVideoFrameCount = maxSecond * 25;
        return true;
    }
}
bool BASICGB28181::FFmpegDecoderJPG::CleanToFrameId(int64_t lastFrameId) {
    std::lock_guard<std::mutex> lock(g_mutex);
    if (RECORDING_VIDEO == m_recordState) {
        if (!m_packetsVec.empty()) {
            auto iter = m_packetsVec.begin();
            while (iter->m_frameId < lastFrameId) {
                INFO("DropFrame: " << iter->m_frameId);
                delete iter->m_packet.data;
                iter = m_packetsVec.erase(iter);
            }
        }
    }
    return true;
}
bool BASICGB28181::FFmpegDecoderJPG::CleanOneKeyFrameOneRange() {
    std::lock_guard<std::mutex> lock(g_mutex);
    if (!m_packetsVec.empty() && STOP_RECORD == m_recordState) {
        auto firstFrame = m_packetsVec[0];
        //视频的最短长度有问题,可以考虑修改此处 m_minVideoFrameCount
        if ((m_last_I_FrameId - firstFrame.m_frameId > m_minVideoFrameCount / 2)) {
            auto iter = m_packetsVec.begin();
            delete iter->m_packet.data;
            iter = m_packetsVec.erase(iter);
            while (!(iter->m_packet.flags & AV_PKT_FLAG_KEY)) {
//                INFO("DropFrame: " << iter->m_frameId);
                delete iter->m_packet.data;
                iter = m_packetsVec.erase(iter);
            }
        }
    }
    return true;
}
/***
 * Write every cached packet with frame id < lastFrameId into `path`.
 * Timestamps are rebased on the first cached packet's pts/dts and rescaled
 * to the output stream's time base by conversion(). The cache itself is
 * cleaned separately (CleanToFrameId). Holds g_mutex for the whole write.
 * @param path output file name (container chosen by FFmpeg from the extension)
 * @param lastFrameId exclusive upper bound of frames to write
 * @return always true (failures inside startWrite/stopWrite are only logged)
 */
bool BASICGB28181::FFmpegDecoderJPG::SaveVideo(std::string path, int64_t lastFrameId) {
    std::lock_guard<std::mutex> lock(g_mutex);
    INFO("SaveVideo: " << path);
    if (!m_packetsVec.empty()) {
        startWrite(path.c_str());
        int64_t firstKeyFramePts = m_packetsVec[0].m_packet.pts;
        int64_t firstKeyFrameDts = m_packetsVec[0].m_packet.dts;
        for (const auto &item:m_packetsVec) {
            if (item.m_frameId < lastFrameId) {
                conversion(const_cast<AVPacket *> (&item.m_packet), firstKeyFramePts, firstKeyFrameDts, video_st);
                av_write_frame(m_pOutFmtCtx, &item.m_packet);
            } else {
                // Cache is ordered by frame id; nothing further qualifies.
                break;
            }
        }
        stopWrite();
    }
    return true;
}
bool BASICGB28181::FFmpegDecoderJPG::HandleSave() {
    if (m_recordState == RECORDING_VIDEO) {
        auto firstFrame = m_packetsVec[0];
        VideoName_s_t st;
        st.m_camId = m_camIdx;
        st.m_timeStamp = AppUtil::GetTimeWithHyphen();
        st.m_startFrameId = firstFrame.m_frameId;
        st.m_endFrameId = m_last_I_FrameId - 1;
        //结尾留的已经足够,并且没有新的帧需要录像
        if (m_last_I_FrameId - m_endFrameId > m_minVideoFrameCount / 2) {
//            INFO("LastIFrameID: " << m_last_I_FrameId << "  FirstFrameID: " << st.m_startFrameId << " m_endFrameID: "
//                                  << m_endFrameId << "   MinVideoFrameCount :" << m_minVideoFrameCount);
            m_startFrameId = m_endFrameId = -1;
            SaveVideo(m_videoPath + st.ToVideoName(), m_last_I_FrameId);
            CleanToFrameId(m_last_I_FrameId);
            m_recordState = STOP_RECORD;
        } else {
            //缓冲区中已经有太多的帧了,并且剩余的在缓冲队列的m_last_I_FrameId之后还有需要录像的帧
            if (m_endFrameId - firstFrame.m_frameId > m_maxVideoFrameCount) {
//                INFO("FirstFrameID: " << firstFrame.m_frameId << " m_endFrameID: " << m_endFrameId
//                                      << "   MinVideoFrameCount :" << m_maxVideoFrameCount);
                m_startFrameId = m_last_I_FrameId;
                SaveVideo(m_videoPath + st.ToVideoName(), m_last_I_FrameId);
                CleanToFrameId(m_last_I_FrameId);
            }
        }
    }
    return true;
}
bool BASICGB28181::FFmpegDecoderJPG::CheckSave() {
    if (!m_packetsVec.empty()) {
        if (RECORDING_VIDEO == m_recordState) {
            HandleSave();
            return true;
        }
        return CleanOneKeyFrameOneRange();
    }
}
void BASICGB28181::FFmpegDecoderJPG::SaveToPacketVector(AVPacket &packet) {
    AVPacket newPacket(packet);
    newPacket.data = reinterpret_cast<uint8_t *>(new uint64_t[
    (packet.size + FF_INPUT_BUFFER_PADDING_SIZE) / sizeof(uint64_t) + 1]);
    memcpy(newPacket.data, packet.data, packet.size);
    m_frameIndex++;
    m_packetsVec.push_back({m_frameIndex, newPacket});
    if (newPacket.flags & AV_PKT_FLAG_KEY) {
        m_last_I_FrameId = m_frameIndex;
    }
}
int BASICGB28181::FFmpegDecoderJPG::startWrite(const char *filename) {
    if (video_st == nullptr) {
        printf("video_st instream is null");
        return -1;
    }
    int ret = avformat_alloc_output_context2(&m_pOutFmtCtx, NULL, NULL, filename);
    if (ret < 0) {
        fprintf(stderr, "avformat_alloc_output_context2 failed, errorCode: %d\n", AVERROR(ret));
        return -1;
    }
    /*
    * since all input files are supposed to be identical (framerate, dimension, color format, ...)
    * we can safely set output codec values from first input file
    */
    m_pOutVideo_stream = avformat_new_stream(m_pOutFmtCtx, NULL);
    {
//        AVCodecContext *c;
//        c = m_pOutVideo_stream->codec;
//        c->bit_rate = 400000;
//        c->codec_id = video_st->codec->codec_id;
//        c->codec_type = video_st->codec->codec_type;
//        c->time_base.num = video_st->time_base.num;
//        c->time_base.den = video_st->time_base.den;
//        fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
//        c->width = video_st->codec->width;
//        c->height = video_st->codec->height;
//        c->pix_fmt = video_st->codec->pix_fmt;
//        printf("%d %d %d", c->width, c->height, c->pix_fmt);
//        c->flags = video_st->codec->flags;
//        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
//        c->me_range = video_st->codec->me_range;
//        c->max_qdiff = video_st->codec->max_qdiff;
//
//        c->qmin = video_st->codec->qmin;
//        c->qmax = video_st->codec->qmax;
//
//        c->qcompress = video_st->codec->qcompress;
    }
    ret = avio_open(&m_pOutFmtCtx->pb, filename, AVIO_FLAG_WRITE);
    if (ret < 0) {
        fprintf(stderr, "could not find stream info, errorCode: %d\n", AVERROR(ret));
        return -1;
    }
    avformat_write_header(m_pOutFmtCtx, NULL);
    m_bstartWrite = true;
    m_bFirstKeyFrame = true;
    m_nFirstKeyDts = 0;
    m_nFirstKeyPts = 0;
    return 0;
}
int BASICGB28181::FFmpegDecoderJPG::stopWrite() {
    if (m_pOutFmtCtx == nullptr) return -1;
    av_write_trailer(m_pOutFmtCtx);
    avio_close(m_pOutFmtCtx->pb);
    avcodec_close(m_pOutFmtCtx->streams[0]->codec);
    av_freep(&m_pOutFmtCtx->streams[0]->codec);
    av_freep(&m_pOutFmtCtx->streams[0]);
    av_free(m_pOutFmtCtx);
    m_pOutFmtCtx = nullptr;
    m_bstartWrite = false;
    return 0;
}
/***
 * Rebase and rescale one cached packet's timestamps for muxing: subtract the
 * clip's first pts/dts so the clip starts at zero, then rescale pts/dts and
 * duration from the input stream's time base to m_pOutVideo_stream's.
 * @param packet AVPacket* (passed as void* to keep FFmpeg types out of the header)
 * @param firstKeyPts pts of the first packet written to the clip
 * @param firstKeyDts dts of the first packet written to the clip
 * @param inVideoStream AVStream* the packet was originally demuxed from
 */
void BASICGB28181::FFmpegDecoderJPG::conversion(void *packet, const long int &firstKeyPts, const long int &firstKeyDts,
                                                void *inVideoStream) {
    if ((packet != nullptr) && (inVideoStream != nullptr)) {
        AVStream *inStream = (AVStream *) inVideoStream;
        AVPacket *pkg = static_cast<AVPacket *>(packet);
//            static int a = 0;
//            pkg->dts = a++;
//            pkg->pts = a;
        pkg->pts -= firstKeyPts;
        pkg->dts -= firstKeyDts;
        pkg->pts = av_rescale_q_rnd(pkg->pts, inStream->time_base,
                                    m_pOutVideo_stream->time_base,
                                    (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkg->dts = av_rescale_q_rnd(pkg->dts, inStream->time_base,
                                    m_pOutVideo_stream->time_base,
                                    (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkg->duration = av_rescale_q(pkg->duration, inStream->time_base,
                                     m_pOutVideo_stream->time_base);
        // Unknown byte position; let the muxer recompute it.
        pkg->pos = -1;
    }
}
QiaoJiaSystem/GB28181DecoderModel/FFmpegDecoderJPG.h
New file
@@ -0,0 +1,248 @@
//
// Created by ps on 19-1-11.
//
//
// #TODO
// 可以把ffmpeg的操作抽成一个对象
//
#ifndef GB28181SDK_FFMPEGDECODERJPG_H
#define GB28181SDK_FFMPEGDECODERJPG_H
#include <iostream>
#include <unistd.h>
#include <MyQueue.h>
#include <Debug.h>
#include <atomic>
#include "GlobalSignalWaitLock.hpp"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#include <list>
#include "basic_struct_for_video_image.h"
extern "C"
{
#include <libavutil/frame.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfiltergraph.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
#include <mutex>
namespace BASICGB28181 {
// Wrap CODE in a try/catch that logs the exception text and makes the
// enclosing function return false — only usable inside bool-returning
// functions.
// BUG FIX: catch by const reference instead of by value; catching
// std::exception by value slices derived exception types (what() can then
// lose the derived message) and performs a needless copy.
#define TryCath(CODE)     try { \
CODE } catch (const std::exception &ex) { \
ERR(ex.what()); \
return false; \
}
    // Forward declarations; definitions live in FFmpegDecoderJPG.cpp.
    // NOTE(review): `static` functions declared in a header create a separate
    // internal-linkage symbol per translation unit — confirm this header is
    // only included from that one .cpp.
    static void initFFmpeg();
    static cv::Mat avframe_to_cvmat(AVFrame *frame);
    // One raw-stream payload queued for the decoder. `buff` is owned by this
    // struct: allocated with new[] in pushInfo(), released with delete[] by
    // the consumer (read_data) or at decoder destruction.
    typedef struct _buffInfo {
        uint8_t *buff;        // payload bytes (owned)
        int buffLen;          // payload length in bytes
        std::string camIdx;   // camera id the payload belongs to
    } frameBuffInfo;
    // One cached compressed frame: its sequential id plus the deep-copied
    // AVPacket (payload allocated in SaveToPacketVector).
    struct FrameIdPackage_s_t {
        int64_t m_frameId;
        AVPacket m_packet;
    };
    struct FrameRange_s_t {
        int64_t m_startFrameId;
        int64_t m_endFrameId;
        std::string m_videoName;
        FrameRange_s_t() {
            m_startFrameId = 0;
            m_endFrameId = 0;
            m_videoName.clear();
        }
        bool IsFrameIdInRange(int64_t frameId) {
            return m_startFrameId <= frameId && frameId <= m_endFrameId;
        }
    };
    // Recorder lifecycle states; in the code visible in this module only
    // RECORDING_VIDEO and STOP_RECORD are referenced (START_RECORD appears
    // unused — confirm before removing).
    enum RecordState_e_t {
        START_RECORD,
        RECORDING_VIDEO,
        STOP_RECORD,
    };
//    std::map<std::string, MyQueue<frameBuffInfo *> > MapMyQueue;
    // NOTE(review): `static` at namespace scope in a header gives every
    // translation unit its OWN mutex — this does not synchronise across TUs.
    // Confirm the header is included from a single .cpp, or move the
    // definition into a source file.
    static std::mutex g_mutex;
    /***
     * Decodes one camera's GB28181 bare stream from an in-memory queue
     * (pushInfo -> read_data -> FFmpeg) and caches compressed packets so that
     * short clips around a given frame can be saved to disk on demand.
     */
    class FFmpegDecoderJPG {
    public:
        FFmpegDecoderJPG();
        virtual ~FFmpegDecoderJPG();
        /***
         * Queue an I- or P-frame payload for decoding.
         * #TODO could anything go wrong when the first frame is not an I-frame?
         * @param data frame data address (copied)
         * @param datalen data length in bytes
         * @param camIdx camera id
         * @return
         */
        bool pushInfo(unsigned char *data, int datalen, const std::string &camIdx);
        /***
         * Start the decode thread.
         * @param camIdx camera id
         * @param fps stream frame rate (paces the decode loop)
         * @param gpuIdx gpu id (negative = CPU decoding)
         * @return
         */
        bool startThd(const std::string &camIdx, const int &fps, const int &gpuIdx = -1);
        /***
         * Stop the decode thread (asynchronous; does not join).
         * @return
         */
        bool stopThd();
        //****************************
        // Name ("camId" + frame index + timestamp, see ImageName_s_t) of the
        // current frame.
        std::string GetImageName();
        // Save the clip containing imageName under the strPath directory.
        bool SaveVideoByImageName(const std::string &strPath, const std::string &imageName);
        // Set the minimum and maximum saved-clip length in seconds.
        bool SetMinMaxVideoSeconds(const int minSeconds, const int maxSecond);
    private:
        // Bookkeeping run each time a frame is cached; decides write/trim.
        bool CheckSave();
        //Function
        bool HandleSave();
        // While recording: clear the cache up to lastFrameId once a clip has
        // been written.
        bool CleanToFrameId(int64_t lastFrameId);
        // While idle: once enough frames are cached, drop the data between
        // the two oldest key frames.
        bool CleanOneKeyFrameOneRange();
    private:
        // Frame id of the most recent key (I) frame.
        int64_t m_last_I_FrameId = -1;
        // Directory clips are saved into.
        std::string m_videoPath;
        // Recorder state (recording vs idle).
        RecordState_e_t m_recordState = STOP_RECORD;
        // First frame id requested for recording.
        int64_t m_startFrameId = 0;
        // Last frame id that still needs recording.
        int64_t m_endFrameId = 0;
        //Variable
        // Minimum clip length in frames (assumes 25 fps).
        int m_minVideoFrameCount = 10 * 25; // 10 seconds
        // Maximum clip length in frames (assumes 25 fps).
        int m_maxVideoFrameCount = 20 * 25; // 20 seconds;
        int m_maxVectorFrameCount = m_maxVideoFrameCount * 2;
        // Running count of received frames.
        int64_t m_frameIndex = 0;
        // Deep-copy a packet into m_packetsVec.
        void SaveToPacketVector(AVPacket &packet);
        // Write the frames before lastFrameId into the file named by path.
        bool SaveVideo(std::string path, int64_t lastFrameId);
    public:
        // Packet cache; the first entry must be an I-frame.
        std::vector<FrameIdPackage_s_t> m_packetsVec;
        //******************
        int64_t frame_number, first_frame_number;
        // Open the file and write the container header.
        int startWrite(const char *filename);
        // Close the file opened by startWrite.
        int stopWrite();
        // Rebase/rescale a packet's timestamps for the output stream.
        void conversion(void *packet, const long int &firstKeyPts, const long int &firstKeyDts, void *inVideoStream);
        bool m_bstartWrite = {false};
        bool m_bFirstKeyFrame = {false};
        long int m_nFirstKeyPts = 0;
        long int m_nFirstKeyDts = 0;
        AVFormatContext *m_pOutFmtCtx = {nullptr};
        AVStream *video_st{0};
        AVStream *m_pOutVideo_stream{nullptr};
        //****************************
    public:
        /***
         * Query whether the decode thread is running.
         * @return
         */
        bool getRunning();
        cv::Mat getImage();
//        #todo send Image func ? thd?
    private:
        int m_buf_size; //32768;
        int m_gpuIdx;
        int m_fps;
        // True while read_data() has split a packet across reads.
        bool m_PackageState;
        std::string m_camIdx;
        MyQueue<frameBuffInfo *> m_rtpQueue;
        cv::Mat m_image;
        std::atomic<bool> m_running;
        bool m_readData;
    private:
        /***
         * AVIO read callback through which FFmpeg pulls raw stream bytes.
         * @param opaque owning FFmpegDecoderJPG instance
         * @param buf
         * @param bufsize
         * @return
         */
        static int read_data(void *opaque, uint8_t *buf, int bufsize);
        /***
         * Bare-stream decode loop (thread entry point).
         * @param p_this class pointer
         */
        static void BareFlowDecoderThd(FFmpegDecoderJPG *p_this);
    };
}
#endif //GB28181SDK_FFMPEGDECODERJPG_H
QiaoJiaSystem/GB28181DecoderModel/GB28181Server.cpp
New file
@@ -0,0 +1,56 @@
//
// Created by ps on 19-3-1.
//
#include "GB28181Server.h"
// NOTE(review): this file-scope flag is NOT the static member
// GB28181Server::bGetLoaclRes declared in the header (that member has no
// out-of-class definition; only commented-out code references it). Verify
// which flag is actually intended before using either.
bool bGetLoaclRes = {false};
GB28181Server::GB28181Server() {}
// Shut the SDK down when the server object is destroyed.
GB28181Server::~GB28181Server() {
    C_UnInitSDK();
}
void GB28181Server::setMysqlConnParam(const MysqlDBServerCfg &_MysqlConnParam) {
    memset(&MysqlConnParam, 0, sizeof(MysqlConnParam_T));
    strcpy(MysqlConnParam.Host, _MysqlConnParam.Host.c_str());    //连接数据库的ip地址
    MysqlConnParam.Port = _MysqlConnParam.Port;                        //连接数据库的端口
    strcpy(MysqlConnParam.UserName, _MysqlConnParam.UserName.c_str());        //连接数据库的用户名
    strcpy(MysqlConnParam.Passwd, _MysqlConnParam.Passwd.c_str());        //连接数据库的密码
    strcpy(MysqlConnParam.DBName, _MysqlConnParam.DBName.c_str());    //连接数据库的表名
    MysqlConnParam.DBConnCount = _MysqlConnParam.DBConnCount;                    //连接数据库的数量
}
void GB28181Server::setGBServerParam(const GBServerCfg &_GBServerParam) {
    memset(&GBServerParam, 0, sizeof(GBServerParam_T));
    strcpy(GBServerParam.SvrIp, _GBServerParam.SvrIp.c_str()); // 国标服务的ip地址  (本机的ip地址)
    //strcpy(GBServerParam.SvrNatIp, "222.35.102.22"); //注意: 此处如果没有穿网ip地址不需要配置 (一般部署公网时使用)
    GBServerParam.SvrPort = _GBServerParam.SvrPort; // 国标服务监听的端口
    // 44122500042001000123
    strcpy(GBServerParam.SvrPubID, _GBServerParam.SvrPubID.c_str()); // 国标服务器的ID
    GBServerParam.bMD5Auth = _GBServerParam.bMD5Auth;                            // 是否需要MD5加密
    strcpy(GBServerParam.UserName, _GBServerParam.UserName.c_str()); // 国标服务的用户名    (下级设备注册的用户名)
    strcpy(GBServerParam.Passwd, _GBServerParam.Passwd.c_str());                // 国标服务的密码    (下级设备注册的密码)
    GBServerParam.SubScribeTime = _GBServerParam.SubScribeTime;  // 订阅时间  如果为0 表示不订阅
}
void GB28181Server::setUpperPlatform(const UpperPFCfg &_upperinfo) {
    memset(&upperinfo, 0, sizeof(UpperPlatform_T));
    strcpy(upperinfo.Name, _upperinfo.Name.c_str());
    strcpy(upperinfo.PublicID, _upperinfo.PublicID.c_str());
    strcpy(upperinfo.AuthUserName, _upperinfo.AuthUserName.c_str());
    strcpy(upperinfo.AuthPasswd, _upperinfo.AuthPasswd.c_str());
    strcpy(upperinfo.IpAddr, _upperinfo.IpAddr.c_str());
    upperinfo.Port = _upperinfo.Port;
    upperinfo.RegisterItv = _upperinfo.RegisterItv;
    upperinfo.KeepAliveItv = _upperinfo.KeepAliveItv;
}
// Start the GB28181 SDK with the previously configured parameters.
// NOTE(review): the fixed sleep(90) blocks the caller for 90 seconds —
// presumably to let lower-level devices register before returning; confirm
// whether the event callback (eventtype == 2) could replace this wait.
bool GB28181Server::initServer() {
    bool iRet = C_InitSDK(&GBServerParam, &MysqlConnParam, NULL, enventcallback);
    sleep(90);
    return iRet;
}
QiaoJiaSystem/GB28181DecoderModel/GB28181Server.h
New file
@@ -0,0 +1,93 @@
//
// Created by ps on 19-3-1.
//
#ifndef GB28181SDK_GB28181SERVER_H
#define GB28181SDK_GB28181SERVER_H
#include <iostream>
#include <stdio.h>
#include <sys/time.h>
#include <sys/resource.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/wait.h>
#include <unistd.h>
#include <signal.h>
#include <stdlib.h>
#include <time.h>
#include <string.h>
#include "28181SDK.h"
#include "SpinLock.hpp"
// MySQL connection settings handed to the GB28181 SDK via C_InitSDK.
struct MysqlDBServerCfg {
    std::string Host;     // database server ip address
    int Port;             // database server port
    std::string UserName; // database user name
    std::string Passwd;   // database password
    std::string DBName;   // database name
    int DBConnCount;      // number of pooled connections
};
// GB28181 SIP service settings (mirrors the SDK's GBServerParam_T).
struct GBServerCfg {
    std::string SvrIp;    // GB service ip address (local host address)
    std::string SvrNatIp; // NAT traversal ip; leave empty unless deployed on a public network
    int SvrPort;          // port the GB service listens on
    std::string SvrPubID; // GB server ID
    bool bMD5Auth;        // whether MD5 authentication is required
    std::string UserName; // user name lower-level devices register with
    std::string Passwd;   // password lower-level devices register with
    int SubScribeTime;    // subscribe time; 0 means no subscription
};
// Cascade (upper platform) settings (mirrors the SDK's UpperPlatform_T).
struct UpperPFCfg {
    std::string Name;         // display name of the upper platform
    std::string PublicID;     // GB ID of the upper platform
    std::string AuthUserName; // user name used to authenticate with it
    std::string AuthPasswd;   // password used to authenticate with it
    std::string IpAddr;       // upper platform ip address
    int Port;                 // upper platform port
    int RegisterItv;          // register interval -- units defined by the SDK; presumably seconds, TODO confirm
    int KeepAliveItv;         // keep-alive interval -- units defined by the SDK; presumably seconds, TODO confirm
};
// Thin lifecycle wrapper around the 28181SDK server: fill in the MySQL and
// SIP configuration via the setters, then call initServer() once.
class GB28181Server {
public:
    GB28181Server();

    virtual ~GB28181Server();

    // Copies _MysqlConnParam into the SDK's MysqlConnParam_T struct.
    void setMysqlConnParam(const MysqlDBServerCfg &_MysqlConnParam);

    // Copies _GBServerParam into the SDK's GBServerParam_T struct.
    void setGBServerParam(const GBServerCfg &_GBServerParam);

    // Copies _upperinfo into the SDK's UpperPlatform_T struct.
    void setUpperPlatform(const UpperPFCfg &_upperinfo);

    // Starts the SDK (C_InitSDK) with the configured parameters.
    bool initServer();

private:
    // MySQL database configuration passed to C_InitSDK.
    MysqlConnParam_T MysqlConnParam;

    // GB28181 service configuration passed to C_InitSDK.
    GBServerParam_T GBServerParam;

    UpperPlatform_T upperinfo; // cascade (upper platform) configuration

    static bool bGetLoaclRes; // (spelling kept -- referenced elsewhere)

private:
    // Logs every SDK event; when eventtype==1 && eventparam==1 it pulls the
    // device resource list via C_GetResource.
    // NOTE(review): the meaning of eventtype/eventparam values comes from the
    // SDK -- confirm against the 28181SDK documentation.
    static void enventcallback(int eventtype, int eventparam, int datalen, char *data) {
        printf("eventtype:%d, eventparam:%d, datalen:%d, data:%s\n", eventtype, eventparam, datalen, data);
        if (eventtype == 2) {
//            GB28181Server::bGetLoaclRes = true;
        } else if (eventtype == 1 && eventparam == 1) {
            C_GetResource(NULL);
        }
    }
};
#endif //GB28181SDK_GB28181SERVER_H
QiaoJiaSystem/GB28181DecoderModel/GlobalSignalWaitLock.hpp
New file
@@ -0,0 +1,115 @@
//
// Created by pans on 19-1-13.
//
#ifndef GB28181SDK_GLOBALSIGNALWAITLOCK_H
#define GB28181SDK_GLOBALSIGNALWAITLOCK_H
#include <iostream>
#include <basic/util/thread/RWLock.hpp>
//#define TryCath(CODE)     try { \
//CODE } catch (std::exception ex) { \
//ERR(ex.what()); \
//return false; \
//} return true;
#define gSignalLock GlobalSignalWaitLock::getInstance()
//#todo 记录每个信号的wait状态,避免某个信号还在使用却将其删除的情况
// One-shot signalling primitive: wait() blocks the caller until another
// thread calls emit(). Built directly on a pthread condition variable.
class SignalLock {
public:
    // Members are initialized in declaration order (cond first, then mutex)
    // to match the declarations below and avoid -Wreorder.
    SignalLock() : m_signal_cond(PTHREAD_COND_INITIALIZER),
                   m_signal_cond_mutex(PTHREAD_MUTEX_INITIALIZER) {
    }

    // Intentionally does not call pthread_cond_destroy/pthread_mutex_destroy:
    // instances are byte-copied into a std::map (see GlobalSignalWaitLock),
    // and statically-initialized, never-waited-on primitives need no destroy
    // on glibc. NOTE(review): storing them behind unique_ptr would be cleaner.
    virtual ~SignalLock() {
    }

    /// Block until emit() wakes us.
    /// NOTE(review): pthread_cond_wait may also return on a spurious wakeup;
    /// callers must tolerate extra wakeups (there is no predicate here).
    /// @return true (kept for interface compatibility)
    bool wait() {
        pthread_mutex_lock(&m_signal_cond_mutex);
        pthread_cond_wait(&m_signal_cond, &m_signal_cond_mutex);
        pthread_mutex_unlock(&m_signal_cond_mutex);
        return true; // was missing: flowing off a non-void function is UB
    }

    /// Wake one thread blocked in wait(); harmless no-op when nobody waits.
    /// @return true (kept for interface compatibility)
    bool emit() {
        pthread_cond_signal(&m_signal_cond);
        return true; // was missing: flowing off a non-void function is UB
    }

private:
    pthread_cond_t m_signal_cond;
    pthread_mutex_t m_signal_cond_mutex;
};
class GlobalSignalWaitLock {
public:
//    仅支持一对一,#TODO多对一
    static GlobalSignalWaitLock &getInstance() {
        static GlobalSignalWaitLock globalSignalWaitLock;
        return globalSignalWaitLock;
    }
protected:
    GlobalSignalWaitLock() : m_live(true) {}
    virtual ~GlobalSignalWaitLock() {
        m_slRwLock.wrlock();
        m_signalLock.clear();
        m_slRwLock.unlock();
    }
private:
    bool m_live;
    RWLock m_slRwLock;
    std::map<std::string, SignalLock> m_signalLock;
public:
    bool waitSignal(std::string key) {
        try {
            m_slRwLock.rdlock();
            if (m_signalLock.find(key) == m_signalLock.end()) {
                m_slRwLock.r2wLock();
                m_signalLock[key] = std::move(SignalLock());
                m_slRwLock.w2rLock();
            }
            SignalLock &t_signalLock = m_signalLock[key];
            m_slRwLock.unlock();
            t_signalLock.wait();
        } catch (std::exception ex) {
            ERR(ex.what());
            m_slRwLock.unlock();
            return false;
        }
        return true;
    }
    bool emitSigal(std::string key) {
        try {
            m_slRwLock.rdlock();
            if (m_signalLock.find(key) == m_signalLock.end()) {
                m_slRwLock.r2wLock();
                m_signalLock[key] = std::move(SignalLock());
                m_slRwLock.w2rLock();
            }
            SignalLock &t_signalLock = m_signalLock[key];
            m_slRwLock.unlock();
            t_signalLock.emit();
        } catch (std::exception ex) {
            ERR(ex.what());
            m_slRwLock.unlock();
            return false;
        }
        return true;
    }
};
#endif //GB28181SDK_GLOBALSIGNALWAITLOCK_H
QiaoJiaSystem/GB28181DecoderModel/SpinLock.hpp
New file
@@ -0,0 +1,30 @@
//
// Created by ps on 19-3-1.
//
#ifndef GB28181SDK_SPINLOCK_H
#define GB28181SDK_SPINLOCK_H
#include <atomic>
// Minimal busy-wait mutual exclusion on std::atomic_flag. Suitable only for
// very short critical sections (no yielding while spinning).
class SpinLock {
public:
    SpinLock() = default;

    virtual ~SpinLock() {
    }

    // Non-copyable: atomic_flag is non-copyable anyway; make it explicit.
    SpinLock(const SpinLock &) = delete;
    SpinLock &operator=(const SpinLock &) = delete;

    /// Spin until the flag is acquired. acquire order makes the critical
    /// section's reads observe writes made before the matching unlock().
    void lock() {
        while (m_lock.test_and_set(std::memory_order_acquire));
    }

    /// Release the flag; release order publishes the critical section's writes.
    void unlock() {
        m_lock.clear(std::memory_order_release);
    }

private:
    // In-class init: ATOMIC_FLAG_INIT is only guaranteed to work in the
    // "= ATOMIC_FLAG_INIT" form, not in a constructor init-list.
    std::atomic_flag m_lock = ATOMIC_FLAG_INIT;
};
#endif //GB28181SDK_SPINLOCK_H
QiaoJiaSystem/GB28181DecoderModel/VideoCaptureElementWithRtp.cpp
New file
@@ -0,0 +1,210 @@
//
// Created by ps on 19-1-10.
//
#include "VideoCaptureElementWithRtp.h"

#include <sys/stat.h>
#include <sys/types.h>

#include <opencv2/imgproc.hpp>
#include <qt5/QtCore/QDateTime>
#include <basic/util/app/AppPreference.hpp>
//std::string BASICGB28181::VideoCaptureElementWithRtp::m_chanPubID;
//BASICGB28181::FFmpegDecoderJPG BASICGB28181::VideoCaptureElementWithRtp::m_fFmpegDecoderJPG;
/***
 * Binds a GB28181 channel to this pipeline element.
 * @param chanPubID GB28181 channel public ID used for the real-time stream request
 * @param fps target decode frame rate handed to the FFmpeg decoder
 * @param streamTransType 1=UDP, 2=TCP active, 3=TCP passive (see startRtpStream)
 * @param gpuIdx GPU used for decoding, -1 for CPU
 */
BASICGB28181::VideoCaptureElementWithRtp::VideoCaptureElementWithRtp(std::string &chanPubID, int fps,
                                                                     int streamTransType,
                                                                     int gpuIdx) : m_chanPubID(chanPubID), m_fps(fps),
                                                                                   m_running(false),
                                                                                   m_waitSignal(false),
                                                                                   m_streamTransType(streamTransType),
                                                                                   m_gpuIdx(gpuIdx),
                                                                                   m_userdata((long) this) {
//    m_chanPubID = chanPubID;
    // Root directory for saved video clips, read from the application config.
    m_cutPath = appPref.getStringData("user.loop.absolute.path");
    assert(!m_cutPath.empty());
}
// Requests decoder shutdown, then polls (100 x 100ms, ~10s max) until the
// decoder thread reports it is no longer running; stopThd() is re-issued each
// iteration in case an earlier request was missed.
BASICGB28181::VideoCaptureElementWithRtp::~VideoCaptureElementWithRtp() {
    m_fFmpegDecoderJPG.stopThd();
    int loop = 0;
    while ((loop++ < 100) && m_fFmpegDecoderJPG.getRunning()) {
        m_fFmpegDecoderJPG.stopThd();
        usleep(100000); // 100 ms between polls
    }
}
/***
 * Spawns a detached worker thread that requests the real-time stream via
 * C_RealVideoStart and keeps it alive until m_running is cleared, at which
 * point the thread calls C_RealVideoStop and exits.
 * @param streamTransType 1=UDP, 2=TCP active, 3=TCP passive
 * @return true unless TryCath catches an exception while spawning the thread
 * NOTE(review): the lambda captures [&]/this and runs detached -- it must not
 * outlive this element (the destructor only waits for the decoder, not for
 * this thread); verify the shutdown ordering.
 */
bool BASICGB28181::VideoCaptureElementWithRtp::startRtpStream(int streamTransType) {
    TryCath(
    //-------------- GB devices / subordinate GB platforms must support GB28181-2016 ----------------------
        std::thread videoCaptureElementThd([&](VideoCaptureElementWithRtp *p_this, int streamType) {
            DBG("videoCaptureElementThd start...");
            StreamTransType_E etype;
            // Map the integer config value onto the SDK transport enum.
            switch (streamType) {
                case 1: {
                    // real-time video over UDP
                    etype = E_STREAM_TRANS_UDP;
                    break;
                }
                case 2: {
                    // real-time video over TCP, active mode
                    etype = E_STREAM_TRANS_TCPACTIVE;
                    break;
                }
                case 3: {
                    // real-time video over TCP, passive mode
                    etype = E_STREAM_TRANS_TCPPASSIVE;
                    break;
                }
                default:
                    etype = E_STREAM_TRANS_UDP;
            }
            DBG("C_RealVideoStart start... m_chanPubID is " << p_this->m_chanPubID << " etype is " << etype
                                                            << " m_userdata is " << m_userdata);
            // m_userdata is (long)this; streamcallback casts it back to the element.
            long lrealhandle = C_RealVideoStart(const_cast<char *>(p_this->m_chanPubID.c_str()), etype,
                                                p_this->streamcallback, m_userdata);
            if (lrealhandle != -1) {
                DBG(p_this->m_chanPubID << "  C_RealVideoStart ok ... type is " << etype);
                p_this->m_running = true;
                p_this->m_fFmpegDecoderJPG.startThd(p_this->m_chanPubID, p_this->m_fps, p_this->m_gpuIdx);
                // Keep the stream alive until stopRtpStream()/threadClosing().
                while (p_this->m_running) {
                    usleep(300000);
                }
                DBG("videoCaptureElementThd stop ...");
                C_RealVideoStop(lrealhandle);
                DBG("videoCaptureElementThd stop ok...");
            } else {
                p_this->m_running = false;
                p_this->m_fFmpegDecoderJPG.stopThd();
                ERR(p_this->m_chanPubID << " C_RealVideoStart is error lrealhandle is  " << lrealhandle);
            }
        }, this, streamTransType);
        videoCaptureElementThd.detach();
    );
    return true;
}
bool BASICGB28181::VideoCaptureElementWithRtp::stopRtpStream() {
    TryCath(
        m_running = false;
    );
    return true;
}
bool BASICGB28181::VideoCaptureElementWithRtp::getRunning() {
    return m_running;
}
/***
 * RTP depacketizing callback registered with C_RealVideoStart; runs on the
 * SDK's network thread and forwards each buffer into the FFmpeg decoder.
 * @param handle stream handle returned by C_RealVideoStart
 * @param datatype / @param frametype SDK-defined descriptors (logged on error)
 * @param data raw frame bytes, @param datalen their length
 * @param userdata (long)this, set in startRtpStream
 */
void BASICGB28181::VideoCaptureElementWithRtp::streamcallback(long handle, int datatype, int frametype,
                                                              unsigned char *data, int datalen, long userdata) {
#ifdef TestCode
    ClockTimer test("streamcallback");
#endif
    // Recover the owning element from the opaque userdata cookie.
    BASICGB28181::VideoCaptureElementWithRtp *p_this = (BASICGB28181::VideoCaptureElementWithRtp *) userdata;
#ifdef TestCode
    {
//        FILE *fp11 = NULL;
//        if (!fp11) {
////            printf("fp11 handle:%ld, datatype:%d, datalen:%d, userdata:%ld\n", handle, datatype, datalen, userdata);
//            std::string fileName(p_this->m_chanPubID);
//            fileName.append(".mp4");
//            fp11 = fopen(fileName.c_str(), "w+");
//        }
//        fwrite(data, sizeof(char), datalen, fp11);
    }
#endif
    // Push the frame to the decoder; CHKDBG logs when pushInfo does not
    // return the expected true.
    CHKDBG(p_this->m_fFmpegDecoderJPG.pushInfo(data, datalen, p_this->m_chanPubID), true,
           "pushInfo is error !! handle is " << handle << " datatype is " << datatype << " frametype is " << frametype);
}
// PipeElement worker body: propagate this element's output to the registered
// downstream connectors (triggered via submit() in threadInitial's loop).
void BASICGB28181::VideoCaptureElementWithRtp::threadFunc() {
    fireConnectors();
}
/***
 * PipeElement startup hook: spawns a detached thread that waits on the
 * decoder's per-channel "DecoderImageOK" signal, keeps one frame out of every
 * m_nPicsPickOne, publishes it via submit(), then starts the RTP stream.
 * NOTE(review): the lambda captures [&] (m_nPicsPickOne is reached through
 * the implicitly captured this) and runs detached -- it must not outlive this
 * element; verify shutdown ordering against threadClosing().
 */
void BASICGB28181::VideoCaptureElementWithRtp::threadInitial() {
    std::thread waitSignalAndEmit([&](BASICGB28181::VideoCaptureElementWithRtp *p_this) {
        p_this->m_waitSignal = true;
        while (p_this->m_waitSignal) {
//#TODO wait test
#ifdef TestCode
            DBG("waitSignal(\"DecoderImageOK\") begin");
#endif
            // Block until the decoder signals a freshly decoded frame.
            gSignalLock.waitSignal(p_this->m_chanPubID + "DecoderImageOK");
#ifdef TestCode
            DBG("waitSignal(\"DecoderImageOK\") after");
#endif
            p_this->m_picCount++;
            // Frame thinning: keep only every m_nPicsPickOne-th frame.
            if (p_this->m_picCount % m_nPicsPickOne != 0) {
                continue;
            } else {
                p_this->m_picCount.store(0);
            }
            p_this->m_fFmpegDecoderJPG.getImage().copyTo(p_this->m_image);
            {
                cv::Mat copyMat;
                std::string imageName = p_this->m_fFmpegDecoderJPG.GetImageName();
                p_this->m_image.copyTo(copyMat);
//                m_pManager->SaveImageToRedis(m_camId, imageName, copyMat);
            }
            // Schedule threadFunc() so downstream connectors see the frame.
            p_this->submit();
        }
        INFO("waitSignalAndEmit is exit...");
    }, this);
    waitSignalAndEmit.detach();
    startRtpStream(m_streamTransType);
}
// PipeElement shutdown hook: stop the signal-forwarding loop first, then ask
// the RTP receive thread to exit.
void BASICGB28181::VideoCaptureElementWithRtp::threadClosing() {
    m_waitSignal = false;
    stopRtpStream();
}
// Returns a shallow cv::Mat copy (header only) -- the pixel buffer is shared
// with m_image, which the thread started in threadInitial keeps refreshing.
cv::Mat BASICGB28181::VideoCaptureElementWithRtp::getImage() {
    return m_image;
}
// Forwards the clip-length bounds (seconds) to the decoder, which performs
// the actual video saving. Actual clip lengths may deviate somewhat.
void BASICGB28181::VideoCaptureElementWithRtp::SetVideoMinMaxSeconds(const int minSeconds, const int maxSeconds) {
    m_fFmpegDecoderJPG.SetMinMaxVideoSeconds(minSeconds, maxSeconds);
}
// Save-video entry point invoked from RtspAnalysManager: builds a dated
// directory under m_cutPath and asks the decoder to write the video
// associated with strImageName there.
void BASICGB28181::VideoCaptureElementWithRtp::SaveVideo(const std::string &strImageName) {
    INFO("SaveVideo: " << strImageName);
    std::string strTimeStamp = AppUtil::getTimeUSecString();
    std::string strPath = MakeDir(strTimeStamp);
    m_fFmpegDecoderJPG.SaveVideoByImageName(strPath, strImageName);
}
std::string BASICGB28181::VideoCaptureElementWithRtp::MakeDir(const std::string &timeStamp) {
    std::string t_FilePath = m_cutPath;
    if (t_FilePath.back() != '/') {
        t_FilePath.push_back('/');
    }
    char buf[24];
    QDateTime dt = QDateTime::fromString(QString::fromStdString(timeStamp), "yyyy-MM-dd hh:mm:ss:zzz");
    std::string t_strTime = dt.toString("yyyyMMddhh").toStdString();
    // DBG("t_strTime="<<t_strTime);
    t_FilePath.append(m_chanPubID + "/" + t_strTime.substr(0, 6) + "/" + t_strTime.substr(6, 2) + "/");
    //YYYYMMDDHH
    t_FilePath.append(t_strTime.substr(0, 10) + "/");
    std::string t_cmd = "mkdir -p '";
    t_cmd.append(t_FilePath + "'");
    //#get path mkdir path
    system(t_cmd.c_str());
    return t_FilePath;
}
QiaoJiaSystem/GB28181DecoderModel/VideoCaptureElementWithRtp.h
New file
@@ -0,0 +1,94 @@
//
// Created by ps on 19-1-10.
//
#ifndef GB28181SDK_VIDEOCAPTUREELEMENT_H
#define GB28181SDK_VIDEOCAPTUREELEMENT_H
#include "FFmpegDecoderJPG.h"
#include "28181SDK.h"
#include <basic/pipe/PipeElement.h>
#include "GlobalSignalWaitLock.hpp"
namespace BASICGB28181 {
    // Pipeline element that pulls a GB28181 real-time stream via the 28181SDK,
    // feeds it to FFmpegDecoderJPG, and publishes decoded frames downstream.
    class VideoCaptureElementWithRtp : public basic::PipeElement {
    public:
        explicit VideoCaptureElementWithRtp(std::string &chanPubID, int fps, int streamTransType, int gpuIdx = -1);

        virtual ~VideoCaptureElementWithRtp();

        /***
         * Current run state of the real-time stream receive thread.
         * @return
         */
        bool getRunning();

        // Latest published frame (shallow cv::Mat copy).
        cv::Mat getImage();

        // Save-video entry point, called from RtspAnalysManager.
        void SaveVideo(const std::string &strImageName);

        // Set min/max saved-video length in seconds; actual clip lengths may
        // deviate somewhat (needs refinement).
        void SetVideoMinMaxSeconds(const int minSeconds, const int maxSeconds);

        // Create (and return) the storage path derived from timeStamp.
        std::string MakeDir(const std::string &timeStamp);

    private:
        int m_gpuIdx;           // decode GPU index, -1 for CPU
        int m_fps;              // target decode frame rate
        int m_streamTransType;  // 1=UDP, 2=TCP active, 3=TCP passive
        std::string m_chanPubID; // GB28181 channel public ID

        std::atomic<int> m_picCount{0};
        // Frame thinning ratio: currently keep one frame out of every 8.
        const int m_nPicsPickOne = 8;

        FFmpegDecoderJPG m_fFmpegDecoderJPG;
        cv::Mat m_image;
        long m_userdata;        // (long)this, round-tripped through the SDK callback

        std::atomic<bool> m_running;    // receive-thread liveness flag
        std::atomic<bool> m_waitSignal; // signal-forwarding-loop liveness flag

        // Root path under which recorded video clips are stored.
        std::string m_cutPath;

    private:
        /***
         * Start the real-time stream receive thread.
         * @param streamTransType stream transport mode (1=UDP, 2=TCP active, 3=TCP passive)
         * @return
         */
        bool startRtpStream(int streamTransType = 1);

        /***
         * Stop the real-time stream receive thread.
         * @return
         */
        bool stopRtpStream();

        /***
         * RTP depacketizing callback (runs on the SDK's network thread).
         * @param handle
         * @param datatype
         * @param frametype
         * @param data
         * @param datalen
         * @param userdata
         */
        static void streamcallback(long handle, int datatype, int frametype,
                                   unsigned char *data, int datalen, long userdata);

        virtual void threadFunc() override;

        virtual void threadInitial() override;

        virtual void threadClosing() override;
    };
}
#endif //GB28181SDK_VIDEOCAPTUREELEMENT_H
QiaoJiaSystem/GB28181DecoderModel/main.cpp
New file
@@ -0,0 +1,123 @@
//
// Created by ps on 19-1-8.
//
#include <iostream>
#include <thread>
#include <Debug.h>
#include "GB28181Server.h"
#include "VideoCaptureElementWithRtp.h"
#include <opencv2/opencv.hpp>
#include <CvUtil.h>
using namespace std;
int main(int argc, char **argv) {
    std::cout << "test " << std::endl;
    bool running = true;
    bool serinit = false;
    MysqlDBServerCfg mysqlDBServerCfg;
    mysqlDBServerCfg.Host = "192.168.1.148";
    mysqlDBServerCfg.Port = 3306;
    mysqlDBServerCfg.UserName = "root";
    mysqlDBServerCfg.Passwd = "123456";
    mysqlDBServerCfg.DBName = "EGEyesForVSS";
    mysqlDBServerCfg.DBConnCount = 5;
    GBServerCfg gbServerCfg;
    gbServerCfg.SvrIp = "192.168.1.148"; // 国标服务的ip地址  (本机的ip地址)
    gbServerCfg.SvrPort = 7060; // 国标服务监听的端口
    gbServerCfg.SvrPubID = "44120000002000000001"; // 国标服务器的ID
    gbServerCfg.bMD5Auth = false; // 是否需要MD5加密
    gbServerCfg.UserName = "44120100002000000002"; // 国标服务的用户名    (下级设备注册的用户名)
    gbServerCfg.Passwd = "123456"; // 国标服务的密码    (下级设备注册的密码)
    gbServerCfg.SubScribeTime = 3600; // 订阅时间  如果为0 表示不订阅
    SpinLock spinLock;
    auto func = [&] {
        spinLock.lock();
        GB28181Server m_gbs;
        m_gbs.setMysqlConnParam(mysqlDBServerCfg);
        m_gbs.setGBServerParam(gbServerCfg);
        DBG("initServer start before");
        serinit = m_gbs.initServer();
        DBG("initServer start after");
        spinLock.unlock();
        while (running) {
            usleep(4000);
        }
    };
    std::thread thd(func);
    //      ---------------------测试------------------------
    spinLock.lock();
    DBG("test start");
    //#todo search from db
    char ChanPubID2[] = "11010202081314000001";
    std::string str_ChanPubID2(ChanPubID2);
    BASICGB28181::VideoCaptureElementWithRtp videoCapture2(str_ChanPubID2, 25, 1, 0);
    videoCapture2.registerConnector([&]() {
        {
            //testCode
            cv::Mat t_image = videoCapture2.getImage();
            if (t_image.empty()) {
                ERR("t_image is empty");
                return;
            }
            std::string strNewTime;
            strNewTime = AppUtil::getTimeUSecString();
            cv::putText(t_image, strNewTime, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN, 5,
                        cv::Scalar(255, 255, 0), 2);
            DBG("imshow image" << str_ChanPubID2 << strNewTime);
//            imshow("str_ChanPubID2", t_image);
//            cv::waitKey(10);
#if 1
            {
//            TestCode
//                ClockTimer cl("str_ChanPubID1");
                std::string strNewTime2 = AppUtil::getTimeUSecString();
//                cv::putText(t_image, strNewTime2, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN,
//                            5, cv::Scalar(255, 255, 0), 2);
                std::thread test([&](cv::Mat img, std::string strThing) {
                    try {
                        std::string strNewTime = "tmpDec/";
                        strNewTime.append(str_ChanPubID2 + "_").append(strThing).append(".jpg");
                        cv::imwrite(strNewTime, img);
                    } catch (std::exception ex) {
                        ERR(ex.what());
                    }
                }, t_image, strNewTime2);
                test.detach();
            }
#endif
        }
    });
    usleep(10);
    videoCapture2.start();
    sleep(60);
    videoCapture2.stop();
    DBG("test end");
    getchar();
    running = false;
    getchar();
    sleep(2);
    return 0;
}