Forked from development/c++

chenshijun
2019-03-30 91c3b892e976ecf33819c0c2664e4dfcfd88e434
QiaoJiaSystem/GB28181DecoderModel/FFmpegDecoderJPG.cpp
@@ -32,7 +32,8 @@
              dst.data, dst.linesize);
    sws_freeContext(convert_ctx);
    DBG("m.size is " << m.size());
//    DBG("m.size is " << m.size());
//    LOG_IF();
    return m;
}
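// --- Editor's sketch (assumption, not the project's exact avframe_to_cvmat) ---
// The hunk above is the tail of the AVFrame -> cv::Mat conversion: sws_scale
// writes into dst.data/dst.linesize, the context is freed, and the Mat is
// returned. Isolated, that pattern looks roughly like this; frameToMat is a
// hypothetical name.
extern "C" {
#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}
#include <opencv2/core.hpp>

static cv::Mat frameToMat(const AVFrame *frame) {
    int w = frame->width, h = frame->height;
    cv::Mat m(h, w, CV_8UC3);
    // Wrap the cv::Mat buffer as the destination picture for libswscale.
    uint8_t *dstData[4] = { m.data, nullptr, nullptr, nullptr };
    int dstLinesize[4] = { static_cast<int>(m.step), 0, 0, 0 };
    SwsContext *ctx = sws_getContext(w, h, (AVPixelFormat) frame->format,
                                     w, h, AV_PIX_FMT_BGR24,
                                     SWS_BICUBIC, nullptr, nullptr, nullptr);
    if (!ctx) return m;
    sws_scale(ctx, frame->data, frame->linesize, 0, h, dstData, dstLinesize);
    sws_freeContext(ctx);
    return m;   // pixel data lives in the Mat; the caller may free the frame
}
// -------------------------------------------------------------------------------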
@@ -44,8 +45,9 @@
}
BASICGB28181::FFmpegDecoderJPG::~FFmpegDecoderJPG() {
    //clear the queue
    while (m_rtpQueue.count_queue()) {
        //#todo delete frameBuffInfo*
        m_rtpQueue.popNotWait();
    }
@@ -65,8 +67,17 @@
        info->buffLen = datalen;
        info->camIdx = camIdx;
        memcpy(info->buff, data, datalen);
    {
//        FILE *fp11 = NULL;
//        if (!fp11) {
//        fp11 = fopen(camIdx.c_str(), "a+");
//    }
//        fwrite(data, sizeof(char), datalen, fp11);
//        fclose(fp11);
    }
#ifdef TestCode
        DBG(" m_rtpQueue.push before ");
        DBG(" m_rtpQueue.push befores ");
#endif
        m_rtpQueue.push(info);
#ifdef TestCode
@@ -86,10 +97,11 @@
    do {
//        DBG(" m_rtpQueue.pop before ");
        //get a buffinfo from the cache queue
        frameBuffInfo *buffinfo = fFmpegDecoderJPG->m_rtpQueue.pop();
//        DBG(" m_rtpQueue.pop after ");
        diff = len - buffinfo->buffLen;
//        printf("bufsize is :%ld,len is :%ld, datalen:%d \n", bufsize, len, buffinfo->buffLen);
        //frame length is greater than bufsize
        if (diff < 0) {
//            DBG("/帧长大于bufsize" << diff);
@@ -120,7 +132,7 @@
        delete[] buffinfo->buff;
        delete buffinfo;
    } while (diff > 0);
    //#todo emit the signal
//    DBG("emitSigal(\"read_dataOk\") begin");
//    gSignalLock.emitSigal("read_dataOk");
    fFmpegDecoderJPG->m_readData = true;
@@ -128,137 +140,213 @@
    return bufsize;
}
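// --- Editor's sketch (assumption, not the project's code) ---------------------
// read_data above is the custom-IO entry point: FFmpeg calls it whenever the
// demuxer needs more bytes, and it blocks on m_rtpQueue until RTP payload
// arrives. The minimal wiring of such a callback into avio_alloc_context looks
// like this; BlockingByteQueue is a hypothetical stand-in for m_rtpQueue.
extern "C" {
#include <libavformat/avformat.h>
}
#include <algorithm>
#include <condition_variable>
#include <deque>
#include <mutex>

struct BlockingByteQueue {
    std::deque<uint8_t> bytes;
    std::mutex mtx;
    std::condition_variable cv;

    void push(const uint8_t *data, size_t len) {
        { std::lock_guard<std::mutex> lk(mtx); bytes.insert(bytes.end(), data, data + len); }
        cv.notify_one();
    }
    // Blocks until data is available, then hands out at most bufSize bytes.
    int pop(uint8_t *buf, int bufSize) {
        std::unique_lock<std::mutex> lk(mtx);
        cv.wait(lk, [this] { return !bytes.empty(); });
        int n = std::min<int>(bufSize, (int) bytes.size());
        std::copy_n(bytes.begin(), n, buf);
        bytes.erase(bytes.begin(), bytes.begin() + n);
        return n;
    }
};

// Signature required by avio_alloc_context: return bytes read, or an AVERROR.
static int readPacketCb(void *opaque, uint8_t *buf, int bufSize) {
    return static_cast<BlockingByteQueue *>(opaque)->pop(buf, bufSize);
}

static AVIOContext *makeCustomIo(BlockingByteQueue *queue, int bufSize) {
    unsigned char *iobuf = (unsigned char *) av_malloc(bufSize);
    if (!iobuf) return nullptr;
    // write_flag = 0: read-only context; the demuxer pulls data via readPacketCb,
    // just as BareFlowDecoderThd below does with p_this->read_data.
    return avio_alloc_context(iobuf, bufSize, 0, queue, readPacketCb, nullptr, nullptr);
}
// -------------------------------------------------------------------------------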
/***
 * Decoder thread
 * @param p_this
 */
void BASICGB28181::FFmpegDecoderJPG::BareFlowDecoderThd(FFmpegDecoderJPG *p_this) {
    DBG(p_this->m_camIdx << "  BareFlowDecoderThd ok ... gpuIdx is " << p_this->m_gpuIdx);
    p_this->m_running = true;
    av_register_all();
    avformat_network_init();
    AVFormatContext *ic = avformat_alloc_context();
    while (!p_this->m_running) {
    unsigned char *iobuffer = (unsigned char *) av_malloc(p_this->m_buf_size);
    AVIOContext *avio = avio_alloc_context(iobuffer, p_this->m_buf_size, 0, p_this, p_this->read_data, NULL, NULL);
    ic->pb = avio;
        p_this->m_running = true;
//    av_register_all();
//    avformat_network_init();
        p_this->ic = avformat_alloc_context();
    int err = av_probe_input_buffer(ic->pb, &ic->iformat, nullptr, nullptr, 0, p_this->m_buf_size);
    int err1 = avformat_open_input(&ic, "", NULL, NULL);
    int err2 = avformat_find_stream_info(ic, nullptr);
    int vi = -1;
    for (int i = 0; i < ic->nb_streams; ++i) {
        if (ic->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            vi = i;
            break;
        //allocate the memory buffer used by ffmpeg
        p_this->iobuffer = (unsigned char *) av_malloc(p_this->m_buf_size);
        if (!p_this->iobuffer) {
            ERR("av_malloc: err======" << p_this->m_camIdx);
            p_this->m_running = false;
            continue;
        }
    }
    AVStream *stream = ic->streams[vi];
    p_this->video_st = stream;
    AVCodecContext *ctx = avcodec_alloc_context3(nullptr);
    int err3 = avcodec_parameters_to_context(ctx, stream->codecpar);
//     create the AVIOContext from the read callback and the memory block
        p_this->avio = avio_alloc_context(p_this->iobuffer, p_this->m_buf_size, 0, p_this, p_this->read_data, NULL,
                                          NULL);
        if (!p_this->avio) {
            ERR("avio_alloc_context: err======" << p_this->m_camIdx);
            p_this->m_running = false;
            continue;
        }
        p_this->ic->pb = p_this->avio;
    AVCodec *codec = avcodec_find_decoder(ctx->codec_id);
    //whether to enable the GPU
    if (p_this->m_gpuIdx >= 0) {
        if (codec != NULL) {
            char cuvidName[40] = {0};
            sprintf(cuvidName, "%s_cuvid", codec->name);
            if (!strcmp(codec->name, "h264") || !strcmp(codec->name, "h265") || !strcmp(codec->name, "hevc")) {
                AVCodec *codec_cuvid = avcodec_find_decoder_by_name(cuvidName);
                if (codec_cuvid != NULL) {
                    codec = codec_cuvid;
                } else {
        int err = av_probe_input_buffer(p_this->ic->pb, &p_this->ic->iformat, nullptr, nullptr, 0, p_this->m_buf_size);
        if (err) {
            ERR("av_probe_input_buffer: err======" << err << p_this->m_camIdx);
            p_this->m_running = false;
            continue;
        }
        err = avformat_open_input(&p_this->ic, "", NULL, NULL);
        if (err) {
            ERR("avformat_open_input: err======" << err << p_this->m_camIdx);
            p_this->m_running = false;
            continue;
        }
//    int err2 = avformat_find_stream_info(ic, nullptr);
        err = avformat_find_stream_info(p_this->ic, NULL);
        if (err) {
            ERR("avformat_find_stream_info: err======" << err << p_this->m_camIdx);
//        avformat_close_input(&p_this->ic);
//        DBG("avformat_close_input(&p_this->ic);");
            p_this->m_running = false;
            continue;
        }
        int vi = -1;
        for (int i = 0; i < p_this->ic->nb_streams; ++i) {
            if (p_this->ic->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                vi = i;
                break;
            }
        }
        if (vi == -1) //no video stream found
        {
            ERR("no video packet!!");
            p_this->m_running = false;
            continue;
        }
        p_this->stream = p_this->ic->streams[vi];
        p_this->video_st = p_this->stream;
        p_this->ctx = avcodec_alloc_context3(nullptr);
        err = avcodec_parameters_to_context(p_this->ctx, p_this->stream->codecpar);
        if (err) {
            ERR("avcodec_parameters_to_context: err======" << err << p_this->m_camIdx);
//        avformat_close_input(&p_this->ic);
//        DBG("avformat_close_input(&p_this->ic);");
            p_this->m_running = false;
            continue;
        }
        p_this->codec = avcodec_find_decoder(p_this->ctx->codec_id);
        //whether to enable the GPU
        if (p_this->m_gpuIdx >= 0) {
            if (p_this->codec != NULL) {
                char cuvidName[40] = {0};
                sprintf(cuvidName, "%s_cuvid", p_this->codec->name);
                if (!strcmp(p_this->codec->name, "h264") || !strcmp(p_this->codec->name, "h265") ||
                    !strcmp(p_this->codec->name, "hevc")) {
                    p_this->codec_cuvid = avcodec_find_decoder_by_name(cuvidName);
                    if (p_this->codec_cuvid != NULL) {
                        p_this->codec = p_this->codec_cuvid;
                    } else {
//                    return false;
                    ERR("codec_cuvid is NULL");
                        ERR("codec_cuvid is NULL" << p_this->m_camIdx);
                    }
                }
            }
        }
    }
    int err4 = avcodec_open2(ctx, codec, nullptr);
    AVPacket pkt;
    av_init_packet(&pkt);
    AVFrame *frame = av_frame_alloc();
    unsigned int usleepTime = (1000 / p_this->m_fps) - 12;
    usleepTime *= 1000;
    DBG(" before while <<usleepTime is " << usleepTime);
    while (p_this->m_running) {
#ifdef TestCode
        ClockTimer Test("while time");
#endif
        int err5 = av_read_frame(ic, &pkt);
        //# todo save package
        p_this->frame_number++;
        //DBG("GotPicture "<<m_camId<<":"<<frame_number);
        //placed here because, when it ran earlier, the received frame could be incomplete
        p_this->SaveToPacketVector(pkt);
        p_this->CheckSave();
        int err6 = avcodec_send_packet(ctx, &pkt);
        av_packet_unref(&pkt);
        int err7 = avcodec_receive_frame(ctx, frame);
        if ((err7 == AVERROR(EAGAIN)) || (err5 < 0) || (err6 < 0)) {
            ERR(" error << err7:" << err7 << "  err5: " << err5 << " err6: " << err6);
            usleep(40000);
        err = avcodec_open2(p_this->ctx, p_this->codec, nullptr);
        if (err) {
            ERR("avcodec_open2: err======" << err << p_this->m_camIdx);
//        avformat_close_input(&p_this->ic);
//        DBG("avformat_close_input(&p_this->ic);");
//        avcodec_free_context(&p_this->ctx);
//        DBG("avcodec_free_context(&p_this->ctx);");
            p_this->m_running = false;
            continue;
        }
//        BASICGB28181::avframe_to_cvmat(frame).copyTo(p_this->m_image);
        p_this->m_image = std::move(BASICGB28181::avframe_to_cvmat(frame));
        av_init_packet(&p_this->pkt);
        p_this->frame = av_frame_alloc();
//        unsigned int usleepTime = (1000 / p_this->m_fps) - 12;
//        usleepTime *= 1000;
        DBG("===open ok, camid: " << p_this->m_camIdx);
        while (p_this->m_running) {
#ifdef TestCode
        {
//            TestCode
            ClockTimer cl("TestCode");
            std::string strNewTime2 = AppUtil::getTimeUSecString();
            cv::putText(p_this->m_image, strNewTime2, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN,
                        5, cv::Scalar(255, 255, 0), 2);
            std::thread test([&](cv::Mat img, std::string strThing) {
            ClockTimer Test("while time");
#endif
            av_packet_unref(&p_this->pkt);
            p_this->pkt.data = NULL;
            p_this->pkt.size = 0;
            int err5 = av_read_frame(p_this->ic, &p_this->pkt);
            //# todo save package
            p_this->frame_number++;
            int err6 = avcodec_send_packet(p_this->ctx, &p_this->pkt);
            int err7 = avcodec_receive_frame(p_this->ctx, p_this->frame);
            if ((err7 == AVERROR(EAGAIN)) || (err5 < 0) || (err6 < 0)) {
                ERR("  err======: err7" << err7 << "  err5: " << err5 << " err6: " << err6 << p_this->m_camIdx);
                usleep(40000);
                continue;
            }
            //DBG("GotPicture "<<m_camId<<":"<<frame_number);
            //placed here because, when it ran earlier, the received frame could be incomplete
            p_this->SaveToPacketVector(p_this->pkt);
            p_this->CheckSave();
//        BASICGB28181::avframe_to_cvmat(frame).copyTo(p_this->m_image);
            p_this->m_image = std::move(BASICGB28181::avframe_to_cvmat(p_this->frame));
//            save the I-frame as a snapshot
            if (p_this->m_SnapshotNotSaveRet && (p_this->pkt.flags & AV_PKT_FLAG_KEY)) {
                try {
                    std::string strNewTime = "tmpDec/";
                    strNewTime.append(p_this->m_camIdx + "_").append(strThing).append(".jpg");
//                    cv::imwrite(strNewTime, p_this->m_image);
                    std::string strNewName = "./";
                    strNewName.append(p_this->m_camIdx).append(".jpg");
                    cv::imwrite(strNewName, p_this->m_image);
                    p_this->m_SnapshotNotSaveRet = false;
                } catch (std::exception ex) {
                    ERR(ex.what());
                }
            }, p_this->m_image, strNewTime2);
            test.detach();
        }
#endif
            }
        //#todo send to other thd
#ifdef TestCode
        DBG("emitSigal(\"DecoderImageOK\") begin");
                {
        //            TestCode: test code that saves the image locally
                    ClockTimer cl("TestCode");
                    std::string strNewTime2 = AppUtil::getTimeUSecString();
                    cv::putText(p_this->m_image, strNewTime2, cv::Point(408, 540), cv::HersheyFonts::FONT_HERSHEY_PLAIN,
                                5, cv::Scalar(255, 255, 0), 2);
                    std::thread test([&](cv::Mat img, std::string strThing) {
                        try {
                            std::string strNewTime = "tmpDec/";
                            strNewTime.append(p_this->m_camIdx + "_").append(strThing).append(".jpg");
        //                    cv::imwrite(strNewTime, p_this->m_image);
                        } catch (std::exception ex) {
                            ERR(ex.what());
                        }
                    }, p_this->m_image, strNewTime2);
                    test.detach();
                }
#endif
        gSignalLock.emitSigal(p_this->m_camIdx + "DecoderImageOK");
                //#todo send to other thd
#ifdef TestCode
                DBG("emitSigal(\"DecoderImageOK\") begin");
#endif
            //emit the signal to the upper layer
            gSignalLock.emitSigal(p_this->m_camIdx + "DecoderImageOK");
//#ifdef TestCode
//        DBG("emitSigal(\"DecoderImageOK\") after");
//#endif
        DBG("emitSigal(\"DecoderImageOK\") after");
        DBG("p_this->m_camIdx is " << p_this->m_camIdx << " queue size is " << p_this->m_rtpQueue.count_queue());
//            DBG("emitSigal(\"DecoderImageOK\") after");
//            DBG("p_this->m_camIdx is " << p_this->m_camIdx << " queue size is " << p_this->m_rtpQueue.count_queue());
#ifdef TestCode
        {
            ClockTimer cl("waitTime");
            int loop = 0;
            //#TODO
//            while ((loop++ < 3000) && !(p_this->m_readData)) {
//                usleep(10);
//            }
            {
                ClockTimer cl("waitTime");
                int loop = 0;
                //#TODO
    //            while ((loop++ < 3000) && !(p_this->m_readData)) {
    //                usleep(10);
    //            }
            usleep(30000);
            DBG("p_this->m_readData is " << p_this->m_readData << "  loop is " << loop << " queue size is "
                                         << p_this->m_rtpQueue.count_queue());
            p_this->m_readData = false;
//        usleep(12000);
        }
                usleep(30000);
                DBG("p_this->m_readData is " << p_this->m_readData << "  loop is " << loop << " queue size is "
                                             << p_this->m_rtpQueue.count_queue());
                p_this->m_readData = false;
    //        usleep(12000);
            }
#else
        usleep(usleepTime);
//            usleep(usleepTime);
#endif
        }
        DBG(" after while ");
        av_frame_free(&p_this->frame);
    }
    DBG(" after while ");
    av_frame_free(&frame);
}
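// --- Editor's sketch (assumption, not the project's code) ---------------------
// The GPU branch in the thread above builds the decoder name "<codec>_cuvid"
// with sprintf and falls back to the software decoder when NVDEC is not
// available. Pulled out on its own, that selection logic is roughly:
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <string>

static AVCodec *pickDecoder(AVCodecID codecId, int gpuIdx) {
    AVCodec *codec = avcodec_find_decoder(codecId);
    if (!codec || gpuIdx < 0) return codec;            // GPU disabled or codec unknown
    std::string name = codec->name;
    if (name == "h264" || name == "hevc") {            // only these have cuvid counterparts here
        AVCodec *cuvid = avcodec_find_decoder_by_name((name + "_cuvid").c_str());
        if (cuvid) return cuvid;                       // e.g. "h264_cuvid", "hevc_cuvid"
    }
    return codec;                                      // fall back to the CPU decoder
}
// -------------------------------------------------------------------------------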
bool BASICGB28181::FFmpegDecoderJPG::startThd(const std::string &camIdx, const int &fps, const int &gpuIdx) {
@@ -270,6 +358,8 @@
            setenv("CUDA_VISIBLE_DEVICES", std::to_string(gpuIdx).c_str(), 0);
        }
        m_camIdx = camIdx;
        DBG("BareFlowDecoderThd camIdx : " << camIdx);
//        start the decoder thread
        std::thread t_BareFlowDecoder(BareFlowDecoderThd, this);
        t_BareFlowDecoder.detach();
    );
@@ -278,7 +368,7 @@
bool BASICGB28181::FFmpegDecoderJPG::stopThd() {
    TryCath(
        DBG(m_camIdx << "  FFmpegDecoderJPG stopThd ... ");
        DBG(m_camIdx << "  FFmpegDecoderJPG stopThd ... " << m_camIdx);
        m_running = false;
    );
    return true;
@@ -373,16 +463,21 @@
bool BASICGB28181::FFmpegDecoderJPG::SaveVideo(std::string path, int64_t lastFrameId) {
    std::lock_guard<std::mutex> lock(g_mutex);
    INFO("SaveVideo: " << path);
    INFO("SaveVideo: " << path << "m_packetsVec.size : " << m_packetsVec.size());
    if (!m_packetsVec.empty()) {
        startWrite(path.c_str());
        int64_t firstKeyFramePts = m_packetsVec[0].m_packet.pts;
        int64_t firstKeyFrameDts = m_packetsVec[0].m_packet.dts;
        unsigned long int frame_index = 0;
        for (const auto &item:m_packetsVec) {
            if (item.m_frameId < lastFrameId) {
                conversion(const_cast<AVPacket *> (&item.m_packet), firstKeyFramePts, firstKeyFrameDts, video_st);
                DBG("item.m_frameId < lastFrameId   " << item.m_frameId << "  " << lastFrameId);
                conversion(const_cast<AVPacket *> (&item.m_packet), firstKeyFramePts, firstKeyFrameDts, video_st,
                           frame_index);
                frame_index++;
                av_write_frame(m_pOutFmtCtx, &item.m_packet);
            } else {
                DBG("item.m_frameId > lastFrameId   " << item.m_frameId << "  " << lastFrameId);
                break;
            }
        }
@@ -440,6 +535,7 @@
    m_frameIndex++;
    m_packetsVec.push_back({m_frameIndex, newPacket});
    if (newPacket.flags & AV_PKT_FLAG_KEY) {
        DBG("newPacket.flags & AV_PKT_FLAG_KEY   ");
        m_last_I_FrameId = m_frameIndex;
    }
}
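// --- Editor's sketch (assumption, not the project's code) ---------------------
// SaveToPacketVector above keeps a growing vector of {frameId, AVPacket} and
// remembers the id of the most recent keyframe, so SaveVideo can later write a
// clip that starts on an I-frame. The core of that bookkeeping, with
// hypothetical names:
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <vector>

struct StoredPacket { int64_t frameId; AVPacket pkt; };

struct PacketBuffer {
    std::vector<StoredPacket> packets;
    int64_t frameIndex = 0;
    int64_t lastKeyFrameId = -1;

    // Takes its own reference, so the caller may unref its packet afterwards.
    void push(const AVPacket &src) {
        AVPacket copy;
        av_init_packet(&copy);
        av_packet_ref(&copy, &src);          // referenced copy of payload + side data
        packets.push_back({++frameIndex, copy});
        if (copy.flags & AV_PKT_FLAG_KEY)
            lastKeyFrameId = frameIndex;     // a saved clip must begin at this frame
    }
};
// -------------------------------------------------------------------------------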
@@ -460,27 +556,27 @@
    */
    m_pOutVideo_stream = avformat_new_stream(m_pOutFmtCtx, NULL);
    {
//        AVCodecContext *c;
//        c = m_pOutVideo_stream->codec;
//        c->bit_rate = 400000;
//        c->codec_id = video_st->codec->codec_id;
//        c->codec_type = video_st->codec->codec_type;
//        c->time_base.num = video_st->time_base.num;
//        c->time_base.den = video_st->time_base.den;
//        fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
//        c->width = video_st->codec->width;
//        c->height = video_st->codec->height;
//        c->pix_fmt = video_st->codec->pix_fmt;
//        printf("%d %d %d", c->width, c->height, c->pix_fmt);
//        c->flags = video_st->codec->flags;
//        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
//        c->me_range = video_st->codec->me_range;
//        c->max_qdiff = video_st->codec->max_qdiff;
//
//        c->qmin = video_st->codec->qmin;
//        c->qmax = video_st->codec->qmax;
//
//        c->qcompress = video_st->codec->qcompress;
        AVCodecContext *c;
        c = m_pOutVideo_stream->codec;
        c->bit_rate = 400000;
        c->codec_id = video_st->codec->codec_id;
        c->codec_type = video_st->codec->codec_type;
        c->time_base.num = video_st->time_base.num;
        c->time_base.den = video_st->time_base.den;
        fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
        c->width = video_st->codec->width;
        c->height = video_st->codec->height;
        c->pix_fmt = video_st->codec->pix_fmt;
        printf("%d %d %d", c->width, c->height, c->pix_fmt);
        c->flags = video_st->codec->flags;
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
        c->me_range = video_st->codec->me_range;
        c->max_qdiff = video_st->codec->max_qdiff;
        c->qmin = video_st->codec->qmin;
        c->qmax = video_st->codec->qmax;
        c->qcompress = video_st->codec->qcompress;
    }
    ret = avio_open(&m_pOutFmtCtx->pb, filename, AVIO_FLAG_WRITE);
    if (ret < 0) {
@@ -488,7 +584,7 @@
        return -1;
    }
    avformat_write_header(m_pOutFmtCtx, NULL);
    DBG(" avformat_write_header   " << avformat_write_header(m_pOutFmtCtx, NULL));
    m_bstartWrite = true;
    m_bFirstKeyFrame = true;
@@ -501,11 +597,12 @@
int BASICGB28181::FFmpegDecoderJPG::stopWrite() {
    if (m_pOutFmtCtx == nullptr) return -1;
    av_write_trailer(m_pOutFmtCtx);
    avio_close(m_pOutFmtCtx->pb);
    avcodec_close(m_pOutFmtCtx->streams[0]->codec);
    av_freep(&m_pOutFmtCtx->streams[0]->codec);
    av_freep(&m_pOutFmtCtx->streams[0]);
    avio_close(m_pOutFmtCtx->pb);
    av_free(m_pOutFmtCtx);
    m_pOutFmtCtx = nullptr;
    m_bstartWrite = false;
@@ -514,23 +611,23 @@
void BASICGB28181::FFmpegDecoderJPG::conversion(void *packet, const long int &firstKeyPts, const long int &firstKeyDts,
                                                void *inVideoStream) {
                                                void *inVideoStream, unsigned long int frame_index) {
    if ((packet != nullptr) && (inVideoStream != nullptr)) {
        AVStream *inStream = (AVStream *) inVideoStream;
        AVPacket *pkg = static_cast<AVPacket *>(packet);
//            static int a = 0;
//            pkg->dts = a++;
//            pkg->pts = a;
        pkg->pts -= firstKeyPts;
        pkg->dts -= firstKeyDts;
        pkg->pts = av_rescale_q_rnd(pkg->pts, inStream->time_base,
                                    m_pOutVideo_stream->time_base,
                                    (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkg->dts = av_rescale_q_rnd(pkg->dts, inStream->time_base,
                                    m_pOutVideo_stream->time_base,
                                    (AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkg->duration = av_rescale_q(pkg->duration, inStream->time_base,
                                     m_pOutVideo_stream->time_base);
        pkg->pos = -1;
        DBG("frame_index==%d\n" << frame_index);
        //Write PTS
        AVRational time_base1 = inStream->time_base;
        //Duration between 2 frames (us)
        int64_t calc_duration = (double) AV_TIME_BASE / av_q2d(inStream->r_frame_rate);
        //Parameters
        pkg->pts = (double) (frame_index * calc_duration) / (double) (av_q2d(time_base1) * AV_TIME_BASE);
        pkg->dts = pkg->pts;
        pkg->duration = (double) calc_duration / (double) (av_q2d(time_base1) * AV_TIME_BASE);
//        DBG("pts:" << pkg->pts);
//        DBG("dts:" << pkg->dts);
    }
}
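// --- Editor's sketch (assumption, not the project's code) ---------------------
// conversion() above regenerates pts/dts from a running frame index instead of
// rescaling the original timestamps: one frame lasts AV_TIME_BASE/fps
// microseconds, which is then expressed in the input stream's time base. For a
// 25 fps stream with a 1/90000 time base that is 40000 us, i.e. 3600 ticks per
// frame, so frame N gets pts = dts = N * 3600. Condensed:
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
}

static void stampPacket(AVPacket *pkt, const AVStream *inStream, uint64_t frameIndex) {
    // Duration of one frame in microseconds (AV_TIME_BASE == 1000000).
    double calcDuration = (double) AV_TIME_BASE / av_q2d(inStream->r_frame_rate);
    double usPerTick    = av_q2d(inStream->time_base) * AV_TIME_BASE;
    pkt->pts      = (int64_t) (frameIndex * calcDuration / usPerTick);
    pkt->dts      = pkt->pts;                       // assumes no B-frame reordering
    pkt->duration = (int64_t) (calcDuration / usPerTick);
    pkt->pos      = -1;
}
// -------------------------------------------------------------------------------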