From f93ee1a42e8c47e472332287b7350b66a6b0fa11 Mon Sep 17 00:00:00 2001
From: zhangmeng <775834166@qq.com>
Date: Fri, 24 Jul 2020 18:28:57 +0800
Subject: [PATCH] Save the video after the trigger id as the trigger video

---
 csrc/ffmpeg/format/FormatOut.cpp | 244 +++++++++++++++++++++++++++---------------------
 1 file changed, 139 insertions(+), 105 deletions(-)

diff --git a/csrc/ffmpeg/format/FormatOut.cpp b/csrc/ffmpeg/format/FormatOut.cpp
index bc77524..e832bfe
--- a/csrc/ffmpeg/format/FormatOut.cpp
+++ b/csrc/ffmpeg/format/FormatOut.cpp
@@ -16,8 +16,6 @@
 #include "../configure/conf.hpp"
 #include "../property/VideoProp.hpp"
 
-#include "../data/CodedData.hpp"
-#include "../data/FrameData.hpp"
 
 #include "../../common/gpu/info.h"
 
@@ -26,12 +24,15 @@ namespace ffwrapper{
 
     FormatOut::FormatOut()
     :ctx_(NULL)
-    ,v_s_(NULL)
+    ,v_idx_(-1)
+    ,a_idx_(-1)
     ,enc_ctx_(NULL)
     ,sync_opts_(0)
     ,record_(false)
     ,fps_(0.0f)
     ,format_name_("mp4")
+    ,in_v_stream_(NULL)
+    ,in_a_stream_(NULL)
     {}
 
     FormatOut::~FormatOut()
@@ -50,7 +51,6 @@
             avformat_free_context(ctx_);
             ctx_ = NULL;
         }
-        v_s_ = NULL;
         sync_opts_ = 0;
     }
 
@@ -151,7 +151,8 @@
         }
         logIt("use encoder %s", codec->name);
 
-        v_s_ = avformat_new_stream(ctx_, codec);
+        AVStream *v = avformat_new_stream(ctx_, codec);
+        v_idx_ = 0;
 
         enc_ctx_ = avcodec_alloc_context3(codec);
 
@@ -181,7 +182,7 @@
             logIt("can't open output codec: %s", getAVErrorDesc(err).c_str());
             return false;
         }
-        err = avcodec_parameters_from_context(v_s_->codecpar, enc_ctx_);
+        err = avcodec_parameters_from_context(v->codecpar, enc_ctx_);
         if (err < 0) {
             logIt("can't avcodec_parameters_from_context: %s", getAVErrorDesc(err).c_str());
             return false;
@@ -196,20 +197,25 @@
 
         return true;
     }
+
+    AVStream *FormatOut::getStream(){
+        if (v_idx_ == -1) return NULL;
+        return ctx_->streams[v_idx_];
+    }
 
     const AVCodecContext *FormatOut::getCodecContext()const{
         return enc_ctx_;
     }
 
-    int FormatOut::encode(AVPacket &pkt, AVFrame *frame){
+    int FormatOut::encode(AVPacket *pkt, AVFrame *frame){
 
         AVStream *out = getStream();
 
         frame->quality = enc_ctx_->global_quality;
         frame->pict_type = AV_PICTURE_TYPE_NONE;
 
-        pkt.data = NULL;
-        pkt.size = 0;
+        pkt->data = NULL;
+        pkt->size = 0;
 
         int ret = avcodec_send_frame(enc_ctx_, frame);
         if(ret < 0){
@@ -218,89 +224,39 @@
         }
 
         while(ret >= 0){
-            ret = avcodec_receive_packet(enc_ctx_, &pkt);
+            ret = avcodec_receive_packet(enc_ctx_, pkt);
             if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                 break;
             }else if (ret < 0) {
                 logIt("avcodec_receive_packet : %s", getAVErrorDesc(ret).c_str());
                 return -1;
             }else{
-                if(pkt.pts == AV_NOPTS_VALUE
+                if(pkt->pts == AV_NOPTS_VALUE
                     && !(enc_ctx_->codec->capabilities & AV_CODEC_CAP_DELAY))
                 {
-                    pkt.pts = sync_opts_++;
+                    pkt->pts = sync_opts_++;
                 }
-                av_packet_rescale_ts(&pkt, enc_ctx_->time_base, out->time_base);
+                av_packet_rescale_ts(pkt, enc_ctx_->time_base, out->time_base);
                 // printf("pkt pts: %lld\n", pkt.pts);
-                return 1;
+                return 0;
             }
         }
 
-        return 0;
-    }
-
-    int FormatOut::encode(std::shared_ptr<CodedData> &data,
-                          std::shared_ptr<FrameData> &frame_data){
-
-        AVStream *out = getStream();
-        AVCodecContext *enc_ctx = out->codec;
-        data->refExtraData(enc_ctx->extradata, enc_ctx->extradata_size);
-
-        AVPacket &pkt(data->getAVPacket());
-        AVFrame *frame = frame_data->getAVFrame();
-
-        return encode(pkt, frame);
-    }
-
-    int FormatOut::encode(std::shared_ptr<CodedData> &data,AVFrame *frame){
-
-        AVStream *out = getStream();
-        AVCodecContext *enc_ctx = out->codec;
-        data->refExtraData(enc_ctx->extradata, enc_ctx->extradata_size);
-
-        AVPacket &pkt(data->getAVPacket());
-
-        return encode(pkt, frame);
+        return -2;
     }
 
//////////////////////////////////////////////////////////////////////////
-    FormatOut::FormatOut(AVStream *in, const char *format_name)
+    FormatOut::FormatOut(const double fps, const char *format_name)
     :FormatOut(){
         format_name_ = format_name;
-        if(in->r_frame_rate.num >=1 && in->r_frame_rate.den >= 1){
-            fps_ = av_q2d(in->r_frame_rate);
-        }else if(in->avg_frame_rate.num >=1 && in->avg_frame_rate.den >= 1){
-            fps_ = av_q2d(in->avg_frame_rate);
-        }
-    }
-
-    bool FormatOut::copyCodecFromIn(AVStream *in){
-
-        v_s_ = avformat_new_stream(ctx_, in->codec->codec);
-        if(!v_s_){
-            return false;
-        }
-
-        int ret = avcodec_copy_context(v_s_->codec, in->codec);
-        if (ret < 0){
-            logIt("can't copy codec from in error:%s", getAVErrorDesc(ret).c_str());
-
-            return false;
-        }
-
-        if (ctx_->oformat->flags & AVFMT_GLOBALHEADER)
-        {
-            v_s_->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
-        }
-        return true;
+        fps_ = fps;
     }
 
     bool FormatOut::openResource(const char *filename, const int flags){
-        if((ctx_->flags & AVFMT_NOFILE) != AVFMT_NOFILE){
+        if((ctx_->oformat->flags & AVFMT_NOFILE) != AVFMT_NOFILE){
             const int err = avio_open2(&ctx_->pb, filename, flags, NULL, NULL);
             if(err < 0)
@@ -320,20 +276,71 @@
         }
         return true;
     }
-    bool FormatOut::JustWriter(AVStream *in, const char *filename){
+
+    bool FormatOut::addStream(AVStream *s){
+        AVStream *in_stream = s;
+
+        AVStream *out_stream = avformat_new_stream(ctx_, in_stream->codec->codec);
+        if(!out_stream)
+        {
+            logIt("Failed allocating output stream.\n");
+            return false;
+        }
+        // Copy the codec parameters of the input stream to the output stream
+        auto ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
+        if(ret<0)
+        {
+            logIt("Failed to copy context from input to output stream codec context\n");
+            return false;
+        }
+        out_stream->codecpar->codec_tag = 0;
+
+        if(ctx_->oformat->flags & AVFMT_GLOBALHEADER)
+            out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
+
+        return true;
+    }
+
+    bool FormatOut::copyCodecFromIn(AVStream *v, AVStream *a){
+        if (v){
+            v_idx_ = 0;
+            in_v_stream_ = v;
+
+            if (!addStream(v)){
+                logIt("format out add video stream error");
+                return false;
+            }else{
+                logIt("copy video from instream");
+            }
+        }
+        if (a){
+            a_idx_ = 1;
+            in_a_stream_ = a;
+            if (!addStream(a)){
+                logIt("format out add audio stream error");
+                return false;
+            }else{
+                logIt("copy audio from instream");
+            }
+        }
+        return true;
+    }
+
+    bool FormatOut::JustWriter(AVStream *v, AVStream *a, const char *filename){
         if(ctx_){
             clear();
         }
         bool flag = open(NULL, format_name_.c_str());
-        flag = copyCodecFromIn(in) && flag;
+        flag = copyCodecFromIn(v, a) && flag;
         if(!flag){
             logIt("FormatOut JustWriter error from in");
             return false;
         }
 
         flag = openResource(filename, 2);
+
         if(flag){
             AVDictionary *avdic = NULL;
             char option_key[]="movflags";
@@ -341,8 +348,8 @@
             av_dict_set(&avdic,option_key,option_value,0);
             flag = writeHeader(&avdic);
             av_dict_free(&avdic);
-        }
+        }
+
         return flag;
     }
 
@@ -364,6 +371,7 @@
 
     bool FormatOut::endWriter(){
         auto flag = writeTrailer();
+        closeResource();
 
         record_ = false;
         return flag;
@@ -381,60 +389,86 @@
                 getAVErrorDesc(ret).c_str());
             return false;
         }
+
         record_ = true;
         return true;
     }
 
-    void FormatOut::adjustPTS(AVPacket &pkt, const int64_t &frame_cnt){
-        int64_t time_stamp = frame_cnt;
-
-        pkt.pos = -1;
-        pkt.stream_index = 0;
+    void FormatOut::adjustPTS(AVPacket *pkt, const int64_t &frame_cnt){
+        AVStream *in_stream,*out_stream;
+        int out_idx = -1;
+        std::vector<AVStream*> in_streams{in_v_stream_, in_a_stream_};
+        for (auto i : in_streams){
+            if (i && (i->index == pkt->stream_index)){
+                if (i->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
+                    out_idx = v_idx_;
+                    in_stream = i;
+                    break;
+                }else if (i->codecpar->codec_type == AVMEDIA_TYPE_AUDIO){
+                    in_stream = i;
+                    out_idx = a_idx_;
+                    break;
+                }
+            }
+        }
+        if (out_idx == -1) return;
+        out_stream = ctx_->streams[out_idx];
+        pkt->stream_index = out_idx;
 
-        //Write PTS
-        AVRational time_base = getStream()->time_base;
+        int64_t time_stamp = frame_cnt;
+
+        if (out_idx == v_idx_){
 
-        AVRational time_base_q = { 1, AV_TIME_BASE };
-        //Duration between 2 frames (us)
-        // int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / fps_); // internal timestamp
-        int64_t calc_duration = (int64_t)(AV_TIME_BASE / fps_); // internal timestamp
-        //Parameters
-        pkt.pts = av_rescale_q(time_stamp*calc_duration, time_base_q, time_base);
-        pkt.dts = pkt.pts;
-        pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
-
-        // if (pkt.duration < 0 || time_base.den != 90000){
-        //     logIt("CALCULATE DURATION : %lld, fame count : %lld, TIMEBASE: %d", calc_duration,time_stamp, time_base.den);
-        // }
-
+            pkt->pos = -1;
+            AVRational time_base = ctx_->streams[out_idx]->time_base;
+
+            AVRational time_base_q = { 1, AV_TIME_BASE };
+            int64_t calc_duration = (int64_t)(AV_TIME_BASE / fps_); // internal timestamp
+            pkt->pts = av_rescale_q(time_stamp*calc_duration, time_base_q, time_base);
+            pkt->dts = pkt->pts;
+            pkt->duration = av_rescale_q(calc_duration, time_base_q, time_base);
+
+        }else if (out_idx == a_idx_){
+
+            pkt->duration = 1024;
+            pkt->pts = pkt->dts = pkt->duration * time_stamp;
+
+        }
+        // logIt("BEFORE in stream timebase %d:%d, out timebase %d:%d,
+        //        pts: %lld, dts: %lld, duration: %lld",
+        //     in_stream->time_base.num, in_stream->time_base.den,
+        //     out_stream->time_base.num, out_stream->time_base.den,
+        //     pkt->pts, pkt->dts, pkt->duration);
+
     }
 
-    bool FormatOut::writeFrame(AVPacket &pkt, const int64_t &frame_cnt,
+    bool FormatOut::writeFrame(AVPacket *pkt, const int64_t &frame_cnt,
                                bool interleaved/* = true*/){
 
         adjustPTS(pkt, frame_cnt);
-        return writeFrame2(pkt, interleaved);
+        auto ret = writeFrame2(pkt, interleaved);
+        if (!ret){
+            logIt("write to file failed, pkt.pts: %lld, dts: %lld, frame count: %d",
+                pkt->pts, pkt->dts, frame_cnt);
+        }
+        return ret;
     }
 
-    bool FormatOut::writeFrame2(AVPacket &pkt, bool interleaved){
+    bool FormatOut::writeFrame2(AVPacket *pkt, bool interleaved){
         int ret = 0;
-        if(interleaved)
-            ret = av_interleaved_write_frame(ctx_, &pkt);
-        else
-        {
+        if(interleaved){
+            ret = av_interleaved_write_frame(ctx_, pkt);
+        }else{
             // returns 1 if flushed and there is no more data to flush
-            ret = av_write_frame(ctx_, &pkt);
+            ret = av_write_frame(ctx_, pkt);
         }
 
-        if(ret < 0)
-        {
-            logIt("write packet to file failed:%s",
-                getAVErrorDesc(ret).c_str());
-            return false;
+        if(ret < -22 || ret == 0){
+            return true;
         }
-        return true;
+        return false;
     }
 
     bool FormatOut::writeTrailer(){
-- 
Gitblit v1.8.0
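
As a usage reference for review, the sketch below shows how the reworked writer is expected to be driven after this patch: construct FormatOut with a frame rate, register the demuxer's streams via JustWriter(), push packets with writeFrame(), and finish with endWriter(). This is only a sketch under stated assumptions: FormatOut.hpp is assumed to declare the members exactly as used in the hunks above, and saveTriggerClip(), the 25.0 fps value and the "trigger.mp4" path are illustrative placeholders, not part of this commit.

    // Sketch only -- assumes FormatOut.hpp matches this patch.
    #include "FormatOut.hpp"

    extern "C" {
    #include <libavformat/avformat.h>
    }

    #include <vector>

    // Remux already-read packets (e.g. those collected after the trigger id)
    // into an mp4 file. saveTriggerClip and "trigger.mp4" are placeholders.
    bool saveTriggerClip(AVStream *in_video, const std::vector<AVPacket*> &pkts)
    {
        ffwrapper::FormatOut out(25.0, "mp4");   // 25.0 fps is a placeholder
        // Video-only here; a second AVStream* could be passed for audio.
        if (!out.JustWriter(in_video, NULL, "trigger.mp4"))
            return false;

        int64_t frame_cnt = 0;
        for (AVPacket *p : pkts){
            // adjustPTS() inside writeFrame() rewrites pts/dts from frame_cnt.
            out.writeFrame(p, frame_cnt++, true);
        }
        return out.endWriter();   // write trailer, close the output resource
    }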