From a8b447be656145c9ba2a2d8319a10ae8f726de1f Mon Sep 17 00:00:00 2001
From: zhangmeng <775834166@qq.com>
Date: Fri, 09 Oct 2020 18:20:46 +0800
Subject: [PATCH] FormatOut: rework PTS/DTS handling for H.264-in-MP4 output

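Generate PTS/DTS directly from the frame counter when muxing H.264 into
MP4. The old path rescaled incoming packet timestamps from the input
stream's time base to the output stream's, with a separate special case
when no audio stream was present; adjustPTS() now stamps video packets
at frame_cnt * (AV_TIME_BASE / fps_) microseconds rescaled into the
output time base, and audio packets with a fixed duration of 1024 (one
AAC frame worth of samples). Only codecpar->codec_tag is cleared when
copying stream parameters, and writeFrame2() is split into a static
write_frame() helper plus FormatOut::writeFrameInternal().

A minimal sketch of the timestamp math, not part of this patch; the
helper names are illustrative, and the 25 fps example rate and the
1/sample_rate audio time base are assumptions:

    #include <cstdint>
    extern "C" {
    #include <libavutil/avutil.h>       // AV_TIME_BASE
    #include <libavutil/mathematics.h>  // av_rescale_q
    }

    // Video: frame N lands at N * (AV_TIME_BASE / fps) microseconds,
    // rescaled into the output stream's time base.
    int64_t video_pts(int64_t frame_cnt, double fps, AVRational stream_tb) {
        AVRational us_tb = { 1, AV_TIME_BASE };                // microseconds
        int64_t per_frame_us = (int64_t)(AV_TIME_BASE / fps);  // 25 fps -> 40000 us
        return av_rescale_q(frame_cnt * per_frame_us, us_tb, stream_tb);
    }

    // Audio: fixed 1024-sample packets (AAC frame size), assuming the
    // audio stream's time base is 1/sample_rate so pts counts samples.
    int64_t audio_pts(int64_t frame_cnt) { return 1024 * frame_cnt; }
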
---
 csrc/ffmpeg/format/FormatOut.cpp |   75 ++++++++++++++++---------------------
 1 files changed, 33 insertions(+), 42 deletions(-)

diff --git a/csrc/ffmpeg/format/FormatOut.cpp b/csrc/ffmpeg/format/FormatOut.cpp
index 830e9ac..4d6ce60 100644
--- a/csrc/ffmpeg/format/FormatOut.cpp
+++ b/csrc/ffmpeg/format/FormatOut.cpp
@@ -38,6 +38,7 @@
 	FormatOut::~FormatOut()
 	{
 		clear();
+
 	}
 
     void FormatOut::clear(){
@@ -293,7 +294,7 @@
             logIt("Failed to copy context from input to output stream codec context\n");
             return false;
         }
-        out_stream->codecpar->codec_tag = out_stream->codec->codec_tag = 0;
+        out_stream->codecpar->codec_tag = 0;
         
         if(ctx_->oformat->flags & AVFMT_GLOBALHEADER)
             out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
@@ -349,7 +350,7 @@
             flag = writeHeader(&avdic);
             av_dict_free(&avdic);
         }
-        
+
         return flag;
     }
     
@@ -395,35 +396,11 @@
     }
 
     void FormatOut::adjustPTS(AVPacket *pkt, const int64_t &frame_cnt){
-        if (pkt->stream_index >= ctx_->nb_streams){
-            logIt("adjustPTS pkt stream index too much");
-            return;
-        }
-        
-        if (!in_a_stream_){
-            int64_t time_stamp = frame_cnt;
-        
-            pkt->pos = -1;  
-            pkt->stream_index = 0;
-            //Write PTS
-            AVRational time_base = getStream()->time_base;
-
-            AVRational time_base_q = { 1, AV_TIME_BASE };
-            //Duration between 2 frames (us)
-            // int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / fps_);  // internal timestamp
-            int64_t calc_duration = (int64_t)(AV_TIME_BASE / fps_);  // internal timestamp
-            //Parameters
-            pkt->pts = av_rescale_q(time_stamp*calc_duration, time_base_q, time_base);
-            pkt->dts = pkt->pts;
-            pkt->duration = av_rescale_q(calc_duration, time_base_q, time_base); 
-            return;
-        }
-
         AVStream *in_stream,*out_stream;
         int out_idx = -1;
         std::vector<AVStream*> in_streams{in_v_stream_, in_a_stream_};
         for (auto i : in_streams){
-            if (i->index == pkt->stream_index){
+            if (i && (i->index == pkt->stream_index)){
                 if (i->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
                     out_idx = v_idx_;
                     in_stream = i;
@@ -436,31 +413,41 @@
             }
         }
         if (out_idx == -1) return;
-
         out_stream = ctx_->streams[out_idx];
         pkt->stream_index = out_idx;
+
+        int64_t time_stamp = frame_cnt;
+
+        if (out_idx == v_idx_){
         
+            pkt->pos = -1;  
+            AVRational time_base = ctx_->streams[out_idx]->time_base;
+
+            AVRational time_base_q = { 1, AV_TIME_BASE };
+            int64_t calc_duration = (int64_t)(AV_TIME_BASE / fps_);  // internal timestamp
+            pkt->pts = av_rescale_q(time_stamp*calc_duration, time_base_q, time_base);
+            pkt->dts = pkt->pts;
+            pkt->duration = av_rescale_q(calc_duration, time_base_q, time_base); 
+            
+        }else if (out_idx == a_idx_){
+
+            pkt->duration = 1024;
+            pkt->pts = pkt->dts = pkt->duration * time_stamp; 
+
+        }
         // logIt("BEFORE in stream timebase %d:%d, out timebase %d:%d, 
         //         pts: %lld, dts: %lld, duration: %lld", 
         //     in_stream->time_base.num, in_stream->time_base.den,
         //     out_stream->time_base.num, out_stream->time_base.den,
         //     pkt->pts, pkt->dts, pkt->duration);
 
-        //杞崲 PTS/DTS 鏃跺簭
-        pkt->pts = av_rescale_q_rnd(pkt->pts,in_stream->time_base,out_stream->time_base,(enum AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
-        pkt->dts = av_rescale_q_rnd(pkt->dts, in_stream->time_base, out_stream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));  
-        pkt->duration = av_rescale_q(pkt->duration, in_stream->time_base, out_stream->time_base); 
-        pkt->pos = -1;  
-
-        // logIt("AFTER stream %d, pts: %lld, dts: %lld, duration: %lld", 
-        // pkt->stream_index, pkt->pts, pkt->dts, pkt->duration);
     }
 
     bool FormatOut::writeFrame(AVPacket *pkt, const int64_t &frame_cnt,
                               bool interleaved/* = true*/){
 
         adjustPTS(pkt, frame_cnt);
-        auto ret = writeFrame2(pkt, interleaved);
+        auto ret = writeFrameInternal(pkt, interleaved);
         if (!ret){
             logIt("write to file failed, pkt.pts: %lld, dts: %lld, frame count: %d",
                     pkt->pts, pkt->dts, frame_cnt);
@@ -468,16 +455,15 @@
         return ret;
     }
 
-    bool FormatOut::writeFrame2(AVPacket *pkt, bool interleaved){
-        
+    static bool write_frame(AVFormatContext *ctx, AVPacket *pkt, bool interleaved){
         int ret = 0;
         if(interleaved){
-            ret = av_interleaved_write_frame(ctx_, pkt);
+            ret = av_interleaved_write_frame(ctx, pkt);
         }else{
             // returns 1 if flushed and there is no more data to flush
-            ret = av_write_frame(ctx_, pkt);
+            ret = av_write_frame(ctx, pkt);
         }
-    
+
         if(ret < -22 || ret == 0){
             return true;
         }
@@ -485,6 +471,11 @@
         return false;
     }
 
+    bool FormatOut::writeFrameInternal(AVPacket *pkt, bool interleaved){
+
+        return write_frame(ctx_, pkt, interleaved);
+    }
+
     bool FormatOut::writeTrailer(){
         const int ret = av_write_trailer(ctx_);
         if(ret != 0)

--
Gitblit v1.8.0