From 18a05d269516a5e33d8460291c2f93e73d95adce Mon Sep 17 00:00:00 2001
From: zhangmeng <775834166@qq.com>
Date: Tue, 26 Dec 2023 10:45:31 +0800
Subject: [PATCH] GetYUV output format is NV12
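
The decoder path now converts every decoded frame to NV12 before it is handed
back through GetYUV, instead of repacking the U/V planes by hand. Below is a
minimal sketch of the kind of conversion the cvbridge helper presumably
performs (libswscale, e.g. YUV420P -> NV12); the function name frame_to_nv12
is illustrative, not part of this codebase:

    // Illustrative only: convert a decoded frame to NV12 with libswscale.
    extern "C" {
    #include <libavutil/frame.h>
    #include <libswscale/swscale.h>
    }

    static AVFrame* frame_to_nv12(const AVFrame* src) {
        AVFrame* dst = av_frame_alloc();
        if (!dst) return NULL;
        dst->format = AV_PIX_FMT_NV12;
        dst->width  = src->width;
        dst->height = src->height;
        if (av_frame_get_buffer(dst, 32) < 0) { av_frame_free(&dst); return NULL; }

        SwsContext* sws = sws_getContext(src->width, src->height, (AVPixelFormat)src->format,
                                         dst->width, dst->height, AV_PIX_FMT_NV12,
                                         SWS_BILINEAR, NULL, NULL, NULL);
        if (!sws) { av_frame_free(&dst); return NULL; }
        // Convert all planes; NV12 keeps Y in plane 0 and interleaved UV in plane 1.
        sws_scale(sws, src->data, src->linesize, 0, src->height, dst->data, dst->linesize);
        sws_freeContext(sws);
        return dst;
    }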
---
csrc/ffmpeg/format/FormatIn.cpp | 373 ++++++++++++++++------------
csrc/wrapper.cpp | 75 +++--
csrc/worker/stream.cpp | 5
csrc/worker/decoder.cpp | 43 +-
csrc/wrapper.hpp | 1
csrc/ffmpeg/format/FormatOut.cpp | 11
csrc/worker/stream.hpp | 4
csrc/buz/recorder.cpp | 5
csrc/thirdparty/gb28181/include/PsToEs.hpp | 191 ++++++++------
csrc/ffmpeg/format/FormatIn.hpp | 68 +++--
10 files changed, 445 insertions(+), 331 deletions(-)
diff --git a/csrc/buz/recorder.cpp b/csrc/buz/recorder.cpp
index beda224..8f32088 100644
--- a/csrc/buz/recorder.cpp
+++ b/csrc/buz/recorder.cpp
@@ -70,7 +70,6 @@
fclose(fp_);
fp_ = NULL;
}
-
}
int Recorder::init_write_h264(const bool audio){
@@ -323,10 +322,6 @@
if (in->open(NULL, NULL) < 0){
logIt("mux hevc open stream error");
return -3;
- }
- if (!in->findStreamInfo(NULL)) {
- logIt("mux hevc can't find streams");
- return -4;
}
std::unique_ptr<FormatOut> out(new FormatOut(in_->getFPS(), "mp4"));
diff --git a/csrc/ffmpeg/format/FormatIn.cpp b/csrc/ffmpeg/format/FormatIn.cpp
index b827eb6..120e2c7 100644
--- a/csrc/ffmpeg/format/FormatIn.cpp
+++ b/csrc/ffmpeg/format/FormatIn.cpp
@@ -21,7 +21,6 @@
#include "../property/VideoProp.hpp"
#include "../../common/gpu/info.h"
-
using namespace logif;
namespace ffwrapper{
@@ -31,30 +30,14 @@
,vs_idx_(-1)
,as_idx_(-1)
,prop_(NULL)
- ,hw_accl_(hw)
,io_ctx_(NULL)
,read_io_buff_(NULL)
,read_io_buff_size_(32768)
-#ifdef GB28181
- ,handle_gb28181(NULL)
-#endif
,fps_(25.0)
{}
FormatIn::FormatIn(const VideoProp &prop, bool hw/*=true*/)
- :ctx_(NULL)
- ,dec_ctx_(NULL)
- ,vs_idx_(-1)
- ,as_idx_(-1)
- ,prop_(NULL)
- ,hw_accl_(hw)
- ,io_ctx_(NULL)
- ,read_io_buff_(NULL)
- ,read_io_buff_size_(32768)
-#ifdef GB28181
- ,handle_gb28181(NULL)
-#endif
- ,fps_(25.0)
+ :FormatIn(hw)
{
prop_ = new VideoProp;
*prop_ = prop;
@@ -76,21 +59,13 @@
ctx_ = NULL;
}
-#ifdef GB28181
- if (handle_gb28181){
- delete handle_gb28181;
- }
-#endif
-
if(io_ctx_){
av_freep(&io_ctx_->buffer);
av_freep(&io_ctx_);
io_ctx_ = NULL;
}
-
}
-////////////////////////////////////////////////////////////////////////
int FormatIn::openWithCustomIO(void *opaque, read_packet fn, AVDictionary **options/*=NULL*/){
read_io_buff_ = (uint8_t*)av_malloc(read_io_buff_size_);
if(!read_io_buff_){
@@ -112,67 +87,19 @@
ctx_->pb = io_ctx_;
- // auto err = av_probe_input_buffer(ctx_->pb, &ctx_->iformat, NULL, NULL, 0, 0);
- // if(err != 0){
- // logIt("open with custom io prob input buffer error:%d err: %s\n", err, getAVErrorDesc(err).c_str());
- // return -1;
- // }
-
return 0;
}
-#ifdef GB28181
- int FormatIn::openGb28181(const char *filename, AVDictionary **options){
-
- std::string fn = filename;
- //GB28181API gb28181(fn);
- if (handle_gb28181){
- delete handle_gb28181;
- }
- handle_gb28181 = new GB28181API;
- if(handle_gb28181->addCamera(fn) == -1){
- logIt("do addCamera Error\n");
- return -1;
- }
-
- int ret = openWithCustomIO(handle_gb28181, handle_gb28181->readData, options);
- if(ret < 0){
- logIt("do openWithCustomIO failed:%d",ret);
- }else{
- ret = avformat_open_input(&ctx_, "", NULL, options);
- }
-
- // if(ret < 0){
- // logIt("open %s failed:%s",filename,
- // getAVErrorDesc(ret).c_str());
- // }
-
- return ret;
- }
-#endif
-/////////////////////////////////////////////////////////////////////////
int FormatIn::open(const char *filename, AVDictionary **options){
- const int ret = avformat_open_input(&ctx_, filename, NULL, options);
- // if(ret < 0){
- // logIt("open %s failed:%s",filename,
- // getAVErrorDesc(ret).c_str());
- // }
+ int ret = avformat_open_input(&ctx_, filename, NULL, options);
- return ret;
- }
-
- bool FormatIn::findStreamInfo(AVDictionary **options){
-
- const int ret = avformat_find_stream_info(ctx_, options);
+ ret = avformat_find_stream_info(ctx_, options);
if(ret < 0){
logIt("find %s stream info failed:%s",
ctx_->filename,getAVErrorDesc(ret).c_str());
-
- return false;
+ return ret;
}
-
- // logIt("there are %d stream", ctx_->nb_streams);
for (int i = 0; i < ctx_->nb_streams; ++i)
{
@@ -188,13 +115,11 @@
}else if(in->avg_frame_rate.num >=1 && in->avg_frame_rate.den >= 1){
fps_ = av_q2d(in->avg_frame_rate);
}
- logIt("in stream video fps %f, time_base: %d : %d, size: %dx%d", fps_, in->time_base.num, in->time_base.den, in->codecpar->width, in->codecpar->height);
-
-
+ logIt("in stream video fps %f, time_base: %d:%d, size: %dx%d", fps_, in->time_base.num, in->time_base.den, in->codecpar->width, in->codecpar->height);
}
if (type == AVMEDIA_TYPE_AUDIO){
auto in = ctx_->streams[i];
- logIt("in stream audio %d time_base: %d : %d", in->codecpar->codec_id, in->time_base.num, in->time_base.den);
+ logIt("in stream audio %d time_base: %d:%d", in->codecpar->codec_id, in->time_base.num, in->time_base.den);
if (in->codecpar->codec_id == AV_CODEC_ID_AAC)
as_idx_ = i;
else
@@ -202,7 +127,7 @@
}
}
- return true;
+ return 0;
}
const bool FormatIn::IsHEVC()const{
@@ -220,81 +145,29 @@
}
bool FormatIn::openCodec(AVDictionary **options){
+ if (dec_ctx_) return true;
if (vs_idx_ == -1) return false;
AVStream *s = ctx_->streams[vs_idx_];
-
AVCodecParameters *codecpar = s->codecpar;
- AVCodec *dec = NULL;
bool flag = false;
- AVDictionary *avdic = NULL;
- int idle_gpu = -1;
+ AVCodecID codec_id = codecpar->codec_id;
- srand((unsigned)time(NULL));
+ AVCodec *dec = avcodec_find_decoder(codec_id);
-constexpr int need = 350; // M
-constexpr int reserved = 512; // M
-
- for (int i = 0; i < 2; ++i)
- {
- if(hw_accl_){
-
- // set gpu index
- if (prop_->gpu_index_ > -1){
- if (!gpu::satisfy(prop_->gpu_index_, need, reserved)){
- hw_accl_ = false;
- continue;
- }
- idle_gpu = prop_->gpu_index_;
- }else{
- idle_gpu = gpu::getGPUPrior(need, reserved, 0);
- // idle_gpu = gpu::getGPU(300);
- usleep(2000000 + rand()%3000000);
- if (!gpu::satisfy(idle_gpu, need, reserved)){
- hw_accl_ = false;
- continue;
- }
- }
-
- if(idle_gpu < 0){
- logIt("NO GPU RESOURCE TO DECODE");
- hw_accl_ = false;
- continue;
- }
-
- std::string codec_name(avcodec_get_name(codecpar->codec_id));
- codec_name += "_cuvid";
- dec = avcodec_find_decoder_by_name(codec_name.c_str());
-
- if(!dec){
- hw_accl_ = false;
- continue;
- }else{
- av_dict_set(&avdic, "gpu", std::to_string(idle_gpu).c_str(), 0);
- }
- }else{
- dec = avcodec_find_decoder(codecpar->codec_id);
+ if(dec){
+ flag = allocCodec(dec, s, options);
+ if(options && *options){
+ av_dict_free(options);
}
- if(dec){
- if(avdic){
- options = &avdic;
- logIt("DECODE USE GPU %d", idle_gpu);
- }
- flag = allocCodec(dec, s, options);
- if(avdic){
- av_dict_free(&avdic);
- }
- if(flag){
- logIt("use decoder %s\n", dec->name);
- break;
- }else{
- av_free(dec_ctx_);
- dec_ctx_ = NULL;
- hw_accl_ = false;
- }
+ if(!flag){
+ av_free(dec_ctx_);
+ dec_ctx_ = NULL;
}
+
+ logIt("use decoder %s\n", dec->name);
}
return flag;
@@ -302,21 +175,24 @@
bool FormatIn::allocCodec(AVCodec *dec, AVStream *s, AVDictionary **options){
- AVCodecParameters *codecpar = s->codecpar;
+ AVCodecParameters *codecpar = NULL;
+ if(s) codecpar = s->codecpar;
dec_ctx_ = avcodec_alloc_context3(dec);
if (!dec_ctx_){
logIt("avcodec_alloc_context3 error");
return false;
}
- int ret = avcodec_parameters_to_context(dec_ctx_, codecpar);
- if(ret < 0){
- logIt("avcodec_parameters_to_context error : %s", getAVErrorDesc(ret).c_str());
- return false;
+ int ret = 0;
+ if(s && codecpar) {
+ ret = avcodec_parameters_to_context(dec_ctx_, codecpar);
+ if(ret < 0) {
+ logIt("avcodec_parameters_to_context error : %s", getAVErrorDesc(ret).c_str());
+ return false;
+ }
+ av_codec_set_pkt_timebase(dec_ctx_, s->time_base);
+ dec_ctx_->framerate = av_guess_frame_rate(ctx_, s, NULL);
}
- av_codec_set_pkt_timebase(dec_ctx_, s->time_base);
-
- dec_ctx_->framerate = av_guess_frame_rate(ctx_, s, NULL);
dec_ctx_->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
ret = avcodec_open2(dec_ctx_,dec, options);
@@ -329,6 +205,8 @@
}
AVStream *FormatIn::getStream(int type/*=-1*/){
+ if (vs_idx_ < 0 || !ctx_ || ctx_->nb_streams == 0 || !ctx_->streams) return NULL;
+
if (type == -1){
return ctx_->streams[vs_idx_];
}
@@ -364,16 +242,18 @@
}
int FormatIn::readPacket(AVPacket *pkt_out){
-
auto flag = av_read_frame(ctx_, pkt_out);
+ if (flag < 0)
+ logIt("======>> av_read_frame error %s", getAVErrorDesc(flag).c_str());
return flag;
}
int FormatIn::decode(AVFrame* frame, AVPacket *pkt){
AVStream *in = getStream();
-
- av_packet_rescale_ts(pkt, in->time_base, in->codec->time_base);
+ if (in){
+ av_packet_rescale_ts(pkt, in->time_base, in->codec->time_base);
+ }
int ret = avcodec_send_packet(dec_ctx_, pkt);
if(ret < 0){
logIt("pkt size %d avcodec_send_packet error : %s", pkt->size, getAVErrorDesc(ret).c_str());
@@ -392,4 +272,183 @@
}
return -2;
}
+
+//////////////////////////////////////////////////////////////////////
+ constexpr int bs = 8192;
+ FormatInGB::FormatInGB()
+ :FormatIn()
+ ,gb28181_(NULL)
+ ,parser_ctx_(NULL)
+ ,buffer_(NULL)
+ ,buffer_size_(bs)
+ {
+ buffer_ = (unsigned char*)malloc(buffer_size_);
+ }
+
+ FormatInGB::FormatInGB(const VideoProp &prop)
+ :FormatInGB()
+ {
+ prop_ = new VideoProp;
+ *prop_ = prop;
+ }
+
+ FormatInGB::~FormatInGB(){
+ if (parser_ctx_){
+ av_parser_close(parser_ctx_);
+ }
+ if (gb28181_)delete gb28181_;
+ if (buffer_) free(buffer_);
+
+ for(auto &pkt : q_pkt_){
+ av_packet_free(&pkt);
+ }
+ q_pkt_.clear();
+ }
+
+ static enum AVCodecID codecMap(const int c){
+ switch (c) {
+ case E_VIDEO_STREAM_H264:// = 0,
+ return AV_CODEC_ID_H264;
+ case E_VIDEO_STREAM_MPEG2:// = 1, // MPEG4
+ // return AV_CODEC_ID_MPEG2VIDEO;
+ case E_VIDEO_STREAM_MPEG4:// = 2, // MPEG4
+ return AV_CODEC_ID_MPEG4;
+ case E_VIDEO_STREAM_SVAC:// = 3, // SVAC
+ return AV_CODEC_ID_NONE;
+ case E_VIDEO_STREAM_3GP:// = 4, // 3GP
+ return AV_CODEC_ID_NONE; // audio
+ case E_VIDEO_STREAM_H265:// = 5, //H265
+ return AV_CODEC_ID_HEVC;
+ default:
+ break;
+ }
+ return AV_CODEC_ID_NONE;
+ }
+
+ int FormatInGB::open(const char *filename, AVDictionary **options){
+ if (gb28181_){
+ delete gb28181_;
+ }
+
+ gb28181_ = new GB28181API;
+ std::string fn = filename;
+
+ if(gb28181_->addCamera(fn) < 0){
+ delete gb28181_;
+ gb28181_ = NULL;
+ logIt("do addCamera Error\n");
+ return -1;
+ }
+ for(int i = 0; i < 6; i++){
+ if (gb28181_->getDataType() >= 0){
+ AVCodecID id = codecMap(gb28181_->getDataType());
+ logIt("======>>codec name %s\n", avcodec_get_name(id));
+ parser_ctx_ = av_parser_init(id);
+ if (parser_ctx_) parser_ctx_->flags |= PARSER_FLAG_USE_CODEC_TS;
+ AVCodec* dec = avcodec_find_decoder(id);
+ allocCodec(dec, NULL, NULL);
+ break;
+ }
+ usleep(1000000);
+ }
+ return 0;
+ }
+
+ int FormatInGB::readPacket(AVPacket *pkt_out){
+ if (!q_pkt_.empty()){
+ auto pkt = q_pkt_.front();
+ q_pkt_.pop_front();
+ av_packet_ref(pkt_out, pkt);
+ av_packet_free(&pkt);
+ return 0;
+ }
+
+ if (gb28181_->getDataType() < 0){
+ logIt("======>> readPacket can't recv gb28181 stream");
+ return 1;
+ }
+ if (!parser_ctx_){
+ AVCodecID id = codecMap(gb28181_->getDataType());
+ parser_ctx_ = av_parser_init(id);
+ AVCodec* dec = avcodec_find_decoder(id);
+ allocCodec(dec, NULL, NULL);
+ }
+ if (parser_ctx_ && dec_ctx_){
+
+ int try_run = 0;
+ AVPacket* pkt = av_packet_alloc();
+ bool got_pkt = false;
+
+ while (true) {
+ int data_size = gb28181_->readData(gb28181_, buffer_, buffer_size_);
+ // printf("======>> data_size %d pos %d\n", data_size, buffer_pos_);
+ if (data_size == 0){
+ try_run ++;
+ if (try_run > 12){
+ av_packet_free(&pkt);
+ logIt("gb28181_ readData %d failed, return -1", try_run);
+ return -1;
+ }
+ continue;
+ }
+ try_run = 0;
+ unsigned char* data = buffer_;
+ while (data_size > 0) {
+ int ret = av_parser_parse2(parser_ctx_, dec_ctx_,
+ &pkt->data, &pkt->size, data, data_size,
+ AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
+
+ // logIt("======>> av_parser_parse2 ret %d pkt size %d", ret, pkt->size);
+ if (ret < 0) {
+ av_packet_free(&pkt);
+ logIt("======>> av_parser_parse2 error %d %s", ret, getAVErrorDesc(ret).c_str());
+ return ret;
+ }
+
+ data += ret;
+ data_size -= ret;
+
+ if (pkt->size){
+ if(fps_ == 0 && dec_ctx_->framerate.den > 0 && dec_ctx_->framerate.num > 0){
+ fps_ = dec_ctx_->framerate.num/dec_ctx_->framerate.den;
+ if (fps_ == 0) fps_ = 24;
+ }
+ if (parser_ctx_->key_frame == 1){
+ pkt->flags |= AV_PKT_FLAG_KEY;
+ }
+ got_pkt = true;
+ AVPacket* tmpkt = av_packet_alloc();
+ av_packet_ref(tmpkt, pkt);
+ q_pkt_.push_back(tmpkt);
+ }
+ }
+ if (got_pkt) {
+ av_packet_free(&pkt);
+ auto tmpkt = q_pkt_.front();
+ q_pkt_.pop_front();
+ av_packet_ref(pkt_out, tmpkt);
+ av_packet_free(&tmpkt);
+ return 0;
+ }
+ }
+ }
+ return -1;
+ }
+
+ const bool FormatInGB::IsHEVC()const{
+ if (!gb28181_) return false;
+ return codecMap(gb28181_->getDataType()) == AV_CODEC_ID_HEVC;
+ }
+
+ const bool FormatInGB::IsAVC1()const{
+ return false;
+ }
+
+ bool FormatInGB::isVideoPkt(AVPacket *pkt) {
+ return true;
+ }
+ bool FormatInGB::isAudioPkt(AVPacket *pkt) {
+ return false;
+ }
+
}
diff --git a/csrc/ffmpeg/format/FormatIn.hpp b/csrc/ffmpeg/format/FormatIn.hpp
index 2bdafd1..c558aa6 100644
--- a/csrc/ffmpeg/format/FormatIn.hpp
+++ b/csrc/ffmpeg/format/FormatIn.hpp
@@ -3,9 +3,8 @@
#include <stdint.h>
#include <memory>
-#ifdef GB28181
+#include <deque>
#include "PsToEs.hpp"
-#endif
struct AVFormatContext;
struct AVDictionary;
@@ -15,6 +14,7 @@
struct AVFrame;
struct AVCodec;
struct AVIOContext;
+struct AVCodecParserContext;
typedef int(* read_packet)(void *opaque,uint8_t *buf, int buf_size);
@@ -27,52 +27,64 @@
public:
explicit FormatIn(bool hw=true);
explicit FormatIn(const VideoProp &prop, bool hw=true);
- ~FormatIn();
+ virtual ~FormatIn();
public:
- int openWithCustomIO(void *opaque, read_packet fn, AVDictionary **options=NULL);
-#ifdef GB28181
- int openGb28181(const char *filename, AVDictionary **options);
-#endif
- int open(const char *filename, AVDictionary **options);
- bool findStreamInfo(AVDictionary **options);
-
- bool openCodec(AVDictionary **options);
-
- int readPacket(AVPacket *pkt_out);
-
- int decode(AVFrame* frame, AVPacket *pkt);
-
- bool isVideoPkt(AVPacket *pkt);
- bool isAudioPkt(AVPacket *pkt);
+ virtual int open(const char *filename, AVDictionary **options);
+ virtual const bool IsHEVC()const;
+ virtual const bool IsAVC1()const;
+ virtual int readPacket(AVPacket *pkt_out);
+ virtual bool isVideoPkt(AVPacket *pkt);
+ virtual bool isAudioPkt(AVPacket *pkt);
bool notVideoAudio(AVPacket *pkt);
- private:
+
+ int openWithCustomIO(void *opaque, read_packet fn, AVDictionary **options=NULL);
+ bool openCodec(AVDictionary **options);
+ int decode(AVFrame* frame, AVPacket *pkt);
+
+ protected:
bool allocCodec(AVCodec *dec, AVStream *s, AVDictionary **options);
public:
AVStream *getStream(int type = -1);
AVCodecContext *getCodecContext(int type = 0);
AVFormatContext *getFromatContext(){return ctx_;}
const double getFPS()const{return fps_;}
- const bool IsHEVC()const;
- const bool IsAVC1()const;
- private:
+ protected:
AVFormatContext *ctx_;
AVCodecContext *dec_ctx_;
int vs_idx_;
int as_idx_;
-
VideoProp *prop_;
- bool hw_accl_;
- double fps_;
+ int fps_;
private:
AVIOContext *io_ctx_;
uint8_t *read_io_buff_;
const int read_io_buff_size_;
-#ifdef GB28181
- GB28181API *handle_gb28181;
-#endif
+ };
+
+ class FormatInGB : public FormatIn{
+ public:
+ FormatInGB();
+ explicit FormatInGB(const VideoProp &prop);
+ ~FormatInGB();
+
+ virtual int open(const char *filename, AVDictionary **options) override;
+ virtual const bool IsHEVC()const override;
+ virtual const bool IsAVC1()const override;
+ virtual int readPacket(AVPacket *pkt_out) override;
+
+ virtual bool isVideoPkt(AVPacket *pkt) override;
+ virtual bool isAudioPkt(AVPacket *pkt) override;
+
+ private:
+ GB28181API* gb28181_;
+ AVCodecParserContext* parser_ctx_;
+ unsigned char* buffer_;
+ int buffer_size_;
+
+ std::deque<AVPacket*> q_pkt_;
};
}
diff --git a/csrc/ffmpeg/format/FormatOut.cpp b/csrc/ffmpeg/format/FormatOut.cpp
index 6c91db6..5d51aaf 100644
--- a/csrc/ffmpeg/format/FormatOut.cpp
+++ b/csrc/ffmpeg/format/FormatOut.cpp
@@ -107,8 +107,8 @@
enc_ctx_->codec_id = AV_CODEC_ID_H264;
enc_ctx_->codec_type = AVMEDIA_TYPE_VIDEO;
- enc_ctx_->height = (prop.height_ & 0x01) ? prop.height_-1 : prop.height_;
- enc_ctx_->width = (prop.width_ & 0x01) ? prop.width_ - 1 : prop.width_;
+ enc_ctx_->height = prop.height_ & ~0x01;
+ enc_ctx_->width = prop.width_ & ~0x01;
enc_ctx_->sample_aspect_ratio = prop.sample_aspect_ratio_;
@@ -182,7 +182,7 @@
av_opt_set(enc_ctx_->priv_data, "tune", "zerolatency", 0);
av_opt_set(enc_ctx_->priv_data, "profile", "baseline", 0);
- int err =avcodec_open2(enc_ctx_, codec, NULL);
+ int err = avcodec_open2(enc_ctx_, codec, NULL);
if( err< 0)
{
logIt("can't open output codec: %s", getAVErrorDesc(err).c_str());
@@ -194,14 +194,11 @@
return false;
}
ofmt->video_codec = codec_id;
- if(ofmt->flags & AVFMT_GLOBALHEADER)
- {
+ if(ofmt->flags & AVFMT_GLOBALHEADER){
enc_ctx_->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
-
return true;
-
}
AVStream *FormatOut::getStream(){
diff --git a/csrc/thirdparty/gb28181/include/PsToEs.hpp b/csrc/thirdparty/gb28181/include/PsToEs.hpp
index d73d93c..54dc25f 100644
--- a/csrc/thirdparty/gb28181/include/PsToEs.hpp
+++ b/csrc/thirdparty/gb28181/include/PsToEs.hpp
@@ -43,16 +43,22 @@
}
T pop() {
- struct timespec now, end;
- clock_gettime(CLOCK_MONOTONIC, &now);
- static uint64_t waitS = 12; // wait
- end.tv_sec = now.tv_sec + waitS;
- end.tv_nsec = now.tv_nsec;
+ struct timespec to;
+ clock_gettime(CLOCK_MONOTONIC, &to);
+ static uint64_t waitMS = 620; // wait up to 620 ms for stream data
+ uint64_t sec = waitMS / 1000;
+ uint64_t nsec = (waitMS % 1000) * 1e6;
+ to.tv_sec = to.tv_sec + sec;
+ nsec += to.tv_nsec;
+ sec = nsec / 1000000000;
+ nsec = nsec % 1000000000;
+ to.tv_sec += sec;
+ to.tv_nsec = nsec;
// printf("======>>wait stream data\n");
pthread_mutex_lock(&mtx);
while(q.empty()){
- if(pthread_cond_timedwait(&cond, &mtx, &end) == ETIMEDOUT){
+ if(pthread_cond_timedwait(&cond, &mtx, &to) == ETIMEDOUT){
printf("======>>timeout quit\n");
break;
}
@@ -109,8 +115,90 @@
int buffLen;
} frameBuffInfo;
+typedef enum
+{
+ E_VIDEO_STREAM_NONE = -1,
+ E_VIDEO_STREAM_H264 = 0,
+ E_VIDEO_STREAM_MPEG2 = 1, // MPEG4
+ E_VIDEO_STREAM_MPEG4 = 2, // MPEG4
+ E_VIDEO_STREAM_SVAC = 3, // SVAC
+ E_VIDEO_STREAM_3GP = 4, // 3GP
+ E_VIDEO_STREAM_H265 = 5, //H265
+}VideoStreamType_E;
+
class GB28181API{
public:
+
+ static int capturePic(void *opaque, char *buf, int *bufsize, const int tt) {
+
+ GB28181API *_this = (GB28181API *) opaque;
+ int len = 0;
+ *bufsize = 0;
+
+ int ttt = 0;
+ do {
+ if (ttt > tt) return 0;
+ ttt++;
+
+ // get a buffinfo from the cache queue
+ if (_this->m_rtpQueue.count_queue() == 0) {
+// printf(" count_queue == 0 \n");
+ usleep(200000);
+ continue;
+ }
+
+ frameBuffInfo *buffinfo = _this->m_rtpQueue.pop();
+ if (buffinfo == nullptr) {
+ printf(" buffinfo == nullptr \n");
+ return 0;
+ }
+////////////////////////////////////////////////////////
+ FILE* fpJpg = NULL;
+ char fileJpgName[32] = "./tmpCaptureJpg.jpg";
+ char fileIFrameName[32] = "./tmpCaptureX264IFrame";
+ char cmd[512] = {0};
+
+ for(int i = 0; i < 10 * 25; i++){
+ if (!buffinfo){
+ buffinfo = _this->m_rtpQueue.pop();
+ }
+ if (!buffinfo) continue;
+
+ auto fpIframe = fopen(fileIFrameName, "wb+");
+ fwrite(buffinfo->buff, buffinfo->buffLen, 1, fpIframe);
+ fflush(fpIframe);
+ fclose(fpIframe);
+
+ memset(cmd, 0, 512);
+ sprintf(cmd, "ffmpeg -i %s -y -f image2 -ss 00:00:00 -vframes 1 %s >/dev/null", fileIFrameName,
+ fileJpgName);
+ int rr = system(cmd);
+
+ delete[] buffinfo->buff;
+ delete buffinfo;
+ buffinfo = nullptr;
+
+ fpJpg = fopen(fileJpgName, "rb");
+ if (fpJpg) {
+ break;
+ }
+ }
+///////////////////////////////////////////////////////////
+
+ fseek(fpJpg, 0, SEEK_END);
+ len = ftell(fpJpg);
+ fseek(fpJpg, 0, SEEK_SET);
+ *bufsize = fread(buf, sizeof(char), len, fpJpg);
+ fclose(fpJpg);
+
+ memset(cmd, 0, 128);
+ sprintf(cmd, "rm %s %s >/dev/null", fileIFrameName, fileJpgName);
+ system(cmd);
+ } while (*bufsize == 0);
+
+ return *bufsize;
+ }
+
GB28181API(/*string rtspUrl*/){
// handle = addCamera(rtspUrl);
}
@@ -127,9 +215,10 @@
deleteCamera();
}
+ static const int keep_queue_count = 126;
bool pushInfo(unsigned char *data, int datalen) {
- while(m_rtpQueue.count_queue() > 120){
+ while(m_rtpQueue.count_queue() > keep_queue_count){
auto p = m_rtpQueue.popNotWait();
if (p){
delete[] p->buff;
@@ -160,7 +249,7 @@
frameBuffInfo *buffinfo = _this->m_rtpQueue.pop();
// printf(" m_rtpQueue.pop after \n");
if(buffinfo != nullptr){
- diff = len - buffinfo->buffLen;
+ diff = len - buffinfo->buffLen;
}else{
return 0;
}
@@ -175,7 +264,7 @@
info->buff = new unsigned char[buffinfo->buffLen - len]{};
memcpy(info->buff, buffinfo->buff + len, buffinfo->buffLen - len);
- while(_this->m_rtpQueue.count_queue() > 120){
+ while(_this->m_rtpQueue.count_queue() > keep_queue_count){
auto p = _this->m_rtpQueue.popNotWait();
if (p){
delete[] p->buff;
@@ -204,76 +293,6 @@
return bufsize;
}
- static int capturePic(void *opaque, char *buf, int *bufsize, const int tt) {
-
- GB28181API *_this = (GB28181API *) opaque;
- int len = 0;
- *bufsize = 0;
-
- int ttt = 0;
- do {
- if (ttt > tt) return 0;
- ttt++;
-
- // get a buffinfo from the cache queue
- if (_this->m_rtpQueue.count_queue() == 0) {
-// printf(" count_queue == 0 \n");
- usleep(200000);
- continue;
- }
-
- frameBuffInfo *buffinfo = _this->m_rtpQueue.pop();
- if (buffinfo == nullptr) {
- printf(" buffinfo == nullptr \n");
- return 0;
- }
-////////////////////////////////////////////////////////
- FILE* fpJpg = NULL;
- char fileJpgName[32] = "./tmpCaptureJpg.jpg";
- char fileIFrameName[32] = "./tmpCaptureX264IFrame";
- char cmd[512] = {0};
-
- for(int i = 0; i < 10 * 25; i++){
- if (!buffinfo){
- buffinfo = _this->m_rtpQueue.pop();
- }
- if (!buffinfo) continue;
-
- auto fpIframe = fopen(fileIFrameName, "wb+");
- fwrite(buffinfo->buff, buffinfo->buffLen, 1, fpIframe);
- fflush(fpIframe);
- fclose(fpIframe);
-
- memset(cmd, 0, 512);
- sprintf(cmd, "ffmpeg -i %s -y -f image2 -ss 00:00:00 -vframes 1 %s >/dev/null", fileIFrameName,
- fileJpgName);
- int rr = system(cmd);
-
- delete[] buffinfo->buff;
- delete buffinfo;
- buffinfo = nullptr;
-
- fpJpg = fopen(fileJpgName, "rb");
- if (fpJpg) {
- break;
- }
- }
-///////////////////////////////////////////////////////////
-
- fseek(fpJpg, 0, SEEK_END);
- len = ftell(fpJpg);
- fseek(fpJpg, 0, SEEK_SET);
- *bufsize = fread(buf, sizeof(char), len, fpJpg);
- fclose(fpJpg);
-
- memset(cmd, 0, 128);
- sprintf(cmd, "rm %s %s >/dev/null", fileIFrameName, fileJpgName);
- system(cmd);
- } while (*bufsize == 0);
-
- return *bufsize;
- }
-
static void streamCallBack(int datatype, int frametype, unsigned char *data, unsigned int datalen, long userdata)
{
GB28181API *_this = (GB28181API *)userdata;
@@ -282,17 +301,23 @@
if(frametype == GB_VIDEO_FRAME_I){
startFlag = true;
}
+
+ // printf("streamCallBack recv data len %d frametype %d\n", datalen, startFlag);
+ if (_this->datatype_ < 0)
+ _this->datatype_ = datatype;
+
if((data != NULL) && (startFlag == true)){
- _this->pushInfo(data, datalen);
+ _this->pushInfo(data, datalen);
}
}
long addCamera(string &rtsp){
int count = 0;
- while (handle == -1 && count <= 3) {
+ while (handle < 0 && count <= 3) {
count ++;
handle = RTSPSTREAM_Open(rtsp.c_str(), streamCallBack, (long) this);
printf("RTSPSTREAM_Open, handle:%ld \n", handle);
+ usleep(20000);
}
return handle;
}
@@ -304,8 +329,10 @@
}
handle = -1;
- }
+ }
+ const int getDataType(){return datatype_;}
private:
+ int datatype_ = -1;
MyQueue<frameBuffInfo *> m_rtpQueue;
long handle = -1;
};
diff --git a/csrc/worker/decoder.cpp b/csrc/worker/decoder.cpp
index 5e8a7c9..4d5a3b7 100644
--- a/csrc/worker/decoder.cpp
+++ b/csrc/worker/decoder.cpp
@@ -95,6 +95,8 @@
next_idx_ = i.id + 1;
if (frame) {av_frame_free(&frame); frame = NULL;}
frame = frm;
+ }else {
+ av_frame_free(&frm);
}
}
}
@@ -103,30 +105,33 @@
int pix_fmt = frame->format;
int width = frame->width;
int height = frame->height;
- int len = 0;
- uint8_t *origin = cvbridge::extractFrame(frame, &len);
- av_frame_free(&frame);
- if (!origin) return;
-
- uint8_t *finale = NULL;
if (pix_fmt != AV_PIX_FMT_NV12){
- finale = (uint8_t*)malloc(len);
- unsigned char* SrcU = origin + width * height;
- unsigned char* SrcV = SrcU + width * height / 4 ;
- unsigned char* DstU = finale + width * height;
- memcpy(finale, origin, width * height);
- int i = 0;
- for( i = 0 ; i < width * height / 4 ; i++ ){
- *(DstU++) = *(SrcU++);
- *(DstU++) = *(SrcV++);
- }
- free(origin);
- }else{
- finale = origin;
+ cvbridge* bridge = new cvbridge(width, height, pix_fmt,
+ width, height, AV_PIX_FMT_NV12);
+ AVFrame* nv12 = bridge->convert2Frame(frame);
+ av_frame_free(&frame);
+ frame = nv12;
+ delete bridge;
+
+ // finale = (uint8_t*)malloc(len);
+ // unsigned char* SrcU = origin + width * height;
+ // unsigned char* SrcV = SrcU + width * height / 4 ;
+ // unsigned char* DstU = finale + width * height;
+ // memcpy(finale, origin, width * height);
+ // int i = 0;
+ // for( i = 0 ; i < width * height / 4 ; i++ ){
+ // *(DstU++) = *(SrcU++);
+ // *(DstU++) = *(SrcV++);
+ // }
+ // free(origin);
}
+ int len = 0;
+ uint8_t* finale = cvbridge::extractFrame(frame, &len);
+ av_frame_free(&frame);
+
*data = finale;
*w = width;
*h = height;
diff --git a/csrc/worker/stream.cpp b/csrc/worker/stream.cpp
index 94e5ac3..92bc032 100644
--- a/csrc/worker/stream.cpp
+++ b/csrc/worker/stream.cpp
@@ -30,8 +30,11 @@
std::lock_guard<std::mutex> locker(mutex_avpkt_);
list_pkt_.push_back(pkt);
-
while(list_pkt_.size() > max_size_/2*3){
+ CPacket &tmpkt = list_pkt_.front();
+ if (tmpkt.data->getAVPacket().flags & AV_PKT_FLAG_KEY){
+ break;
+ }
list_pkt_.pop_front();
}
diff --git a/csrc/worker/stream.hpp b/csrc/worker/stream.hpp
index 8a0feec..43b24b0 100644
--- a/csrc/worker/stream.hpp
+++ b/csrc/worker/stream.hpp
@@ -1,7 +1,7 @@
#ifndef _cffmpeg_stream_hpp_
#define _cffmpeg_stream_hpp_
-#include <list>
+#include <deque>
#include <mutex>
#include <memory>
@@ -15,7 +15,7 @@
class stream
{
private:
- std::list<CPacket> list_pkt_;
+ std::deque<CPacket> list_pkt_;
std::mutex mutex_avpkt_;
ffwrapper::FormatIn *streamRef_;
const int max_size_;
diff --git a/csrc/wrapper.cpp b/csrc/wrapper.cpp
index e4540b3..a84fd19 100644
--- a/csrc/wrapper.cpp
+++ b/csrc/wrapper.cpp
@@ -50,6 +50,7 @@
,run_dec_(false)
,run_stream_(true)
,run_rec_(false)
+ ,work_start(false)
,thread_(nullptr)
,stop_stream_(false)
,stream_(nullptr)
@@ -72,6 +73,7 @@
,run_dec_(false)
,run_stream_(true)
,run_rec_(false)
+ ,work_start(false)
,thread_(nullptr)
,stop_stream_(false)
,stream_(nullptr)
@@ -103,6 +105,8 @@
}
if (logit_)
logif::DestroyLogger();
+
+ printf("wrapper release\n");
}
std::unique_ptr<ffwrapper::FormatIn> Wrapper::init_reader(const char* input){
@@ -113,32 +117,23 @@
prop.gpu_acc_ = false;
prop.gpu_index_ = devid_;
- std::unique_ptr<FormatIn> in(new FormatIn(prop, prop.gpuAccl()));
+ std::unique_ptr<FormatIn> in(nullptr);
int flag = -1;
-#ifdef GB28181
+ AVDictionary* avdic = NULL;
if (gb_){
- flag = in->openGb28181(input, NULL);
+ in.reset(new FormatInGB(prop));
}else{
-#endif
- AVDictionary *avdic = prop.optsFormat();
- if(avdic){
- flag = in->open(input, &avdic);
- av_dict_free(&avdic);
- }else{
- flag = in->open(input, NULL);
- }
-#ifdef GB28181
+ in.reset(new FormatIn(prop, prop.gpuAccl()));
+ avdic = prop.optsFormat();
}
-#endif
-
- if(flag == 0){
- if(!in->findStreamInfo(NULL)){
- logIt("can't find video stream\n");
- return nullptr;
- }
-
- return in;
+
+ if(avdic){
+ flag = in->open(input, &avdic);
+ av_dict_free(&avdic);
+ }else{
+ flag = in->open(input, NULL);
}
+ if(flag == 0) return in;
return nullptr;
}
@@ -183,7 +178,7 @@
int Wrapper::run_worker(ffwrapper::FormatIn *in, const CPacket &pkt){
if (gb_){
AVPacket &p = pkt.data->getAVPacket();
- p.pts = p.dts = AV_NOPTS_VALUE;
+ // p.pts = p.dts = AV_NOPTS_VALUE;
}
int flag = 0;
if (run_stream_ && stream_) stream_->SetPacket(pkt);
@@ -205,22 +200,32 @@
int64_t file_frame = 0;
using namespace std;
- const string gb_suffix[] = {"/StreamType=2", "/StreamType=3", ""};
+ // const string gb_suffix[] = {"/StreamType=2", "/StreamType=3", ""};
+ // const string gb_suffix[] = {"/StreamType=2", ""};
+ const string gb_suffix[] = {""};
const size_t gb_size = sizeof(gb_suffix) / sizeof(string);
int gs_idx = 0;
- string url = input_url_;
+ string url;
+ url.reserve(input_url_.size()*2);
+ url = input_url_;
+ work_start = false;
while(!stop_stream_.load()){
if (gb_){
if (input_url_.find("/StreamType=") == string::npos)
url = input_url_ + gb_suffix[gs_idx];
-
logIt("======>>input real url %s\n", url.c_str());
+ gs_idx = (gs_idx + 1) % gb_size;
}
auto in = init_reader(url.c_str());
if (!in) {
+ work_start = false;
logIt("ERROR: init_reader! url: %s\n", url.c_str());
- sleep(2);
+ usleep(126000);
+ // for(int i = 0; i < 10; i++){
+ // if (stop_stream_.load()) break;
+ if(gb_) usleep(2617000);
+ // }
continue;
}
@@ -234,6 +239,7 @@
wTime >>= 1;
init_worker(in.get());
+ work_start = true;
int64_t id = gb_ ? 0 : -1;
int64_t v_id = id;
@@ -249,7 +255,13 @@
while(!stop_stream_.load()){
auto data(std::make_shared<CodedData>());
- if (in->readPacket(&data->getAVPacket()) != 0){
+ auto ret = in->readPacket(&data->getAVPacket());
+ if (ret > 0){
+ sleep(2);
+ continue;
+ }
+
+ if (ret < 0){
logIt("read packet error, id: %lld", id);
break;
}
@@ -283,7 +295,6 @@
file_frame++;
usleep(wTime);
}
-
}
deinit_worker();
@@ -335,7 +346,7 @@
}
int Wrapper::GetInfoRecorder(std::string &recID, int &index, std::string &path){
- if (rec_){
+ if (work_start && rec_){
rec_->GetRecInfo(recID, index, path);
}
return 0;
@@ -351,6 +362,10 @@
}
if (decoder_){
decoder_->GetFrame(data, w, h, format, length, id);
+ }else {
+ if (work_start){
+ for(int i = 0; i < 6; i++)this_thread::sleep_for(chrono::seconds{1});
+ }
}
return 0;
}
@@ -360,7 +375,7 @@
}
int Wrapper::GetPacket(unsigned char **pktData, int *size, int *key){
- if (stream_){
+ if (work_start && stream_){
stream_->GetPacket(pktData, size, key);
}
return 0;
diff --git a/csrc/wrapper.hpp b/csrc/wrapper.hpp
index b0df07a..42f0c91 100644
--- a/csrc/wrapper.hpp
+++ b/csrc/wrapper.hpp
@@ -73,6 +73,7 @@
bool run_dec_;
bool run_stream_;
bool run_rec_;
+ bool work_start;
// decoder parameters
std::unique_ptr<std::thread> thread_;
std::atomic_bool stop_stream_;
--
Gitblit v1.8.0