#include "wrapper.hpp" #include #include extern "C"{ #include #include #include } #include "ffmpeg/configure/conf.hpp" #include "ffmpeg/format/FormatIn.hpp" #include "ffmpeg/format/FormatOut.hpp" #include "ffmpeg/property/VideoProp.hpp" #include "ffmpeg/data/CodedData.hpp" #include "ffmpeg/data/FrameData.hpp" #include "ffmpeg/log/log.hpp" #include "ffmpeg/bridge/cvbridge.hpp" #include "buz/recorder.hpp" using namespace logif; using namespace ffwrapper; namespace cffmpeg_wrap{ using namespace buz; Wrapper::Wrapper() :input_url_("") ,thread_(nullptr) ,stop_stream_(false) ,bridge_(NULL) ,scale_w_(0) ,scale_h_(0) ,scale_f_(SWS_POINT) ,gb_(0) ,cpu_(0) ,use_decoder_(false) { makeTheWorld(); } Wrapper::~Wrapper() { if(thread_){ stop_stream_.store(true); thread_->join(); } if(bridge_){ delete bridge_; bridge_ = NULL; } } void Wrapper::ScalePicture(const int w, const int h, const int flags){ scale_w_ = w; scale_f_ = flags; scale_h_ = h; } void Wrapper::UseGB28181(){ gb_ = 1; } void Wrapper::UseCPU(){ cpu_ = 1; } std::unique_ptr Wrapper::init_reader(const char* input){ VideoProp prop; prop.url_ = input; prop.rtsp_tcp_ = true; prop.gpu_acc_ = !cpu_; std::unique_ptr in(new FormatIn(prop.gpuAccl())); AVDictionary *avdic = prop.optsFormat(); int flag = -1; if (gb_){ flag = in->openGb28181(input, NULL); }else{ flag = in->open(input, &avdic); } if(avdic){ av_dict_free(&avdic); } if(flag == 0){ if(!in->findStreamInfo(NULL)){ logIt("yolo can't find video stream\n"); return nullptr; } return in; } return nullptr; } void Wrapper::run_stream_thread(){ while(!stop_stream_.load()){ auto in = init_reader(input_url_.c_str()); if (!in) { logIt("ERROR: init_reader! url: %s\n", input_url_.c_str()); usleep(200000); continue; } int64_t id = 0; avpacket pkt; while(!stop_stream_.load()){ auto data(std::make_shared()); if(!in->readPacket(data)){ logIt("read packet error"); pkt.id = -1; data = nullptr; id = 0; }else{ pkt.id = id++; } pkt.data = data; if(data != nullptr) { cacheAVPacket(data->getAVPacket()); } run_worker(in.get(), pkt); if(!data){ break; } //test // if(recorder_) // if(id % 250 == 0) // recorder_->FireRecorder(id); } } } int Wrapper::RunStream(const char* input){ if(thread_){ logIt("wrapper run stream already run"); return 0; } input_url_ = input; thread_.reset(new std::thread([&]{ run_stream_thread(); })); return 0; } //////////////recorder std::shared_ptr Wrapper::init_recorder(FormatIn *in, std::string id,std::string dir, const int mind, const int maxd){ if(!in){ logIt("Init wrapper first"); return nullptr; } auto rec = std::make_shared(in, id); rec->SetCallback([&](std::string &id, int &index, std::string &path){ cache_rec_info(id, index, path); }); int trycnt = 0; while(trycnt < 100){ const int ret = rec->Run(dir.c_str(), mind, maxd); if(ret == 0) break; usleep(200000); } if (trycnt < 100){ return rec; } return nullptr; } void Wrapper::BuildRecorder(const char* id, const char *output, const int mindur, const int maxdur){ std::string rid(id); std::string dir(output); auto fn = [=](FormatIn *in){ return init_recorder(in, rid, dir, mindur, maxdur); }; std::shared_ptr rec(nullptr); FnRec r = FnRec{fn, rec}; map_rec_[rid] = r; } int Wrapper::FireRecorder(const char* sid,const int64_t &id){ auto iter = map_rec_.find(sid); if (iter != map_rec_.end()){ if(iter->second.rec){ iter->second.rec->FireRecorder(id); } } } void Wrapper::run_worker(ffwrapper::FormatIn *in, avpacket &pkt){ if(!pkt.data) return; if (use_decoder_) { if(in->getCodecContext() == NULL){ bool flag = true; flag = in->openCodec(AVMEDIA_TYPE_VIDEO, NULL); auto dec_ctx = in->getCodecContext(); if(bridge_)delete bridge_; scale_w_ = scale_w_ == 0 || scale_w_ > dec_ctx->width ? dec_ctx->width : scale_w_; scale_h_ = scale_h_ == 0 || scale_h_ > dec_ctx->height ? dec_ctx->height : scale_h_; AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24; bridge_ = new cvbridge( dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt, scale_w_, scale_h_, pix_fmt, scale_f_); if (!flag){ logIt("FormatIn openCodec Failed!"); } } auto frame(std::make_shared()); auto ret = in->decode(frame, pkt.data); if(ret == 1){ //吐出数据 cache_pic(frame); } } for(auto &i : map_rec_){ if (!i.second.rec){ i.second.rec = i.second.fn_init(in); } if (i.second.rec){ i.second.rec->CachePacket(pkt); } } } void Wrapper::cache_rec_info(std::string &id, int &index, std::string &path){ if(func_rec_){ //active api func_rec_(path, index); }else{ // passive api std::lock_guard l(mutex_rec_); while(list_rec_.size() > 10){ for(int i = 0; i < 5; i++){ list_rec_.pop_front(); } } struct record_file_info info; info.id = id; info.file_frame_index = index; info.file_path = path; list_rec_.emplace_back(info); logIt("list rec files count : %d", list_rec_.size()); map_rec_.erase(id); } } void Wrapper::GetInfoRecorder(std::string &sid, int &index, std::string &path){ std::lock_guard l(mutex_rec_); if(list_rec_.empty()){ index = -1; path = ""; sid = ""; return; } auto info = list_rec_.front(); index = info.file_frame_index; path = info.file_path; sid = info.id; list_rec_.pop_front(); // logIt("go get info index: %d, file: %s\n", index, path.c_str()); } ////////decoder void Wrapper::BuildDecoder(){ use_decoder_ = true; } void Wrapper::cache_pic(std::shared_ptr &frame){ pic_bgr24 pic; if(bridge_){ AVFrame *frm = frame->getAVFrame(); pic.w = scale_w_; pic.h = scale_h_; unsigned char *data = (unsigned char*)malloc(pic.w * pic.h * 3); bridge_->copyPicture(data, frm); pic.data = data; } if(func_dec_){ func_dec_(pic.data, pic.w, pic.h); }else{ std::lock_guard l(mutex_pic_); while(list_pic_.size() > 10){ for(int i = 0; i < 5; i++){ auto t = list_pic_.front(); free(t.data); list_pic_.pop_front(); } } list_pic_.emplace_back(pic); } } void Wrapper::GetPicDecoder(unsigned char **data, int *w, int *h){ std::lock_guard l(mutex_pic_); if(list_pic_.empty()){ *data = NULL; *w = 0; *h = 0; return; } auto p = list_pic_.front(); *data = p.data; *w = p.w; *h = p.h; list_pic_.pop_front(); } void Wrapper::GetPacket(unsigned char **pktData, int *size, int *key){ std::lock_guard l(mutex_avpkt_); if(list_avpkt_.empty()){ return; } auto pkt = list_avpkt_.front(); *key = pkt.flags & AV_PKT_FLAG_KEY; *size = pkt.size; *pktData = (unsigned char *)malloc(*size); memcpy(*pktData, pkt.data, pkt.size); list_avpkt_.pop_front(); } void Wrapper::cacheAVPacket(const AVPacket &pkt){ std::lock_guard l(mutex_pic_); while(list_avpkt_.size() > 10){ // printf("cacheAVPacket drop packets!!!!!!!!!!\n"); for(int i = 0; i < 5; i++){ list_avpkt_.pop_front(); } } list_avpkt_.emplace_back(pkt); } ///// active api void Wrapper::ActiveRecorder(const char *dir, const int mind, const int maxd, FUNC_REC func){ BuildRecorder("", dir, mind, maxd); func_rec_ = func; } void Wrapper::ActiveDecoder(FUNC_DEC fn){ BuildDecoder(); func_dec_ = fn; } ////// test uint8_t *Wrapper::decodeJPEG(const char *file, int *w, int *h){ VideoProp prop; prop.url_ = file; prop.gpu_acc_ = false; std::unique_ptr in(new FormatIn(prop.gpuAccl())); int flag = in->open(file, NULL); std::unique_ptr bridge_(nullptr); if(flag == 0){ if(!in->findStreamInfo(NULL)){ logIt("yolo can't find video stream\n"); return NULL; } auto flag = in->openCodec(AVMEDIA_TYPE_VIDEO, NULL); if(flag){ auto dec_ctx = in->getCodecContext(); AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24; bridge_.reset(new cvbridge( dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt, dec_ctx->width, dec_ctx->height, pix_fmt, SWS_BICUBIC)); }else{ logIt("FormatIn openCodec Failed!"); return NULL; } }else{ logIt("open %s error", input_url_.c_str()); return NULL; } auto data(std::make_shared()); if(!in->readPacket(data)){ logIt("read packet error"); return NULL; } auto frame(std::make_shared()); auto ret = in->decode(frame, data); if(ret == 1){ AVFrame *frm = frame->getAVFrame(); *w = frm->width; *h = frm->height; unsigned char *data = (unsigned char*)malloc(frm->width * frm->height * 3); bridge_->copyPicture(data, frm); return data; } return NULL; } /////// for encoder typedef struct _PicEncoder{ FormatOut *enc; int w; int h; int fps; int br; int gi; int flag; cvbridge *bridge; } PicEncoder; void *CreateEncoder(const int w, const int h, const int fps, const int br, const int scale_flag, const int gi){ PicEncoder *e = (PicEncoder*)malloc(sizeof(PicEncoder)); e->enc = NULL; e->w = w; e->h = h; e->fps = fps; e->br = br; e->gi = gi; e->flag = scale_flag; e->bridge = NULL; VideoProp prop_; prop_.width_ = w; prop_.height_ = h; prop_.fps_ = fps; prop_.bit_rate_ = br; gi < 0 ? prop_.gpu_acc_ = false : prop_.gpu_acc_ = true; FormatOut *enc = new FormatOut(prop_, "./88.mp4"); e->enc = enc; return e; } void DestroyEncoder(void *h){ PicEncoder *e = (PicEncoder*)h; if (e == NULL){ return; } delete e->bridge; delete e->enc; free(e); } int Encode(void *hdl, uint8_t *in, const int w, const int h, uint8_t **out, int *size, int *key){ PicEncoder *e = (PicEncoder*)hdl; auto ctx = e->enc->getCodecContext(); if (e->bridge == NULL){ AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24; e->bridge = new cvbridge( w, h, AV_PIX_FMT_BGR24, e->w, e->h, ctx->pix_fmt, e->flag); } AVFrame *frame = e->bridge->getAVFrame(in, w, h); auto data(std::make_shared()); const int flag = e->enc->encode(data, frame); if(flag > 0){ auto pkt = data->getAVPacket(); int extradata_size = ctx->extradata_size; uint8_t *extra = ctx->extradata; *key = pkt.flags & AV_PKT_FLAG_KEY; if(!(*key)){ extradata_size = 0; } *size = pkt.size + extradata_size; *out = (unsigned char *)malloc(*size); memcpy(*out, extra, extradata_size); memcpy(*out + extradata_size, pkt.data, pkt.size); }else{ logIt("encode error or need more packet\n"); } av_frame_free(&frame); return flag; } }