#include "wrapper.hpp" #include #include extern "C"{ #include #include #include } #include "ffmpeg/configure/conf.hpp" #include "ffmpeg/format/FormatIn.hpp" #include "ffmpeg/format/FormatOut.hpp" #include "ffmpeg/property/VideoProp.hpp" #include "ffmpeg/data/CodedData.hpp" #include "ffmpeg/data/FrameData.hpp" #include "ffmpeg/log/log.hpp" #include "ffmpeg/bridge/cvbridge.hpp" #include "buz/recorder.hpp" #include "worker/stream.hpp" #include "worker/decoder.hpp" #include "worker/rec.hpp" using namespace logif; using namespace ffwrapper; #define DELETE_POINTER(p) \ do \ { \ if(NULL != p) \ delete p; \ p = NULL; \ }while(0) namespace cffmpeg_wrap{ using namespace buz; Wrapper::Wrapper() :input_url_("") ,scale_w_(0) ,scale_h_(0) ,scale_f_(SWS_POINT) ,audio_(false) ,gb_(0) ,cpu_(0) ,thread_(nullptr) ,stop_stream_(false) ,stream_(nullptr) ,decoder_(nullptr) ,rec_(new rec) { makeTheWorld(); } Wrapper::~Wrapper() { try { if(thread_){ stop_stream_.store(true); thread_->join(); } DELETE_POINTER(rec_); } catch(const std::exception& e) { logIt("WRAPPER EXCEPTION: ", e.what()); } } std::unique_ptr Wrapper::init_reader(const char* input){ VideoProp prop; prop.url_ = input; prop.rtsp_tcp_ = true; prop.gpu_acc_ = !cpu_; std::unique_ptr in(new FormatIn(prop.gpuAccl())); AVDictionary *avdic = prop.optsFormat(); int flag = -1; if (gb_){ flag = in->openGb28181(input, NULL); }else{ flag = in->open(input, &avdic); } if(avdic){ av_dict_free(&avdic); } if(flag == 0){ if(!in->findStreamInfo(NULL)){ logIt("can't find video stream\n"); return nullptr; } return in; } return nullptr; } int Wrapper::RunStream(const char* input){ if(thread_){ logIt("wrapper run stream already run"); return 0; } input_url_ = input; thread_.reset(new std::thread([&]{ run_stream_thread(); })); return 0; } void Wrapper::AudioSwitch(const bool a){ audio_ = a; if (stream_){ stream_->AudioSwitch(a); } } void Wrapper::init_worker(ffwrapper::FormatIn *in){ if (rec_->Loaded() && stream_ && decoder_) return; stream_ = new stream(in, 3 * 25); stream_->AudioSwitch(audio_); decoder_ = new decoder(in, scale_w_, scale_h_, scale_f_); rec_->Load(in); if(fn_rec_lazy_) fn_rec_lazy_(in); } void Wrapper::run_worker(ffwrapper::FormatIn *in, std::shared_ptr data, int64_t &id){ if (stream_) stream_->SetPacket(data); if (decoder_) decoder_->SetFrame(data, id); if (rec_->Loaded()) rec_->SetPacket(data, id); } void Wrapper::deinit_worker(){ DELETE_POINTER(stream_); DELETE_POINTER(decoder_); rec_->Unload(); } void Wrapper::run_stream_thread(){ while(!stop_stream_.load()){ auto in = init_reader(input_url_.c_str()); if (!in) { logIt("ERROR: init_reader! 
url: %s\n", input_url_.c_str()); usleep(200000); continue; } init_worker(in.get()); int64_t id = 0; while(!stop_stream_.load()){ auto data(std::make_shared()); if(!in->readPacket(data)){ logIt("read packet error"); break; } run_worker(in.get(), data, id); id++; } deinit_worker(); } } void Wrapper::BuildRecorder(const char* id, const char *output, const int mindur, const int maxdur, const bool audio){ if (rec_->Loaded()){ rec_->NewRec(id, output, mindur, maxdur, audio); }else{ std::string rid(id), dir(output); fn_rec_lazy_ = [=](ffwrapper::FormatIn *in){rec_->NewRec(rid.c_str(), dir.c_str(), mindur, maxdur, audio);}; } } int Wrapper::FireRecorder(const char* sid,const int64_t &id){ if (rec_->Loaded()){ rec_->FireRecSignal(sid, id); } } void Wrapper::GetInfoRecorder(std::string &recID, int &index, std::string &path){ if (rec_){ rec_->GetRecInfo(recID, index, path); } } ////////decoder void Wrapper::BuildDecoder(){ // use_decoder_ = true; } void Wrapper::GetPicDecoder(unsigned char **data, int *w, int *h, int64_t *id){ if (decoder_){ decoder_->GetFrame(data, w, h, id); } } void Wrapper::GetPacket(unsigned char **pktData, int *size, int *key){ if (stream_){ stream_->GetPacket(pktData, size, key); } } } // end class wrapper /////////////////////////////////////////////////////////// ///single decode or encoder ////// decoder namespace cffmpeg_wrap{ // start test functions uint8_t* DecodeJPEG(const char *file, int *w, int *h){ VideoProp prop; prop.url_ = file; prop.gpu_acc_ = false; std::unique_ptr in(new FormatIn(prop.gpuAccl())); int flag = in->open(file, NULL); std::unique_ptr bridge_(nullptr); if(flag == 0){ if(!in->findStreamInfo(NULL)){ logIt("yolo can't find video stream\n"); return NULL; } auto flag = in->openCodec(AVMEDIA_TYPE_VIDEO, NULL); if(flag){ auto dec_ctx = in->getCodecContext(); AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24; bridge_.reset(new cvbridge( dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt, dec_ctx->width, dec_ctx->height, pix_fmt, SWS_BICUBIC)); }else{ logIt("FormatIn openCodec Failed!"); return NULL; } }else{ logIt("open %s error", file); return NULL; } auto data(std::make_shared()); if(!in->readPacket(data)){ logIt("read packet error"); return NULL; } auto frame(std::make_shared()); auto ret = in->decode(frame, data); if(ret == 1){ AVFrame *frm = frame->getAVFrame(); *w = frm->width; *h = frm->height; unsigned char *data = (unsigned char*)malloc(frm->width * frm->height * 3); bridge_->copyPicture(data, frm); return data; } return NULL; } /////// for encoder typedef struct _PicEncoder{ FormatOut *enc; int w; int h; int fps; int br; int gi; int flag; cvbridge *bridge; } PicEncoder; void *CreateEncoder(const int w, const int h, const int fps, const int br, const int scale_flag, const int gi){ PicEncoder *e = (PicEncoder*)malloc(sizeof(PicEncoder)); e->enc = NULL; e->w = w; e->h = h; e->fps = fps; e->br = br; e->gi = gi; e->flag = scale_flag; e->bridge = NULL; VideoProp prop_; prop_.width_ = w; prop_.height_ = h; prop_.fps_ = fps; prop_.bit_rate_ = br; gi < 0 ? 
prop_.gpu_acc_ = false : prop_.gpu_acc_ = true; FormatOut *enc = new FormatOut(prop_, "./88.mp4"); e->enc = enc; return e; } void DestroyEncoder(void *h){ PicEncoder *e = (PicEncoder*)h; if (e == NULL){ return; } delete e->bridge; delete e->enc; free(e); } int Encode(void *hdl, uint8_t *in, const int w, const int h, uint8_t **out, int *size, int *key){ PicEncoder *e = (PicEncoder*)hdl; auto ctx = e->enc->getCodecContext(); if (e->bridge == NULL){ AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24; e->bridge = new cvbridge( w, h, AV_PIX_FMT_BGR24, e->w, e->h, ctx->pix_fmt, e->flag); } AVFrame *frame = e->bridge->getAVFrame(in, w, h); auto data(std::make_shared()); const int flag = e->enc->encode(data, frame); if(flag > 0){ auto pkt = data->getAVPacket(); int extradata_size = ctx->extradata_size; uint8_t *extra = ctx->extradata; *key = pkt.flags & AV_PKT_FLAG_KEY; if(!(*key)){ extradata_size = 0; } *size = pkt.size + extradata_size; *out = (unsigned char *)malloc(*size); memcpy(*out, extra, extradata_size); memcpy(*out + extradata_size, pkt.data, pkt.size); }else{ logIt("encode error or need more packet\n"); } av_frame_free(&frame); return flag; } }
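
#if 0
// Hypothetical usage sketch (excluded from the build): how a caller might
// drive the Wrapper streaming API defined above. It only calls methods
// implemented in this file; the camera URL, recorder parameters, polling
// interval, and the assumption that the caller may hold on to the buffer
// returned by GetPicDecoder are illustrative guesses, not a documented
// contract.
static void wrapper_usage_sketch(){
    cffmpeg_wrap::Wrapper w;
    w.AudioSwitch(false);
    // queued via fn_rec_lazy_ until the input is opened in init_worker()
    w.BuildRecorder("cam-01", "/tmp/rec", 5, 30, false);
    // spawns run_stream_thread(), which reconnects on read errors
    w.RunStream("rtsp://127.0.0.1/stream");

    unsigned char *pic = NULL;
    int width = 0, height = 0;
    int64_t frame_id = 0;
    for (int i = 0; i < 100; ++i){
        // fetch the latest decoded picture, if any
        w.GetPicDecoder(&pic, &width, &height, &frame_id);
        if (pic){
            // consume width * height * 3 bytes of BGR24 data here;
            // ownership/release of pic depends on the decoder worker
        }
        usleep(40000); // roughly one poll per frame at 25 fps
    }
    // Wrapper's destructor stops and joins the stream thread
}
#endif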
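
#if 0
// Hypothetical usage sketch (excluded from the build): one round trip through
// the single-picture encoder API above. The 1920x1080 BGR24 input buffer, the
// 1280x720 output size, and the bitrate are made up for illustration;
// scale_flag is passed straight to cvbridge, so an swscale flag such as
// SWS_BICUBIC is assumed here.
static void encoder_usage_sketch(uint8_t *bgr /* 1920*1080*3 bytes */){
    // gi < 0 requests the software encoder (see CreateEncoder)
    void *hdl = cffmpeg_wrap::CreateEncoder(1280, 720, 25, 2000000, SWS_BICUBIC, -1);

    uint8_t *pkt = NULL;
    int size = 0, key = 0;
    int ret = cffmpeg_wrap::Encode(hdl, bgr, 1920, 1080, &pkt, &size, &key);
    if (ret > 0){
        // pkt holds 'size' bytes of encoded video; extradata is prepended
        // when 'key' is non-zero, and the caller frees the buffer
        free(pkt);
    }
    cffmpeg_wrap::DestroyEncoder(hdl);
}
#endif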