#include "ffmpeg/configure/conf.hpp"
#include "ffmpeg/format/FormatIn.hpp"
#include "ffmpeg/format/FormatOut.hpp"
#include "ffmpeg/property/VideoProp.hpp"
#include "ffmpeg/data/CodedData.hpp"
#include "ffmpeg/data/FrameData.hpp"
#include "ffmpeg/log/log.hpp"
#include "ffmpeg/bridge/cvbridge.hpp"

#include "worker/stream.hpp"
#include "worker/decoder.hpp"
#include "worker/rec.hpp"

#include "CUDALERP.h"

using namespace logif;
using namespace ffwrapper;

namespace cffmpeg_wrap{

Wrapper::Wrapper()
:input_url_("")
,scale_w_(0)
,scale_h_(0)
,scale_f_(SWS_POINT)
,audio_(false)
,gb_(0)
,cpu_(0)
,run_dec_(false)
,thread_(nullptr)
,stop_stream_(false)
,stream_(nullptr)
,decoder_(nullptr)
,rec_(new rec)
,logit_(false)
{
    makeTheWorld();
}

Wrapper::Wrapper(const char *logfile)
:input_url_("")
,scale_w_(0)
,scale_h_(0)
,scale_f_(SWS_POINT)
,audio_(false)
,gb_(0)
,cpu_(0)
,run_dec_(false)
,thread_(nullptr)
,stop_stream_(false)
,stream_(nullptr)
,decoder_(nullptr)
,rec_(new rec)
,logit_(true)
{
    makeTheWorld();
    logif::CreateLogger(logfile, true);
}

Wrapper::~Wrapper()
{
    try{
        if (thread_){
            stop_stream_.store(true);
            if (thread_->joinable()) thread_->join();
            delete thread_;
        }
    }catch(const std::exception &e){
        logIt("WRAPPER EXCEPTION: ", e.what());
    }
    if (logit_)
        logif::DestroyLogger();
}

std::unique_ptr<ffwrapper::FormatIn> Wrapper::init_reader(const char* input){
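    // The body is not shown in this excerpt. A minimal sketch, assuming the same
    // open sequence that Decode() uses below (openGb28181 for GB28181 sources
    // when gb_ is set, plain open otherwise; option dictionaries omitted):
    VideoProp prop;
    prop.url_ = input;
    prop.gpu_acc_ = !cpu_;

    std::unique_ptr<FormatIn> in(new FormatIn(prop.gpuAccl()));
    int flag = gb_ ? in->openGb28181(input, NULL) : in->open(input, NULL);
    if (flag != 0){
        logIt("open %s error", input);
        return nullptr;
    }
    if (!in->findStreamInfo(NULL)){
        logIt("can't find video stream");
        return nullptr;
    }
    if (!in->openCodec(NULL)){
        logIt("FormatIn openCodec Failed!");
        return nullptr;
    }
    return in;
}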

void Wrapper::AudioSwitch(const bool a){
    audio_ = a;
    if (stream_){
        stream_->AudioSwitch(a);
    }
}

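// Lazily creates the per-stream workers (packet buffer, decoder, recorder)
// once a FormatIn is available; repeated calls return early while everything
// is already loaded.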
void Wrapper::init_worker(ffwrapper::FormatIn *in){
    if (rec_->Loaded() && stream_ && decoder_) return;

    stream_ = new stream(in, 3 * in->getFPS());
    stream_->AudioSwitch(audio_);

    decoder_ = new decoder(in);

    rec_->Load(in);
    if(fn_rec_lazy_) {
        fn_rec_lazy_();
        fn_rec_lazy_ = nullptr;
    }
}

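// Fans one coded packet out to the active consumers: the packet buffer, the
// decoder (only when decoding was requested via BuildDecoder), and the
// recorder. For GB28181 input the pts/dts are reset to AV_NOPTS_VALUE first.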
void Wrapper::run_worker(ffwrapper::FormatIn *in, std::shared_ptr<ffwrapper::CodedData> data, int64_t &id){

    if (gb_){
        AVPacket &pkt = data->getAVPacket();
        pkt.pts = pkt.dts = AV_NOPTS_VALUE;
    }
    if (stream_) stream_->SetPacket(data, id);
    if (decoder_ && run_dec_) decoder_->SetFrame(data, id);
    if (rec_->Loaded()) rec_->SetPacket(data, id);
}
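// The enclosing function head is missing from this excerpt. A plausible shape,
// given the thread_/stop_stream_ members and the init_reader/init_worker calls
// below (the name run_stream_thread is an assumption), is a reconnect loop:
void Wrapper::run_stream_thread(){

    while(!stop_stream_.load()){
        auto in = init_reader(input_url_.c_str());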
        if (!in) {
            logIt("ERROR: init_reader! url: %s\n", input_url_.c_str());
            sleep(2);
            continue;
        }

        int wTime = 1000000.0 / in->getFPS();
        wTime >>= 1;
        logIt("WAIT TIME PER FRAME: %d", wTime);

        init_worker(in.get());

        int64_t id = gb_ ? 0 : -1;

        while(!stop_stream_.load()){
            auto data(std::make_shared<CodedData>());
            if (in->readPacket(&data->getAVPacket()) != 0){
                logIt("read packet error, id: %lld", id);
                break;
            }

            if (in->notVideoAudio(&data->getAVPacket())){
                continue;
            }

            if (!gb_ && id < 0){
                id++;
                continue;
            }

            run_worker(in.get(), data, id);
            usleep(wTime);

            id++;
        }
    }
}

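// Registers a named recording task. If the input is not opened yet, the request
// is captured in fn_rec_lazy_ and replayed by init_worker() once rec_ is loaded;
// audio is forced off for GB28181 sources.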
void Wrapper::BuildRecorder(const char* id, const char *output, const int mindur, const int maxdur, const bool audio){

    bool a = audio;
    if (gb_) a = false;

    if (rec_->Loaded()){
        rec_->NewRec(id, output, mindur, maxdur, a);
    }else{
        std::string rid(id), dir(output);
        fn_rec_lazy_ =
            [=]{ rec_->NewRec(rid.c_str(), dir.c_str(), mindur, maxdur, a); };
    }
}
////////decoder
void Wrapper::BuildDecoder(){
    // use_decoder_ = true;
    run_dec_ = true;
}

void Wrapper::GetPicDecoder(unsigned char **data, int *w, int *h, int *format, int *length, int64_t *id){
    if (decoder_){
        decoder_->GetFrame(data, w, h, format, length, id);
    }
}
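// Fetches one buffered coded packet from the stream worker; size is the
// payload length and key presumably marks keyframe packets.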
void Wrapper::GetPacket(unsigned char **pktData, int *size, int *key){
    if (stream_){
        stream_->GetPacket(pktData, size, key);
    }
}
} // end namespace cffmpeg_wrap
///////////////////////////////////////////////////////////
///single decode or encoder
////// decoder

// return val: -1 open error; -2 find stream error; -3 open codec error
namespace cffmpeg_wrap{ // start test functions
uint8_t* Decode(const char *file, const int gb, int *w, int *h){
    VideoProp prop;
    prop.url_ = file;
    prop.gpu_acc_ = false;

    std::unique_ptr<FormatIn> in(new FormatIn(prop.gpuAccl()));

    int flag = -1;
    if (gb){
        flag = in->openGb28181(file, NULL);
    }else{
        flag = in->open(file, NULL);
    }

    std::unique_ptr<cvbridge> bridge_(nullptr);

    if(flag == 0){
        if(!in->findStreamInfo(NULL)){
            logIt("yolo can't find video stream\n");
            *w = *h = -2;
            return NULL;
        }
        auto flag = in->openCodec(NULL);
        if(flag){
            auto dec_ctx = in->getCodecContext();
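            // The converter construction is missing here. Presumably it mirrors
            // the cvbridge constructor used by CreateConvertor() below and
            // converts decoded frames to BGR24 at their native size (the
            // SWS_POINT scale flag is an assumption):
            bridge_.reset(new cvbridge(
                    dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt,
                    dec_ctx->width, dec_ctx->height, AV_PIX_FMT_BGR24, SWS_POINT));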
        }else{
            logIt("FormatIn openCodec Failed!");
            *w = *h = -3;
            return NULL;
        }
    }else{
        logIt("open %s error", file);
        *w = *h = -1;
        return NULL;
    }

    uint8_t *pic = NULL;
    *w = *h = 0;

    int tryTime = 0;
    while (tryTime++ < 100){

        auto data(std::make_shared<CodedData>());
        if (in->readPacket(&data->getAVPacket()) == 0){

            auto frame(std::make_shared<FrameData>());
            AVFrame *frm = frame->getAVFrame();
            if(in->decode(frm, &data->getAVPacket()) == 0){
                *w = frm->width;
                *h = frm->height;
                pic = (unsigned char*)malloc(frm->width * frm->height * 3);
                bridge_->copyPicture(pic, frm);
                break;
            }
        }
    }

    return pic;
}
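
// Hypothetical caller-side sketch: the returned buffer is a malloc'd BGR24
// image of w*h*3 bytes and must be released with free(); on failure w/h carry
// the negative error codes listed above.
//
//   int w = 0, h = 0;
//   uint8_t *bgr = Decode("test.mp4", 0 /* not GB28181 */, &w, &h);
//   if (bgr){ /* consume w*h*3 bytes */ free(bgr); }
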
/////// for encoder
typedef struct _PicEncoder{
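    // Fields are not shown in this excerpt; the encode path below needs at least
    // an encoder and a pixel-format bridge (the exact types are assumptions):
    ffwrapper::FormatOut *enc;
    cvbridge *bridge;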
}PicEncoder;

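// The head of this encode routine is missing from the excerpt. A plausible
// signature, reconstructed from the variables used below (the name Encode, the
// void* handle parameter, and the getCodecContext() accessor on the encoder
// are assumptions), is:
int Encode(void *hdl, uint8_t *in, const int w, const int h,
           uint8_t **out, int *size, int *key){
    PicEncoder *e = (PicEncoder*)hdl;
    auto ctx = e->enc->getCodecContext();
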
    AVFrame *frame = e->bridge->getAVFrame(in, w, h);
    AVPacket *pkt = av_packet_alloc();

    auto flag = e->enc->encode(pkt, frame);
    if(flag == 0){
        int extradata_size = ctx->extradata_size;
        uint8_t *extra = ctx->extradata;

        *key = pkt->flags & AV_PKT_FLAG_KEY;
        if(!(*key)){
            extradata_size = 0;
        }
        *size = pkt->size + extradata_size;
        *out = (unsigned char *)malloc(*size);

        memcpy(*out, extra, extradata_size);
        memcpy(*out + extradata_size, pkt->data, pkt->size);

    }else{
        logIt("encode error or need more packet\n");
    }

    av_packet_free(&pkt);
    av_frame_free(&frame);

    return flag;
}

///////////////////////////////////////////////////////////
typedef struct _conv
{
    int srcW;
    int srcH;
    int srcF;
    int dstW;
    int dstH;
    cvbridge *b;
}Conv;

void *CreateConvertor(const int srcW, const int srcH, const int srcFormat,
                      const int dstW, const int dstH, const int flag){
    AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24;
    auto bridge = new cvbridge(
            srcW, srcH, srcFormat,
            dstW, dstH, pix_fmt, flag);
    if (!bridge) return NULL;

    Conv *c = (Conv*)malloc(sizeof(Conv));
    c->b = bridge;
    c->dstW = dstW;
    c->dstH = dstH;
    c->srcW = srcW;
    c->srcH = srcH;
    c->srcF = srcFormat;

    return c;
}

uint8_t *Convert(void *h, uint8_t *src){
    Conv *c = (Conv*)h;

    auto b = c->b;

    AVFrame *tmp_frm = av_frame_alloc();
    tmp_frm->format = (AVPixelFormat)c->srcF;
    tmp_frm->width = c->srcW;
    tmp_frm->height = c->srcH;

    // create an AVPicture frame from the raw source image (e.g. an OpenCV Mat's data)
    int ret = avpicture_fill((AVPicture *)tmp_frm,
            (uint8_t *)src,
            (AVPixelFormat)tmp_frm->format,
            tmp_frm->width,
            tmp_frm->height);

    unsigned char *picData = NULL;
    if (ret > 0){
        picData = (unsigned char*)malloc(c->dstW * c->dstH * 3);
        b->copyPicture(picData, tmp_frm);
    }

    av_frame_free(&tmp_frm);

    return picData;
}

void DestoryConvertor(void *h){
    Conv *c = (Conv*)h;
    delete c->b;
    free(c);
}
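
// Hypothetical lifecycle sketch for the converter API above (format and sizes
// are examples): Convert() returns a malloc'd dstW*dstH*3 BGR24 buffer that
// the caller frees; the handle itself is released with DestoryConvertor().
//
//   void *cvt = CreateConvertor(1920, 1080, AV_PIX_FMT_YUV420P, 1280, 720, SWS_POINT);
//   uint8_t *bgr = Convert(cvt, yuvData);  // yuvData: contiguous source image
//   if (bgr){ /* consume 1280*720*3 bytes */ free(bgr); }
//   DestoryConvertor(cvt);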


uint8_t* ConvertYUV2BGR(uint8_t *src, const int w, const int h, const int dst_w, const int dst_h, int *length){
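    // NOTE: the GPU resize path is currently disabled; the commented block below
    // sketches how CUDALERP would be driven (texture-bound source, device output
    // buffer) and the function simply returns NULL for now.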
    return NULL;

    // int oldw = w, oldh = h, neww = dst_w, newh = dst_h;
    // // setting cache and shared modes
    // cudaDeviceSetCacheConfig(cudaFuncCachePreferL1);
    // cudaDeviceSetSharedMemConfig(cudaSharedMemBankSizeFourByte);

    // // allocating and transferring image and binding to texture object
    // cudaChannelFormatDesc chandesc_img = cudaCreateChannelDesc(8, 0, 0, 0, cudaChannelFormatKindUnsigned);
    // cudaArray* d_img_arr;
    // cudaMallocArray(&d_img_arr, &chandesc_img, oldw, oldh, cudaArrayTextureGather);
    // cudaMemcpyToArray(d_img_arr, 0, 0, image, oldh * oldw, cudaMemcpyHostToDevice);
    // struct cudaResourceDesc resdesc_img;
    // memset(&resdesc_img, 0, sizeof(resdesc_img));
    // resdesc_img.resType = cudaResourceTypeArray;
    // resdesc_img.res.array.array = d_img_arr;
    // struct cudaTextureDesc texdesc_img;
    // memset(&texdesc_img, 0, sizeof(texdesc_img));
    // texdesc_img.addressMode[0] = cudaAddressModeClamp;
    // texdesc_img.addressMode[1] = cudaAddressModeClamp;
    // texdesc_img.readMode = cudaReadModeNormalizedFloat;
    // texdesc_img.filterMode = cudaFilterModePoint;
    // texdesc_img.normalizedCoords = 0;
    // cudaTextureObject_t d_img_tex = 0;
    // cudaCreateTextureObject(&d_img_tex, &resdesc_img, &texdesc_img, nullptr);

    // uint8_t* d_out = nullptr;
    // cudaMalloc(&d_out, total);

    // for (int i = 0; i < warmups; ++i) CUDALERP(d_img_tex, oldw, oldh, d_out, neww, newh);
    // auto start = high_resolution_clock::now();
    // for (int i = 0; i < runs; ++i) CUDALERP(d_img_tex, oldw, oldh, d_out, neww, newh);
    // auto end = high_resolution_clock::now();
    // auto sum = (end - start) / runs;

    // auto h_out = new uint8_t[neww * newh];
    // cudaMemcpy(h_out, d_out, total, cudaMemcpyDeviceToHost);
}
} // end namespace cffmpeg_wrap