//
// Created by ps on 19-1-10.
//
#include <cassert>
#include <thread>
#include <unistd.h>
#include "VideoCaptureElementWithRtp.h"
#include <QDateTime>

//std::string BASICGB28181::VideoCaptureElementWithRtp::m_chanPubID;
//BASICGB28181::FFmpegDecoderJPG BASICGB28181::VideoCaptureElementWithRtp::m_fFmpegDecoderJPG;

BASICGB28181::VideoCaptureElementWithRtp::VideoCaptureElementWithRtp(std::string &chanPubID, int fps,
                                                                     int streamTransType, int gpuIdx,
                                                                     RtspAnalysManager *manager) :
    m_chanPubID(chanPubID), m_fps(fps), m_running(false), m_waitSignal(false),
    m_streamTransType(streamTransType), m_gpuIdx(gpuIdx), m_userdata((long) this), m_pManager(manager) {
//    m_chanPubID = chanPubID;
    m_cutPath = appPref.getStringData("user.loop.absolute.path");
    assert(!m_cutPath.empty());
}

BASICGB28181::VideoCaptureElementWithRtp::~VideoCaptureElementWithRtp() {
    // Stop the decoder thread and wait (up to ~10 s) for it to exit.
    m_fFmpegDecoderJPG.stopThd();
    int loop = 0;
    while ((loop++ < 100) && m_fFmpegDecoderJPG.getRunning()) {
        m_fFmpegDecoderJPG.stopThd();
        usleep(100000);
    }
}

bool BASICGB28181::VideoCaptureElementWithRtp::startRtpStream(int streamTransType) {
    if (!m_waitSignalrunning) {
        DBG("std::thread waitSignalAndEmit create New!!");
        // Wait for the lower-level ffmpeg decoder to signal that an RTP packet has been
        // decoded into an image, then trigger this element's submit().
        std::thread waitSignalAndEmit([&](BASICGB28181::VideoCaptureElementWithRtp *p_this) {
            p_this->m_waitSignal = true;
            p_this->m_waitSignalrunning = true;
            // Loop; the member variable keeps this thread's running state.
            while (p_this->m_waitSignal) {
//#TODO wait test
#ifdef TestCode
                DBG("waitSignal(\"DecoderImageOK\") begin");
#endif
                // Wait for the signal to fire.
//                DBG("waitSignal before: " << p_this->m_chanPubID);
                gSignalLock.waitSignal(p_this->m_chanPubID + "DecoderImageOK");
//                DBG("waitSignal: " << p_this->m_chanPubID);
#ifdef TestCode
                DBG("waitSignal(\"DecoderImageOK\") after");
#endif
                // Fetch the decoded image from the ffmpeg decoder.
                p_this->m_fFmpegDecoderJPG.getImage().copyTo(p_this->m_image);

                /****debug*****///todo
                if (p_this->m_image.empty()) {
                    ERR("camID:" << p_this->m_chanPubID);
                    continue;
                }
//                else {
//                    static int count = 0;
//                    count++;
//                    if (count > 100) {
//                        ERR("camID:" << p_this->m_chanPubID << " cols:" << p_this->m_image.cols <<\
//                            " rows:" << p_this->m_image.rows << " channels:" << p_this->m_image.channels());
//                        count = 0;
//                    }
//                }
                {
                    /**** recording module ****/
                    p_this->m_picCount++;
                    // Keep only one frame out of every m_nPicsPickOne and push it to Redis.
                    if (p_this->m_picCount % m_nPicsPickOne != 0) {
                        continue;
                    } else {
                        p_this->m_picCount.store(0);
                    }
                    cv::Mat copyMat;
                    std::string imageName = p_this->m_fFmpegDecoderJPG.GetImageName();
                    p_this->m_image.copyTo(copyMat);
                    p_this->m_pManager->SaveImageToRedis(p_this->m_chanPubID, imageName, copyMat);
                }
                p_this->submit();
            }
            p_this->m_waitSignalrunning = false;
            INFO("waitSignalAndEmit is exit...");
        }, this);
        waitSignalAndEmit.detach();
    }

    TryCath(
        //-------- GB28181 devices or subordinate GB28181 platforms must support GB28181-2016 --------
        // Decode thread: send the live-play request and start the ffmpeg decoder module.
        std::thread videoCaptureElementThd([&](VideoCaptureElementWithRtp *p_this, int streamType) {
            DBG("videoCaptureElementThd start...");
            StreamTransType_E etype;
            switch (streamType) {
                case 1: {
                    // 1. Live video request over UDP.
                    etype = E_STREAM_TRANS_UDP;
                    break;
                }
                case 2: {
                    // 2. Live video request over TCP, active mode.
                    etype = E_STREAM_TRANS_TCPACTIVE;
                    break;
                }
                case 3: {
                    // 3. Live video request over TCP, passive mode.
                    etype = E_STREAM_TRANS_TCPPASSIVE;
                    break;
                }
                default:
                    etype = E_STREAM_TRANS_UDP;
            }
            DBG("C_RealVideoStart start... m_chanPubID is " << p_this->m_chanPubID << " etype is " << etype
                << " m_userdata is " << m_userdata);
            // Start the live video (play request).
            long lrealhandle = C_RealVideoStart(const_cast<char *>(p_this->m_chanPubID.c_str()), etype,
                                                p_this->streamcallback, m_userdata);
            if (lrealhandle != -1) {
                // Play request succeeded.
                DBG(p_this->m_chanPubID << " C_RealVideoStart ok ... type is " << etype);
type is " << etype); p_this->m_running = true; //启动ffmpeg解码模块 p_this->m_fFmpegDecoderJPG.startThd(p_this->m_chanPubID, p_this->m_fps, p_this->m_gpuIdx); usleep(1000000); //阻塞线程,等待外部触发关闭点播 while (p_this->m_running) { if(p_this->m_fFmpegDecoderJPG.getRunning()) { usleep(300000); } else { // 根据reopenTime判断是否需要重启 if (reopenTime < 0) { p_this->m_running = false; stop(); INFO("grabFrame faild, element stopping"); break; } else { //todo 业务死锁 usleep((6 - reopenTime--) * 1000000); INFO("grabFrame faild, try reopen video: "); //关闭ffmpeg解码模块 p_this->m_fFmpegDecoderJPG.stopThd(); //启动ffmpeg解码模块 p_this->m_fFmpegDecoderJPG.startThd(p_this->m_chanPubID, p_this->m_fps, p_this->m_gpuIdx); continue; } } } DBG("videoCaptureElementThd stop ..."); //停止点播 C_RealVideoStop(lrealhandle); //将waitSignalAndEmit 线程退出 p_this->m_waitSignal = false; DBG("videoCaptureElementThd stop ok..."); } else { //点播失败 p_this->m_waitSignal = false; p_this->m_running = false; //关闭ffmpeg解码模块 p_this->m_fFmpegDecoderJPG.stopThd(); ERR(p_this->m_chanPubID << " C_RealVideoStart is error lrealhandle is " << lrealhandle); p_this->startRtpStream(p_this->m_streamTransType); } }, this, streamTransType); videoCaptureElementThd.detach(); ); return true; } bool BASICGB28181::VideoCaptureElementWithRtp::stopRtpStream() { TryCath( m_running = false; ); return true; } bool BASICGB28181::VideoCaptureElementWithRtp::getRunning() { return m_running; } void BASICGB28181::VideoCaptureElementWithRtp::streamcallback(long handle, int datatype, int frametype, unsigned char *data, int datalen, long userdata) { #ifdef TestCode ClockTimer test("streamcallback"); #endif BASICGB28181::VideoCaptureElementWithRtp *p_this = (BASICGB28181::VideoCaptureElementWithRtp *) userdata; #ifdef TestCode { // FILE *fp11 = NULL; // if (!fp11) { //// printf("fp11 handle:%ld, datatype:%d, datalen:%d, userdata:%ld\n", handle, datatype, datalen, userdata); // std::string fileName(p_this->m_chanPubID); // fileName.append(".mp4"); // fp11 = fopen(fileName.c_str(), "w+"); // } // fwrite(data, sizeof(char), datalen, fp11); } #endif //将底层组好的rtp包,存入ffmpeg中的缓存队列 CHKDBG(p_this->m_fFmpegDecoderJPG.pushInfo(data, datalen, p_this->m_chanPubID), true, "pushInfo is error !! 
handle is " << handle << " datatype is " << datatype << " frametype is " << frametype); } void BASICGB28181::VideoCaptureElementWithRtp::threadFunc() { // if ((!m_running) || (!m_waitSignal)) { //// 根据reopenTime判断是否需要重启 // if (reopenTime < 0) { // stop(); // INFO("grabFrame faild, element stopping"); // return; // } else { // //todo 业务死锁 // usleep(reopenTime * 1000); // INFO("grabFrame faild, try reopen video: "); // startRtpStream(m_streamTransType); // return; // } // } fireConnectors(); } void BASICGB28181::VideoCaptureElementWithRtp::threadInitial() { startRtpStream(m_streamTransType); } void BASICGB28181::VideoCaptureElementWithRtp::threadClosing() { m_waitSignal = false; stopRtpStream(); } cv::Mat BASICGB28181::VideoCaptureElementWithRtp::getImage() { return m_image; } void BASICGB28181::VideoCaptureElementWithRtp::SetVideoMinMaxSeconds(const int minSeconds, const int maxSeconds) { m_fFmpegDecoderJPG.SetMinMaxVideoSeconds(minSeconds, maxSeconds); } void BASICGB28181::VideoCaptureElementWithRtp::SaveVideo(const std::string &strImageName) { INFO("SaveVideo: " << strImageName); std::string strTimeStamp = AppUtil::getTimeUSecString(); std::string strPath = MakeDir(strTimeStamp); m_fFmpegDecoderJPG.SaveVideoByImageName(strPath, strImageName); } std::string BASICGB28181::VideoCaptureElementWithRtp::MakeDir(const std::string &timeStamp) { std::string t_FilePath = m_cutPath; if (t_FilePath.back() != '/') { t_FilePath.push_back('/'); } char buf[24]; QDateTime dt = QDateTime::fromString(QString::fromStdString(timeStamp), "yyyy-MM-dd hh:mm:ss:zzz"); std::string t_strTime = dt.toString("yyyyMMddhh").toStdString(); // DBG("t_strTime="<