| | |
| | | #include <PL_BlockGrouping.h>
|
| | | #include <PL_ColorConv.h>
|
| | | #include <PL_AndroidMediaCodecEncoder.h>
|
| | | #include <PL_RTSPServer.h>
|
| | | #include <PL_RTSPServer2.h>
|
// NOTE(review): CORRUPTED REGION — do not compile as-is. Every line below carries
// "| | |" / "|" gutter markers embedded in the text (diff-viewer paste residue).
// Semantically this splices at least two functions: the start of
// CameraWrapper::~CameraWrapper() (whose real body appears to end after stop())
// and the body of a separate bool-returning pipeline-init function (it uses
// `return false`/`return true` and builds pipeLineRender) whose signature line
// has been lost. Recover the original from VCS history rather than hand-repairing.
| | | CameraWrapper::~CameraWrapper()
|
| | | {
|
| | | stop();
|
| | |
// NOTE(review): from here on the statements belong to the lost init function,
// not to the destructor: element-creator registration, then pipeline build.
| | | PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
|
| | | PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
|
| | | PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
|
| | | PipeLine::register_global_elem_creator("PL_RTSPServer", create_PL_RTSPServer);
|
| | | PipeLine::register_global_elem_creator("PL_RTSPServer2", create_PL_RTSPServer2);
|
| | |
|
| | | bool ret = initPl_DecoderPl();
|
// NOTE(review): this `if (!ret)` has lost its body — the error handling for
// initPl_DecoderPl() (log + return false, judging by the pattern below) is missing.
| | | if (!ret)
|
| | |
| | |
|
// NOTE(review): plScaleCfg is used below but its declaration is not visible in
// this text — presumably a lost `PL_Scale_Config plScaleCfg;` line.
| | | PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
|
| | | plScaleCfg.toWidth = 640;
|
| | | plScaleCfg.toHeight = 480;
|
// NOTE(review): duplicate assignment — toHeight was already set to 480 above.
| | | plScaleCfg.toHeight = 480;//#todo config
|
| | | ret = plScale->init(&plScaleCfg);
|
| | | if (!ret)
|
| | | {
|
// NOTE(review): this failure branch returns without logging; the LOG_ERROR line
// present in the sibling branches appears to have been lost here.
| | |
| | | return false;
|
| | | }
|
| | |
|
| | | PL_AndroidMediaCodecEncoder_Config amceCfg;
|
// NOTE(review): stale comment — 5000000 is 5 Mbit/s, not 512KB (the second
// copy of this section below uses 512 * 1024 * 8).
| | | amceCfg.ak_bit_rate = 5000000; // 512KB
|
| | | amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
|
| | | amceCfg.ak_frame_rate = 25;
|
| | | amceCfg.ak_height = 480;
|
| | | amceCfg.ak_i_frame_interval = 20;
|
| | | amceCfg.ak_mime = "video/avc";
|
| | | amceCfg.ak_width = 640;
|
| | | PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
|
| | | ret = plAMCE->init(&amceCfg);
|
| | | if (!ret)
|
| | | {
|
| | | LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
|
| | | return false;
|
| | | }
|
| | | //PL_ColorConv_Config PLColorConvCfg;
|
| | | //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
|
| | | //ret = plColorConv->init(&PLColorConvCfg);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
| | |
|
// NOTE(review): plRtspServer is pushed and rtspServerConfig declared here but
// neither is used before both identifiers are redeclared further down — this
// RTSPServer(1) variant was probably superseded by the RTSPServer2 section
// below and the two versions were merged into one text.
| | | PL_RTSPServer* plRtspServer = (PL_RTSPServer*)pipeLineRender->push_elem("PL_RTSPServer");
|
| | | RTSPServerConfig rtspServerConfig;
|
| | | //rtspServerConfig.syncDeliverFrame = false;
|
| | | PL_Paint_Config plPaintCfg;
|
| | | plPaintCfg.fontPath = fontPath;
|
| | | plPaintCfg.plplCtx = &plplContext;
|
| | | PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
|
| | | ret = plPaint->init(&plPaintCfg);
|
| | | if (!ret)
|
| | | {
|
| | | LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
| | | return false;
|
| | | }
|
| | |
|
| | | //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
|
| | | //ret = asvRender->init(&asvrConfig);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
| | |
|
// NOTE(review): redeclaration of amceCfg/plAMCE in the same scope (compile
// error). This second encoder+RTSPServer2 section duplicates the one above
// with different settings (512 KB/s, 40 fps, 1 s I-frame interval, AVC
// baseline profile); only one of the two variants can be the intended code.
| | | PL_AndroidMediaCodecEncoder_Config amceCfg;
|
| | | amceCfg.ak_bit_rate = 512 * 1024 * 8; // 512KB
|
| | | amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar;
|
| | | amceCfg.ak_frame_rate = 40;
|
| | | amceCfg.ak_height = 480;
|
| | | amceCfg.ak_i_frame_interval = 1;
|
| | | amceCfg.ak_mime = "video/avc";
|
| | | amceCfg.ak_width = 640;
|
| | | amceCfg.codecProfileLevel.profile = Android_CodecProfileLevel::AVCProfileBaseline;
|
| | | amceCfg.codecProfileLevel.level = Android_CodecProfileLevel::AVCLevel1;
|
| | | PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
|
| | | ret = plAMCE->init(&amceCfg);
|
| | | if (!ret)
|
| | | {
|
| | | LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
|
| | | return false;
|
| | | }
|
| | |
|
// NOTE(review): plRtspServer/rtspServerConfig redeclared (second definitions in
// the same scope — compile error; see the RTSPServer(1) section above).
| | | PL_RTSPServer2* plRtspServer = (PL_RTSPServer2*)pipeLineRender->push_elem("PL_RTSPServer2");
|
| | | RTSPServer2Config rtspServerConfig;
|
| | | //rtspServerConfig.payBlockFullQueue = true;
|
| | | ret = plRtspServer->init(&rtspServerConfig);
|
| | | if (!ret)
|
| | | {
|
| | | LOG_ERROR << "pipeLineRender.plRtspServer init error" << LOG_ENDL;
|
| | | return false;
|
| | | }
|
| | |
|
| | | //PL_ColorConv_Config PLColorConvCfg;
|
| | | //PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
|
| | | //ret = plColorConv->init(&PLColorConvCfg);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
| | |
|
| | | //PL_Paint_Config plPaintCfg;
|
| | | //plPaintCfg.fontPath = fontPath;
|
| | | //plPaintCfg.plplCtx = &plplContext;
|
| | | //PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
|
| | | //ret = plPaint->init(&plPaintCfg);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
| | |
|
| | | //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
|
| | | //ret = asvRender->init(&asvrConfig);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
// NOTE(review): unbalanced closing braces from here to the end of the region —
// further evidence of a bad merge; `return true` belongs to the lost init
// function, not to the destructor this region nominally opens with.
| | | }
|
| | | }
|
| | |
|
| | | return true;
|
| | | }
|
| | |
| | | //}
|
| | |
|
// NOTE(review): ORPHANED FRAGMENT — an `#ifdef USE_ST_SDK` section from the
// lost detector-pipeline init function. Its `#endif` and the body of the final
// `if (!ret)` are missing from this text (truncated paste); the enclosing
// function's header is not visible. Recover from VCS history.
| | | #ifdef USE_ST_SDK
|
| | | // PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
|
| | | // ret = sfTrack->init(&sftConfig);
|
| | | // if (!ret)
|
| | | // {
|
| | | // LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
|
| | | // return false;
|
| | | // }
|
| | | //PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo use multi
|
| | | //ret = sfTrack->init(&sftConfig);
|
| | | //if (!ret)
|
| | | //{
|
| | | // LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
|
| | | // return false;
|
| | | //}
|
// NOTE(review): the element name string "PL_SensetimeFaceTrackMitiTrd" looks
// like a typo for "PL_SensetimeFaceTrackMultiTrd" (the cast type on this same
// line). If the creator is registered under the correct spelling, push_elem
// would fail/return null here — confirm against the creator registration and
// fix the string if so.
| | | PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
|
| | | ret = sfTrack->init(&sftConfig);
|
// NOTE(review): the body of this `if (!ret)` is missing — text truncated here.
| | | if (!ret)
|
| | |
| | |
|
| | | bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
|
| | | {
|
| | | CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
|
| | | if (cameraWrapper.faceCacheLocked)
|
| | | return false;
|
| | |
|
| | | int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
|
| | | if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
|
| | | return false;
|
| | | cameraWrapper.faceCache.cachePm(*pm);
|
| | | //remote call start, 为了保证通用性,未将以下步骤封入RtspFaceDetectClient
|
| | | RtspFaceDetectClient &client = getRtspFaceDetectClient();
|
| | | auto request = client.fireFaceCountListenerRequest();
|
| | | request.setCameraIndex(cameraWrapper.cameraIdx);
|
| | | request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
|
| | | auto sendAct = request.send();
|
| | | sendAct.wait(client.getWaitScope());
|
| | | //remote call end
|
| | | return false;
|
| | | }
|
| | |
|
| | | void test_paint(CameraWrapper& cameraWrapper)
|
| | |
| | | cameraWrapper.plplContext.params.push_back(100);
|
| | | cameraWrapper.plplContext.params.push_back(100);
|
| | | }
|
| | |
|
| | | //struct ScopeMutexLocker
|
| | | //{
|
| | | // pthread_mutex_t* mut;
|
| | | // ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
|
| | | // ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
|
| | | // //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
|
| | | //};
|
| | |
|
| | | void cw_elem_destory_func(PipeLineElem* elem)
|
| | | {
|
| | |
| | | {
|
| | | sleep(10);
|
| | |
|
| | | continue;//#todo
|
| | | if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
|
| | | {
|
| | | PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
|