#include "CameraWrapper.h"

#include <Logger/src/logger.hpp>
#include <MaterialBuffer.h>

#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>

CameraWrapper::~CameraWrapper()
{
    stop();
    delete pipeLineRender;
    delete pipeLineAnalyzer;
    delete pipeLineDecoderDetector;
}

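// start() launches the two worker threads: decoder_thd drives the decode/detect
// pipeline, and live_daemon_thd watches lastAliveTime and triggers a pipeline
// rebuild when the stream stalls. stop() joins both threads.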
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;

    pthread_mutex_init(&live_daemon_mut, NULL);

    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if (ret != 0)
    {
        LOGP(ERROR, "pthread_create decoder_thid: %s\n", strerror(ret));
        running = false;
        return false;
    }

    ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
    if (ret != 0)
    {
        LOGP(ERROR, "pthread_create live_daemon_thid: %s\n", strerror(ret));
        running = false;
        return false;
    }

    return true;
}

// Signature inferred from the stop() call in the destructor.
void CameraWrapper::stop()
{
    running = false;
    pthread_join(decoder_thid, NULL);
    pthread_join(live_daemon_thid, NULL);

    pthread_mutex_destroy(&live_daemon_mut);
}

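// The wrapper owns three pipelines: pipeLineDecoderDetector (RTSP -> decoder -> face
// tracker), pipeLineAnalyzer, and pipeLineRender (overlay painting via PL_Paint).
//
// Minimal usage sketch (hypothetical caller, not part of this file; the member names
// rtspConfig/amcdConfig/sftConfig are taken from the init code below):
//
//   CameraWrapper cw;
//   // ... fill cw.rtspConfig, cw.amcdConfig, cw.sftConfig ...
//   if (cw.initPl() && cw.start())
//   {
//       cw.setFaceLabel(0, L"member 0");   // label drawn by PL_Paint
//       // ... stream runs; live_daemon_thd restarts it if it stalls ...
//       cw.stop();
//   }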
static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData);

// Function name inferred; the header of this init routine is not visible in the listing.
bool CameraWrapper::initPl()
{
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    bool ret = false;

    {
        ret = initPl_DecoderPl();
        if (!ret)
        {
            LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
            return false;
        }
    }

    {
        // ... (analyzer pipeline setup elided)
    }

    {
        // ... (earlier render pipeline elements elided)
        PL_Paint_Config plPaintCfg;
        plPaintCfg.fontPath = "/data/msyh.ttc";
        plPaintCfg.plplCtx = &plplContext;
        PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
        ret = plPaint->init(&plPaintCfg);
    }

    return true;
}

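// Builds the decoder/detector chain: PL_RTSPClient -> PL_AndroidMediaCodecDecoder,
// plus PL_SensetimeFaceTrack when USE_ST_SDK is defined. Also used by resetPl() to
// rebuild the chain after a stalled stream is torn down.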
bool CameraWrapper::initPl_DecoderPl()
{
    bool ret = false;

    PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
    ret = rtspClient->init(&rtspConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
        return false;
    }

    PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
    ret = amcDecoder->init(&amcdConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
        return false;
    }

    //PL_V4L2Source* v4l2Source = (PL_V4L2Source*)pipeLineDecoderDetector->push_elem("PL_V4L2Source");
    //PL_V4L2Source_Config v4l2Config;
    //v4l2Config.width = 640;
    //v4l2Config.height = 480;
    //ret = v4l2Source->init(&v4l2Config);
    //if (!ret)
    //{
    //    LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
    //    return false;
    //}

#ifdef USE_ST_SDK
    PL_SensetimeFaceTrack* sfTrack = (PL_SensetimeFaceTrack*)pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
        return false;
    }
#endif

    return ret;
}

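// faceCacheLocked lets a caller pin the current face data while reading it;
// decoder_thd skips the work guarded by if (!faceCacheLocked) while it is set.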
void CameraWrapper::lockFace()
{
    faceCacheLocked = true;
}

void CameraWrapper::releaseFace()
{
    faceCacheLocked = false;
}

void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
{
    // Keep the label map small; note that insert() does not replace an existing
    // label for the same track id.
    if (faceLabels.size() > 32)
        faceLabels.clear();

    faceLabels.insert(std::make_pair(st_track_id, label));
}

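// Demo callback data: fixed labels for a handful of track ids ("会员" = "member"),
// plus a paint command stream for PL_Paint: a PLPLC_COLOR command ('F' and three
// 255 components) followed by a PLPLC_RECT with what appear to be x, y, w, h
// = 20, 20, 100, 100.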
static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
{
    cameraWrapper.setFaceLabel(0, L"会员vi");
    cameraWrapper.setFaceLabel(1, L"会员ab");
    cameraWrapper.setFaceLabel(3, L"会员wr");
    cameraWrapper.setFaceLabel(4, L"会员wr");
    cameraWrapper.setFaceLabel(5, L"会员wn");
    cameraWrapper.setFaceLabel(6, L"会员wr");
    cameraWrapper.setFaceLabel(7, L"会员wn");
    cameraWrapper.setFaceLabel(8, L"会员wr");
    cameraWrapper.setFaceLabel(9, L"会员wr");
    cameraWrapper.setFaceLabel(10, L"会员wn");
    cameraWrapper.setFaceLabel(11, L"会员wr");
    cameraWrapper.setFaceLabel(12, L"会员wr");
    cameraWrapper.setFaceLabel(13, L"会员wr");
    cameraWrapper.setFaceLabel(14, L"会员wr");
    cameraWrapper.setFaceLabel(15, L"会员wr");
    cameraWrapper.setFaceLabel(16, L"会员wn");
    cameraWrapper.setFaceLabel(17, L"会员wr");
    cameraWrapper.setFaceLabel(18, L"会员wr");
    cameraWrapper.setFaceLabel(19, L"会员wr");
    cameraWrapper.setFaceLabel(20, L"会员wr");
    cameraWrapper.setFaceLabel(21, L"会员wr");
    cameraWrapper.setFaceLabel(22, L"会员wr");

    cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
    cameraWrapper.plplContext.params.push_back('F');
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.params.push_back(255);
    cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(20);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}

//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut) { pthread_mutex_lock(mut); }
//    ~ScopeMutexLocker() { pthread_mutex_unlock(mut); }
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};

void cw_elem_destory_func(PipeLineElem* elem)
{
    delete elem;
}

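// Tears down the decoder/detector pipeline elements and rebuilds the chain; called
// by decoder_thd after live_daemon_thd has flagged the stream as dead.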
bool CameraWrapper::resetPl()
{
    pipeLineDecoderDetector->finit(cw_elem_destory_func);
    sleep(2);
    return initPl_DecoderPl();
}

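// Decoder thread: pumps the decoder/detector pipeline, refreshes lastAliveTime on
// every completed pass, and rebuilds the pipeline when the watchdog marks it killed.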
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;

    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;

    while (cameraWrapper.running)
    {
        if (cameraWrapper.killed)
        {
            LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
            cameraWrapper.resetPl();
            cameraWrapper.killed = false;
            sleep(2);
        }

        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        //LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;

        if (!ret)
            continue;

        PipeMaterial pm; // type assumed; the declaration is not visible in this listing
        ret = last->gain(pm);
        if (!ret)
            continue;

        cameraWrapper.lastAliveTime = time(nullptr);

        if (!cameraWrapper.faceCacheLocked)
        {
            // ...
        }
    }

    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
    return nullptr;
}

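// Watchdog thread: if no frame has completed for more than 20 seconds, kill the
// RTSP client and flag the pipeline as killed so decoder_thd rebuilds it.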
/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;

    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;

    while (cameraWrapper.running)
    {
        sleep(10);

        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
            rtspClient->kill();

            cameraWrapper.killed = true;
        }
    }

    return nullptr;
}