copy from VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
copy to VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperClient.cpp
File was copied from VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
| | | #include "CameraWrapper.h"
|
| | | #include "CameraWrapperClient.h"
|
| | | #include <logger.h>
|
| | | #include <Logger/src/logger.hpp>
|
| | | #include <MaterialBuffer.h>
|
| | |
| | | #include <PL_V4L2Source.h>
|
| | | #include <PL_BlockGrouping.h>
|
| | | #include <PL_ColorConv.h>
|
| | | #include <PL_AndroidMediaCodecEncoder.h>
|
| | |
|
CameraWrapper::~CameraWrapper()
{
    stop();

    delete pipeLineRender;
    delete pipeLineAnalyzer;
    delete pipeLineDecoderDetector;
}

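// Spawns the decoder/detector worker thread and the live-daemon thread.
// Returns false (and clears `running`) if either pthread_create fails.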
bool CameraWrapper::start()
{
    LOG_INFO << "CameraWrapper::start" << LOG_ENDL;

    pthread_mutex_init(&live_daemon_mut, NULL);

    running = true;
    int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
    if (ret != 0)
    {
        LOGP(ERROR, "pthread_create decoder_thid: %s\n", strerror(ret));
        running = false;
        return false;
    }

    //struct sched_param param;
    //int policy = -1;
    //int rc = pthread_getschedparam(decoder_thid, &policy, &param);
    //LOGP(INFO, "pthread_getschedparam, rc=%d, policy=%d, priority=%d", rc, policy, param.__sched_priority);
    //policy = SCHED_RR;
    //param.__sched_priority = 1;
    //rc = pthread_setschedparam(decoder_thid, policy, &param);
    //LOGP(INFO, "pthread_setschedparam, rc=%d, policy=%d, priority=%d", rc, policy, param.__sched_priority);

    ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
    if (ret != 0)
    {
        LOGP(ERROR, "pthread_create live_daemon_thid: %s\n", strerror(ret));
        running = false;
        return false;
    }

    return true;
}

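// Stops both worker threads: clears `running` and joins them. No-op when already stopped.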
void CameraWrapper::stop()
{
    LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;

    if (!running)
        return;

    running = false;
    pthread_join(decoder_thid, NULL);
    pthread_join(live_daemon_thid, NULL);
}

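// Face filtering: keep only faces whose yaw/pitch/roll lie inside a 35-degree cone,
// then score each remaining face by how frontal it is, weighted by the detector score.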
for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
{
    if (!iter->test_face_in_cone(35.0f, 35.0f, 35.0f))
        continue;

    RectWrapper rw;
    rw.rect = iter->rect;
    rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;

| | | PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
|
| | | PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
|
| | | PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
|
| | | //PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);
|
| | |
|
| | | bool ret = initPl_DecoderPl();
|
| | | if (!ret)
|
| | |
| | | return false;
|
| | | }
|
| | |
|
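// Render pipeline layout: PL_Scale (to 640x480) -> PL_ColorConv -> PL_Paint,
// with an Android MediaCodec H.264 encoder stage left commented out.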
PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
plScaleCfg.toWidth = 640;
plScaleCfg.toHeight = 480;
ret = plScale->init(&plScaleCfg);
if (!ret)
{
    LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
    return false;
}

PL_ColorConv_Config PLColorConvCfg;
PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
ret = plColorConv->init(&PLColorConvCfg);
if (!ret)
{
    LOG_ERROR << "pipeLineRender.plColorConv init error" << LOG_ENDL;
    return false;
}

//PL_AndroidMediaCodecEncoder_Config amceCfg;
//amceCfg.ak_bit_rate = 5000000; // ~5 Mbps
//amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar
//amceCfg.ak_frame_rate = 25;
//amceCfg.ak_height = 480;
//amceCfg.ak_i_frame_interval = 20;
//amceCfg.ak_mime = "video/avc";
//amceCfg.ak_width = 640;
//PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
//ret = plAMCE->init(&amceCfg);
//if (!ret)
//{
//    LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
//    return false;
//}

PL_Paint_Config plPaintCfg;
plPaintCfg.fontPath = fontPath;
plPaintCfg.plplCtx = &plplContext;
PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
ret = plPaint->init(&plPaintCfg);
if (!ret)
{
    LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
    return false;
}

return true;
}

bool CameraWrapper::initPl_DecoderPl()
{
    plplContext.cmds.push_back(PLPLC_COLOR);
    plplContext.params.push_back('F');
    if (iter->test_face_in_cone(35.0f, 35.0f, 35.0f))
    {
        if (iter->outOfFrame)
        {

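// Decoder thread: loops while `running`, drives the decoder/detector pipeline,
// and feeds each completed frame into the render pipeline.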
/*static*/ void* CameraWrapper::decoder_thd(void* arg)
{
    LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
    CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;

    while (cameraWrapper.running)
    {
        if (cameraWrapper.killed)
        {
            LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
            sleep(2);
        }

        PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
        bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
        //LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;

        if (!ret)
            continue;

        PipeMaterial pm;
        ret = last->gain(pm);

        //#debug
        //test_paint(cameraWrapper);

        cameraWrapper.pipeLineRender->pipe(&pm);//#todo
        //if (cameraWrapper.pipeLineRender->check_pipe_complete(last = cameraWrapper.pipeLineRender->pipe(&pm)))
        //    last->gain(pm);
    }

    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
}

/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)