From ad2575022f1a06f267b34c21bb99f6f83ea70854 Mon Sep 17 00:00:00 2001
From: xuxiuxi <xuxiuxi@454eff88-639b-444f-9e54-f578c98de674>
Date: Sat, 22 Jul 2017 16:13:09 +0800
Subject: [PATCH]
---
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp | 863 ++++++++++++++++++++++++++++++--------------------------
 1 file changed, 459 insertions(+), 404 deletions(-)
diff --git a/VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp b/VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
index a82f5c3..d7bea59 100644
--- a/VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
+++ b/VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapper.cpp
@@ -1,404 +1,459 @@
-#include "CameraWrapper.h"
-#include <logger.h>
-#include <Logger/src/logger.hpp>
-#include <PL_Gainer.h>
-#include <PL_Paint.h>
-#include <PL_Scale.h>
-#include <MaterialBuffer.h>
-#include <PL_V4L2Source.h>
-#include <PL_BlockGrouping.h>
-#include <PL_ColorConv.h>
-CameraWrapper::~CameraWrapper()
-{
- stop();
- delete pipeLineDecoderDetector;
- delete pipeLineAnalyzer;
- delete pipeLineRender;
-}
-
-bool CameraWrapper::start()
-{
- LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
-
- running = true;
- int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
- if(ret != 0)
- {
- LOGP(ERROR, "pthread_create: %s/n", strerror(ret));
- running = false;
- return false;
- }
-
- return true;
-}
-
-void CameraWrapper::stop()
-{
- LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
-
- if (!running)
- return;
-
- running = false;
- pthread_join(decoder_thid, NULL);
-}
-
-static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
-{
- return 0.0f;
-}
-
-static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
-{
- const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
-
- for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
- {
- RectWrapper rw;
- rw.rect = iter->rect;
- rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
- rw.userData = (void*)(iter->id);
- rects.push_back(rw);
- }
-}
-
-bool CameraWrapper::initPl()
-{
- PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
- PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
- PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
- PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
- PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
- PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
- PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
- PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
- PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
- PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
- bool ret = false;
-
- {
- PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
- ret = rtspClient->init(&rtspConfig);
- if (!ret)
- {
- LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
- return false;
- }
-
- PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
- ret = amcDecoder->init(&amcdConfig);
- if (!ret)
- {
- LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
- return false;
- }
-
- //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
- //PL_V4L2Source_Config v4l2Config;
- //v4l2Config.width = 640;
- //v4l2Config.height = 480;
- //ret = v4l2Source->init(&v4l2Config);
- //if (!ret) {
- // LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
- // return false;
- //}
-
-#ifdef USE_ST_SDK
- PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
- ret = sfTrack->init(&sftConfig);
- if (!ret) {
- LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
- return false;
- }
-#endif
- }
-
- {
- PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
- ret = plGainer->init(nullptr);
- if (!ret)
- {
- LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
- return false;
- }
-
- PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
- PL_BlockGrouping_Config plbgConfig;
- plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
- plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
- ret = plBG->init(&plbgConfig);
- if (!ret)
- {
- LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
- return false;
- }
- }
-
- {
- PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
- ret = plGainer->init(nullptr);
- if (!ret)
- {
- LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
- return false;
- }
-
- ANativeWindow* window = (ANativeWindow*)(windowRender);
- ANativeWindow_Buffer buffer;
- if(windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0)
- {
- plScaleCfg.toHeight=buffer.height;
- plScaleCfg.toWidth=buffer.width;
- ANativeWindow_unlockAndPost(window);
- } else
- {
- plScaleCfg.toHeight=480;
- plScaleCfg.toWidth=640;
- }
- PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
- ret = plScale->init(&plScaleCfg);
- if (!ret)
- {
- LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
- return false;
- }
-
- PL_ColorConv_Config PLColorConvCfg;
- PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
- ret = plColorConv->init(&PLColorConvCfg);
- if (!ret)
- {
- LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
- return false;
- }
-
- PL_Paint_Config plPaintCfg;
- plPaintCfg.fontPath = "data/msyh.ttc";
- plPaintCfg.plplCtx = &plplContext;
- PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
- ret = plPaint->init(&plPaintCfg);
- if (!ret)
- {
- LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
- return false;
- }
-
- PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
- ret = asvRender->init(&asvrConfig);
- if (!ret)
- {
- LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
- return false;
- }
- }
-
- return true;
-}
-
-static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
-{
- //jni thread may not able to invoke java callback
- // double check it's all ok
- int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
- if (getEnvStat == JNI_EDETACHED)
- {
- //LOG_WARN << "GetEnv: not attached" << std::endl;
- if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
- LOG_WARN << "Failed to attach" << LOG_ENDL;
- else
- getEnvStat = JNI_OK;
- }
- else if (getEnvStat == JNI_OK)
- {
- }
- else if (getEnvStat == JNI_EVERSION)
- LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
- else if (getEnvStat == JNI_ERR)
- LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
-
- cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
-
- if (cameraWrapper.javaEnv->ExceptionCheck())
- cameraWrapper.javaEnv->ExceptionDescribe();
-
- cameraWrapper.javaVM->DetachCurrentThread();
-}
-
-bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
-{
- CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
- PLPLContext& plplContext(cameraWrapper.plplContext);
- const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
-
- plplContext.cmds.clear();
- plplContext.params.clear();
-
- float width_scale =((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
- float height_scale =((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
-
- for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
- {
- plplContext.cmds.push_back(PLPLC_COLOR);
- plplContext.params.push_back('F');
- if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
- {
- if (iter->outOfFrame)
- {
- plplContext.params.push_back(255);
- plplContext.params.push_back(255);
- plplContext.params.push_back(0);
- plplContext.params.push_back(255);
- }
- else
- {
- plplContext.params.push_back(255);
- plplContext.params.push_back(0);
- plplContext.params.push_back(0);
- plplContext.params.push_back(255);
- }
- }
- else
- {
- plplContext.params.push_back(0);
- plplContext.params.push_back(255);
- plplContext.params.push_back(0);
- plplContext.params.push_back(255);
- }
-
- plplContext.cmds.push_back(PLPLC_RECT);
- plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
- plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
- plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
- plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
-
- std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
- if (iterFaceLabel != cameraWrapper.faceLabels.end())
- {
- plplContext.cmds.push_back(PLPLC_WTEXT);
- plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
- plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
- const wchar_t* label = iterFaceLabel->second.c_str();
- plplContext.params.push_back(PLPLType(label));
- }
- }
-
- return false;
-}
-
-bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
-{
- CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
-
- if (cameraWrapper.faceCacheLocked)
- return false;
-
- int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
- if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
- return false;
-
- cameraWrapper.faceCache.cachePm(*pm);
- invokeCallback(cameraWrapper, faceCount);
-
- return false;
-}
-
-void test_paint(CameraWrapper& cameraWrapper)
-{
- cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
- cameraWrapper.plplContext.params.push_back(100);
- cameraWrapper.plplContext.params.push_back(100);
- cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
-
- cameraWrapper.setFaceLabel(0, L"会员vi");
- cameraWrapper.setFaceLabel(1, L"会员ab");
- cameraWrapper.setFaceLabel(3, L"会员wr");
- cameraWrapper.setFaceLabel(4, L"会员wr");
- cameraWrapper.setFaceLabel(5, L"会员wn");
- cameraWrapper.setFaceLabel(6, L"会员wr");
- cameraWrapper.setFaceLabel(7, L"会员wn");
- cameraWrapper.setFaceLabel(8, L"会员wr");
- cameraWrapper.setFaceLabel(9, L"会员wr");
- cameraWrapper.setFaceLabel(10, L"会员wn");
- cameraWrapper.setFaceLabel(11, L"会员wr");
- cameraWrapper.setFaceLabel(12, L"会员wr");
- cameraWrapper.setFaceLabel(13, L"会员wr");
- cameraWrapper.setFaceLabel(14, L"会员wr");
- cameraWrapper.setFaceLabel(15, L"会员wr");
- cameraWrapper.setFaceLabel(16, L"会员wn");
- cameraWrapper.setFaceLabel(17, L"会员wr");
- cameraWrapper.setFaceLabel(18, L"会员wr");
- cameraWrapper.setFaceLabel(19, L"会员wr");
- cameraWrapper.setFaceLabel(20, L"会员wr");
- cameraWrapper.setFaceLabel(21, L"会员wr");
- cameraWrapper.setFaceLabel(22, L"会员wr");
-
- cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
- cameraWrapper.plplContext.params.push_back('F');
- cameraWrapper.plplContext.params.push_back(255);
- cameraWrapper.plplContext.params.push_back(255);
- cameraWrapper.plplContext.params.push_back(255);
- cameraWrapper.plplContext.params.push_back(255);
-// cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
-// cameraWrapper.plplContext.params.push_back(20);
-// cameraWrapper.plplContext.params.push_back(20);
-// cameraWrapper.plplContext.params.push_back(100);
-// cameraWrapper.plplContext.params.push_back(100);
-}
-
-/*static*/ void* CameraWrapper::decoder_thd(void* arg)
-{
- LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
-
- CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
-
- while(cameraWrapper.running)
- {
- PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
- bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
- LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
-
- if (!ret)
- continue;
-
- PipeMaterial pm;
- ret = last->gain(pm);
- if (!ret)
- continue;
-
- if (! cameraWrapper.faceCacheLocked)
- {
- PipeMaterial pmAnalizer(pm);
- PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
- bool ret = last->gain(pmAnalizer);
- if (ret)
- pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
- }
-
- pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
-
- //#debug
- //test_paint(cameraWrapper);
-
- cameraWrapper.pipeLineRender->pipe(&pm);
- }
-
- LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
-}
-
-void CameraWrapper::lockFace()
-{
- faceCacheLocked = true;
-}
-
-void CameraWrapper::releaseFace()
-{
- faceCacheLocked = false;
-}
-
-void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
-{
- if (faceLabels.size() > 32)
- faceLabels.clear();
-
- faceLabels.insert(std::make_pair(st_track_id, label));
-}
+#include "CameraWrapper.h"
+#include <logger.h>
+#include <Logger/src/logger.hpp>
+#include <MaterialBuffer.h>
+#include <PL_Gainer.h>
+#include <PL_Paint.h>
+#include <PL_Scale.h>
+#include <PL_V4L2Source.h>
+#include <PL_BlockGrouping.h>
+#include <PL_ColorConv.h>
+
+CameraWrapper::~CameraWrapper()
+{
+ stop();
+ delete pipeLineRender;
+ delete pipeLineAnalyzer;
+ delete pipeLineDecoderDetector;
+}
+
+bool CameraWrapper::start()
+{
+ LOG_INFO << "CameraWrapper::start" << LOG_ENDL;
+
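+ // mutex intended for coordinating with the live-daemon thread; currently only
+ // initialised here and destroyed in stop() (the ScopeMutexLocker helper further
+ // down is still commented out)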
+ pthread_mutex_init(&live_daemon_mut, NULL);
+
+ running = true;
+ int ret = pthread_create(&decoder_thid, NULL, CameraWrapper::decoder_thd, this);
+ if(ret != 0)
+ {
+ LOGP(ERROR, "pthread_create decoder_thid: %s/n", strerror(ret));
+ running = false;
+ return false;
+ }
+
+ ret = pthread_create(&live_daemon_thid, NULL, CameraWrapper::live_daemon_thd, this);
+ if(ret != 0)
+ {
+ LOGP(ERROR, "pthread_create live_daemon_thid: %s/n", strerror(ret));
+ running = false;
+ return false;
+ }
+
+ return true;
+}
+
+void CameraWrapper::stop()
+{
+ LOG_INFO << "CameraWrapper::stop" << LOG_ENDL;
+
+ if (!running)
+ return;
+
+ running = false;
+ pthread_join(decoder_thid, NULL);
+ pthread_join(live_daemon_thid, NULL);
+
+ pthread_mutex_destroy(&live_daemon_mut);
+}
+
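+// block-grouping secondary-score callback; not used yet, so it always returns 0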
+static float pl_analizer_plbg_user_score_2_func(const MB_Frame* frame, const PLGH_Rect& rects, uint8_t* croppedData)
+{
+ return 0.0f;
+}
+
+static void pl_analizer_plbg_get_rect_func(const PipeMaterial& ptr_pm, const MB_Frame& data_frame, std::list<RectWrapper>& rects)
+{
+ const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(ptr_pm.buffer));
+
+ for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
+ {
+ RectWrapper rw;
+ rw.rect = iter->rect;
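+ // weight the detector score by how frontal the face is: yaw/pitch/roll near 0 give a weight close to 1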
+ rw.user_score_1 = ((90.0f - std::abs(iter->yaw)) + (90.0f - std::abs(iter->pitch)) + (90.0f - std::abs(iter->roll))) / 90.0f / 3 * iter->score;
+ rw.userData = (void*)(iter->id);
+ rects.push_back(rw);
+ }
+}
+
+bool CameraWrapper::initPl()
+{
+ PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
+ PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
+ PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
+ PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
+ PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
+ PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
+ PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
+ PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
+ PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
+ PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
+
+ bool ret = initPl_DecoderPl();
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
+ return false;
+ }
+
+ {
+ PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
+ ret = plGainer->init(nullptr);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
+ return false;
+ }
+
+ PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
+ PL_BlockGrouping_Config plbgConfig;
+ plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
+ plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
+ ret = plBG->init(&plbgConfig);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
+ return false;
+ }
+ }
+
+ {
+ PL_Gainer* plGainer = (PL_Gainer*)pipeLineRender->push_elem("PL_Gainer");
+ ret = plGainer->init(nullptr);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineRender.plGainer init error" << LOG_ENDL;
+ return false;
+ }
+
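+ // size the render scaler to the surface buffer if the native window can be
+ // locked; otherwise fall back to 640x480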
+ ANativeWindow* window = (ANativeWindow*)(windowRender);
+ ANativeWindow_Buffer buffer;
+ if(windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0)
+ {
+ plScaleCfg.toHeight=buffer.height;
+ plScaleCfg.toWidth=buffer.width;
+ ANativeWindow_unlockAndPost(window);
+ } else
+ {
+ plScaleCfg.toHeight=480;
+ plScaleCfg.toWidth=640;
+ }
+ PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
+ ret = plScale->init(&plScaleCfg);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
+ return false;
+ }
+
+ PL_ColorConv_Config PLColorConvCfg;
+ PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
+ ret = plColorConv->init(&PLColorConvCfg);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
+ return false;
+ }
+
+ PL_Paint_Config plPaintCfg;
+ plPaintCfg.fontPath = "/data/msyh.ttc";
+ plPaintCfg.plplCtx = &plplContext;
+ PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
+ ret = plPaint->init(&plPaintCfg);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
+ return false;
+ }
+
+ PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
+ ret = asvRender->init(&asvrConfig);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineRender.asvRender init error" << LOG_ENDL;
+ return false;
+ }
+ }
+
+ return true;
+}
+
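+// builds the decode/detect pipeline (RTSP client -> MediaCodec decoder -> SenseTime
+// face track when USE_ST_SDK is defined); called from initPl() and again from
+// resetPl() after a stalled stream has been torn down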
+bool CameraWrapper::initPl_DecoderPl()
+{
+ bool ret = false;
+
+ PL_RTSPClient* rtspClient = (PL_RTSPClient*)pipeLineDecoderDetector->push_elem("PL_RTSPClient");
+ ret = rtspClient->init(&rtspConfig);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineDecoderDetector.rtspClient.init error" << LOG_ENDL;
+ return false;
+ }
+
+ PL_AndroidMediaCodecDecoder* amcDecoder = (PL_AndroidMediaCodecDecoder*)pipeLineDecoderDetector->push_elem("PL_AndroidMediaCodecDecoder");
+ ret = amcDecoder->init(&amcdConfig);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineDecoderDetector.amcDecoder.init error" << LOG_ENDL;
+ return false;
+ }
+
+ //PL_V4L2Source *v4l2Source = (PL_V4L2Source *) pipeLineDecoderDetector->push_elem("PL_V4L2Source");
+ //PL_V4L2Source_Config v4l2Config;
+ //v4l2Config.width = 640;
+ //v4l2Config.height = 480;
+ //ret = v4l2Source->init(&v4l2Config);
+ //if (!ret) {
+ // LOG_ERROR << "pipeLineDecoderDetector.v4l2Source.init error" << LOG_ENDL;
+ // return false;
+ //}
+
+#ifdef USE_ST_SDK
+ PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
+ ret = sfTrack->init(&sftConfig);
+ if (!ret)
+ {
+ LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
+ return false;
+ }
+#endif
+
+ return ret;
+}
+
+void CameraWrapper::lockFace()
+{
+ faceCacheLocked = true;
+}
+
+void CameraWrapper::releaseFace()
+{
+ faceCacheLocked = false;
+}
+
+void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label)
+{
+ if (faceLabels.size() > 32)
+ faceLabels.clear();
+
+ faceLabels.insert(std::make_pair(st_track_id, label));
+}
+
+static void invokeCallback(CameraWrapper& cameraWrapper, int faceCount)
+{
+ // a native (pthread) worker is not attached to the JVM by default, so it cannot
+ // invoke the Java callback directly; attach the current thread first if needed
+ int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
+ if (getEnvStat == JNI_EDETACHED)
+ {
+ //LOG_WARN << "GetEnv: not attached" << std::endl;
+ if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
+ LOG_WARN << "Failed to attach" << LOG_ENDL;
+ else
+ getEnvStat = JNI_OK;
+ }
+ else if (getEnvStat == JNI_OK)
+ {
+ }
+ else if (getEnvStat == JNI_EVERSION)
+ LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
+ else if (getEnvStat == JNI_ERR)
+ LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
+
+ cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, faceCount);
+
+ if (cameraWrapper.javaEnv->ExceptionCheck())
+ cameraWrapper.javaEnv->ExceptionDescribe();
+
+ cameraWrapper.javaVM->DetachCurrentThread();
+}
+
+bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
+{
+ CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
+ PLPLContext& plplContext(cameraWrapper.plplContext);
+ const st_ff_vect_t& faceFeatures(*(const st_ff_vect_t*)(pm->buffer));
+
+ plplContext.cmds.clear();
+ plplContext.params.clear();
+
+ float width_scale = ((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width;
+ float height_scale = ((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height;
+
+ for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
+ {
+ plplContext.cmds.push_back(PLPLC_COLOR);
+ plplContext.params.push_back('F');
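+ // choose the box colour from head pose: faces that pass the 30/30/30 in-cone test
+ // get one colour (a different one when partially out of frame), all others another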
+ if (iter->test_face_in_cone(30.0f, 30.0f, 30.0f))
+ {
+ if (iter->outOfFrame)
+ {
+ plplContext.params.push_back(255);
+ plplContext.params.push_back(255);
+ plplContext.params.push_back(0);
+ plplContext.params.push_back(255);
+ }
+ else
+ {
+ plplContext.params.push_back(255);
+ plplContext.params.push_back(0);
+ plplContext.params.push_back(0);
+ plplContext.params.push_back(255);
+ }
+ }
+ else
+ {
+ plplContext.params.push_back(0);
+ plplContext.params.push_back(255);
+ plplContext.params.push_back(0);
+ plplContext.params.push_back(255);
+ }
+
+ plplContext.cmds.push_back(PLPLC_RECT);
+ plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
+ plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
+ plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale));
+ plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale));
+
+ std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
+ if (iterFaceLabel != cameraWrapper.faceLabels.end())
+ {
+ plplContext.cmds.push_back(PLPLC_WTEXT);
+ plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale));
+ plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale));
+ const wchar_t* label = iterFaceLabel->second.c_str();
+ plplContext.params.push_back(PLPLType(label));
+ }
+ }
+
+ return false;
+}
+
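+// face-cache breaker: when the cache is unlocked, faces are present and a Java
+// callback is registered, cache the frame and notify the Java layer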
+bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
+{
+ CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
+
+ if (cameraWrapper.faceCacheLocked)
+ return false;
+
+ int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
+ if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
+ return false;
+
+ cameraWrapper.faceCache.cachePm(*pm);
+ invokeCallback(cameraWrapper, faceCount);
+
+ return false;
+}
+
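+// debug helper: pushes a fixed overlay (wide text, labels, colour, rectangle) into
+// plplContext; only invoked from the commented-out "#debug" block in decoder_thd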
+void test_paint(CameraWrapper& cameraWrapper)
+{
+ cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
+ cameraWrapper.plplContext.params.push_back(100);
+ cameraWrapper.plplContext.params.push_back(100);
+ cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));
+
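+ // "会员" = "member"; these labels exercise wide-character (CJK) text rendering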
+ cameraWrapper.setFaceLabel(0, L"会员vi");
+ cameraWrapper.setFaceLabel(1, L"会员ab");
+ cameraWrapper.setFaceLabel(3, L"会员wr");
+
+ cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
+ cameraWrapper.plplContext.params.push_back('F');
+ cameraWrapper.plplContext.params.push_back(255);
+ cameraWrapper.plplContext.params.push_back(255);
+ cameraWrapper.plplContext.params.push_back(255);
+ cameraWrapper.plplContext.params.push_back(255);
+ cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
+ cameraWrapper.plplContext.params.push_back(20);
+ cameraWrapper.plplContext.params.push_back(20);
+ cameraWrapper.plplContext.params.push_back(100);
+ cameraWrapper.plplContext.params.push_back(100);
+}
+
+//struct ScopeMutexLocker
+//{
+// pthread_mutex_t* mut;
+// ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut){pthread_mutex_lock(mut);};
+// ~ScopeMutexLocker(){pthread_mutex_unlock(mut);}
+// //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
+//};
+
+void cw_elem_destory_func(PipeLineElem* elem)
+{
+ delete elem;
+}
+
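+// tears down the decoder pipeline, waits briefly, and rebuilds it; used after the
+// live daemon has killed a stalled RTSP client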
+bool CameraWrapper::resetPl()
+{
+ pipeLineDecoderDetector->finit(cw_elem_destory_func);
+ sleep(2);
+ return initPl_DecoderPl();
+}
+
+/*static*/ void* CameraWrapper::decoder_thd(void* arg)
+{
+ LOG_INFO << "CameraWrapper::decoder_thd start" << LOG_ENDL;
+ CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
+
+ while(cameraWrapper.running)
+ {
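+ // if the watchdog killed the RTSP client, rebuild the decoder pipeline before
+ // pulling any more frames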
+ if (cameraWrapper.killed)
+ {
+ LOG_WARN << "CameraWrapper::killed" << LOG_ENDL;
+ cameraWrapper.resetPl();
+ cameraWrapper.killed = false;
+ sleep(2);
+ }
+
+ PipeLineElem* last = cameraWrapper.pipeLineDecoderDetector->pipe();
+ bool ret = cameraWrapper.pipeLineDecoderDetector->check_pipe_complete(last);
+ //LOG_DEBUG << "pipe ret=" << ret << LOG_ENDL;
+
+ if (!ret)
+ continue;
+
+ PipeMaterial pm;
+ ret = last->gain(pm);
+ if (!ret)
+ continue;
+
+ cameraWrapper.lastAliveTime = time(nullptr);
+
+ if (! cameraWrapper.faceCacheLocked)
+ {
+ PipeMaterial pmAnalizer(pm);
+ PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
+ bool ret = last->gain(pmAnalizer);
+ if (ret)
+ pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
+ }
+
+ pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
+
+ //#debug
+ //test_paint(cameraWrapper);
+
+ cameraWrapper.pipeLineRender->pipe(&pm);
+ }
+
+ LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;
+}
+
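+// watchdog thread: every 10 s, if no frame has been gained for more than 20 s,
+// kill the RTSP client and flag the decoder thread to rebuild its pipeline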
+/*static*/ void* CameraWrapper::live_daemon_thd(void* arg)
+{
+ LOG_INFO << "CameraWrapper::live_daemon_thd start" << LOG_ENDL;
+ CameraWrapper& cameraWrapper = *(CameraWrapper*)arg;
+
+ while(cameraWrapper.running)
+ {
+ sleep(10);
+
+ if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
+ {
+ PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
+ rtspClient->kill();
+
+ cameraWrapper.killed = true;
+ }
+ }
+
+ return NULL;
+}
--
Gitblit v1.8.0