#include <PL_Gainer.h>
#include <PL_Paint.h>
#include <PL_Scale.h>
#include <PL_V4L2Source.h>
#include <PL_BlockGrouping.h>
#include <PL_ColorConv.h>
#include <PL_AndroidMediaCodecEncoder.h>

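// Stop processing and release the render, analyzer, and decoder/detector pipelines.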
{
    stop();
    delete pipeLineRender;
    delete pipeLineAnalyzer;
    delete pipeLineDecoderDetector;
}

        running = false;
        return false;
    }

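    // Spawn the JNI callback thread (jni_call_thd) that forwards face-count updates to Java.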
    ret = pthread_create(&jni_call_thid, NULL, CameraWrapper::jni_call_thd, this);
    if (ret != 0)
    {
        LOGP(ERROR, "pthread_create jni_call_thid: %s\n", strerror(ret));
        running = false;
        return false;
    }
    return true;
}

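    // Register the pipeline element factories by name; the single-thread face tracker
    // and the MediaCodec encoder entries are currently disabled.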
    PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
    PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
    PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
    //PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
    PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
    PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
    PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
    PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
    PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
    PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
    //PipeLine::register_global_elem_creator("PL_AndroidMediaCodecEncoder", create_PL_AndroidMediaCodecEncoder);

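    // Build the decoder/detector pipeline first; the analyzer and render pipelines are assembled below.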
    bool ret = initPl_DecoderPl();
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector init error" << LOG_ENDL;
        return false;
    }

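    // Analyzer pipeline: a PL_Gainer followed by a PL_BlockGrouping configured with the pl_analizer_plbg_* callbacks.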
    {
        PL_Gainer* plGainer = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_Gainer");
        ret = plGainer->init(nullptr);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plGainer.init error" << LOG_ENDL;
            return false;
        }

        PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
        PL_BlockGrouping_Config plbgConfig;
        plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
        plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
        ret = plBG->init(&plbgConfig);
        if (!ret)
        {
            LOG_ERROR << "pipeLineAnalyzer.plBG.init error" << LOG_ENDL;
            return false;
        }
    }

    {

    }

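    // Scale frames to 640x480 for the render pipeline.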
    PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale");
    plScaleCfg.toWidth = 640;
    plScaleCfg.toHeight = 480;
    ret = plScale->init(&plScaleCfg);
    if (!ret)
    {
        LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL;
        return false;
    }

    //PL_AndroidMediaCodecEncoder_Config amceCfg;
    //amceCfg.ak_bit_rate = 5000000; // 512KB
    //amceCfg.ak_color_format = 21; // COLOR_FormatYUV420SemiPlanar
    //amceCfg.ak_frame_rate = 25;
    //amceCfg.ak_height = 480;
    //amceCfg.ak_i_frame_interval = 20;
    //amceCfg.ak_mime = "video/avc";
    //amceCfg.ak_width = 640;
    //PL_AndroidMediaCodecEncoder* plAMCE = (PL_AndroidMediaCodecEncoder*)pipeLineRender->push_elem("PL_AndroidMediaCodecEncoder");
    //ret = plAMCE->init(&amceCfg);
    //if (!ret)
    //{
    //    LOG_ERROR << "pipeLineRender.plAMCE init error" << LOG_ENDL;
    //    return false;
    //}

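    // Push the color-conversion element onto the render pipeline.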
    PL_ColorConv_Config PLColorConvCfg;
    PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");

    //    return false;
    //}

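    // Face tracking (SenseTime SDK): push the multi-threaded tracker onto the decoder/detector pipeline.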
#ifdef USE_ST_SDK
    //PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");
    //ret = sfTrack->init(&sftConfig);
    //if (!ret)
    //{
    //    LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
    //    return false;
    //}
    PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
    ret = sfTrack->init(&sftConfig);
    if (!ret)
    {
        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
        return false;
    }
#endif

    return ret;
}

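    // Clear the cached face labels, then record the label for this track id.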
    faceLabels.clear();

    faceLabels.insert(std::make_pair(st_track_id, label));
    LOG_ERROR << "client setFaceLabel" << LOG_ENDL;
}

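// fireFaceCount runs on a native thread, so it must obtain a JNIEnv for the current
// thread (attaching it to the JVM if needed) before calling back into Java.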
void CameraWrapper::fireFaceCount(int faceCount)
{
    // double check it's all ok
    int getEnvStat = javaVM->GetEnv((void **)&(javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (javaVM->AttachCurrentThread(&(javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
            getEnvStat = JNI_OK;
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;

    javaEnv->CallStaticVoidMethod(faceCallbackClazz, faceCallbackFunc, cameraIdx, faceCount);

    if (javaEnv->ExceptionCheck())
        javaEnv->ExceptionDescribe();

    javaVM->DetachCurrentThread();
    this->faceCount = faceCount;
    this->faceCountChanged = true;
}

bool cw_pm_breaker_ptr_paint(const PipeMaterial* pm, void* args)
{

    return false;
}

//bool cw_pm_breaker_ptr_face(const PipeMaterial* pm, void* args)
//{
//    CameraWrapper& cameraWrapper = *(CameraWrapper*)args;
//
//    if (cameraWrapper.faceCacheLocked)
//        return false;
//
//    int faceCount = cameraWrapper.faceCache.getFaceCount(*pm);
//    if (faceCount <= 0 || cameraWrapper.faceCallbackFunc == 0)
//        return false;
//
//    cameraWrapper.faceCache.cachePm(*pm);
//    invokeCallback(cameraWrapper, faceCount);
//
//    return false;
//}

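// test_paint: queue a PLPLC_WTEXT draw command with an (x, y) position and a wide-string payload.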
void test_paint(CameraWrapper& cameraWrapper)
{
    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    //cameraWrapper.plplContext.clear();

    cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊"));

    cameraWrapper.plplContext.params.push_back(100);
    cameraWrapper.plplContext.params.push_back(100);
}

//struct ScopeMutexLocker
//{
//    pthread_mutex_t* mut;
//    ScopeMutexLocker(pthread_mutex_t* _mut) : mut(_mut) { pthread_mutex_lock(mut); }
//    ~ScopeMutexLocker() { pthread_mutex_unlock(mut); }
//    //ScopeMutexLocker(&(cameraWrapper.live_daemon_mut));
//};

void cw_elem_destory_func(PipeLineElem* elem)
{

    cameraWrapper.lastAliveTime = time(nullptr);

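    // Apply the paint breaker to the frame, then feed it through the render pipeline.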
    //if (! cameraWrapper.faceCacheLocked)
    //{
    //    PipeMaterial pmAnalizer(pm);
    //    PipeLineElem* last = cameraWrapper.pipeLineAnalyzer->pipe(&pmAnalizer);
    //    bool ret = last->gain(pmAnalizer);
    //    if (ret)
    //        pmAnalizer.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_face, &(cameraWrapper));
    //}

    pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));
    //pm.breake(PipeMaterial::PMT_PTR, MB_Frame::MBFT__FIRST, cw_pm_breaker_ptr_paint, &(cameraWrapper));

    //#debug
    //test_paint(cameraWrapper);

    cameraWrapper.pipeLineRender->pipe(&pm); //#todo
    //if (cameraWrapper.pipeLineRender->check_pipe_complete(last = cameraWrapper.pipeLineRender->pipe(&pm)))
    //    last->gain(pm);
    cameraWrapper.pipeLineRender->pipe(&pm);
}

    LOG_INFO << "CameraWrapper::decoder_thd stop, ret=" << LOG_ENDL;

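    // Watchdog loop: wakes every 10 s; the staleness check on lastAliveTime below is
    // currently skipped by the `continue`.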
    while (cameraWrapper.running)
    {
        sleep(10);

        continue;
        if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
        {
            PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);

        }
    }
}

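// jni_call_thd: dedicated native thread that polls faceCountChanged and, when set,
// attaches to the JVM and forwards (cameraIdx, faceCount) to the Java callback.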
void *CameraWrapper::jni_call_thd(void *arg) {

    CameraWrapper* cameraWrapper = (CameraWrapper*)arg;
    while (cameraWrapper->running)
    {
        if (cameraWrapper->faceCountChanged) {
            // double check it's all ok
            LOG_ERROR << "client fireFaceCount start" << LOG_ENDL;
            int getEnvStat = cameraWrapper->javaVM->GetEnv((void **)&(cameraWrapper->javaEnv), JNI_VERSION_1_6);
            if (getEnvStat == JNI_EDETACHED)
            {
                //LOG_WARN << "GetEnv: not attached" << std::endl;
                if (cameraWrapper->javaVM->AttachCurrentThread(&(cameraWrapper->javaEnv), NULL) != 0)
                    LOG_WARN << "Failed to attach" << LOG_ENDL;
                else
                    getEnvStat = JNI_OK;
            }
            else if (getEnvStat == JNI_OK)
            {
            }
            else if (getEnvStat == JNI_EVERSION)
                LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
            else if (getEnvStat == JNI_ERR)
                LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
            LOG_ERROR << "client fireFaceCount end" << LOG_ENDL;

            cameraWrapper->javaEnv->CallStaticVoidMethod(cameraWrapper->faceCallbackClazz,
                                                         cameraWrapper->faceCallbackFunc,
                                                         cameraWrapper->cameraIdx,
                                                         cameraWrapper->faceCount);

            if (cameraWrapper->javaEnv->ExceptionCheck())
                cameraWrapper->javaEnv->ExceptionDescribe();
            cameraWrapper->javaVM->DetachCurrentThread();
            cameraWrapper->faceCountChanged = false;
        }
        usleep(10000);
    }

    return NULL;
}