VisitFace/RtspNativeCodec/app/src/main/cpp/CMakeLists.txt
@@ -6,8 +6,8 @@
 #set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -std=c++11 -fno-rtti -Wall -UNDEBUG")
 #set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -std=c++11 -DANDROID_PLATFORM=android-22 -DANDROID_TOOLCHAIN=gcc -DANDROID_STL=gnustl_static -Wall -UNDEBUG")
-set(WORKSPACE_PATH "D:/workspace/proxy")
-set(ARCH "armeabi-v7a")
+set(WORKSPACE_PATH "D:/work/proxy")
+set(ARCH "arm64-v8a")
 
 include_directories(
     "${WORKSPACE_PATH}/RtspFace"
@@ -66,6 +66,7 @@
     "${WORKSPACE_PATH}/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp"
     "${WORKSPACE_PATH}/RtspFace/PL_AndroidSurfaceViewRender.cpp"
     "${WORKSPACE_PATH}/RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp"
+    "${WORKSPACE_PATH}/RtspFace/PL_SensetimeFaceTrack.cpp"
     "${WORKSPACE_PATH}/RtspFace/PL_Gainer.cpp"
    "${WORKSPACE_PATH}/RtspFace/PL_Scale.cpp"
     "${WORKSPACE_PATH}/RtspFace/PL_ColorConv.cpp"
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperClient.cpp
@@ -204,11 +204,13 @@
     faceLabels.clear();
     faceLabels.insert(std::make_pair(st_track_id, label));
+    LOG_ERROR <<"client setFaceLabel : !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
 }
 
 void CameraWrapper::fireFaceCount(int faceCount)
 {
     // double check it's all ok
+    LOG_ERROR <<"client fireFaceCount start!!!!!!!!"<< LOG_ENDL;
     int getEnvStat = javaVM->GetEnv((void **)&(javaEnv), JNI_VERSION_1_6);
     if (getEnvStat == JNI_EDETACHED)
     {
@@ -224,6 +226,7 @@
         LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
     else if (getEnvStat == JNI_ERR)
         LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
+    LOG_ERROR <<"client fireFaceCount end!!!!!!!!!!!!"<< LOG_ENDL;
 
     javaEnv->CallStaticVoidMethod(faceCallbackClazz, faceCallbackFunc, cameraIdx, faceCount);
@@ -296,7 +299,7 @@
 void test_paint(CameraWrapper& cameraWrapper)
 {
-    cameraWrapper.plplContext.clear();
+    //cameraWrapper.plplContext.clear();
     cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT);
     cameraWrapper.plplContext.params.push_back(100);
@@ -380,7 +383,7 @@
     while(cameraWrapper.running)
     {
         sleep(10);
+        continue;
         if (time(nullptr) - cameraWrapper.lastAliveTime > 20)
         {
             PL_RTSPClient* rtspClient = (PL_RTSPClient*)cameraWrapper.pipeLineDecoderDetector->at(0);
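CameraWrapperClient.cpp's fireFaceCount calls back into Java from a native pipeline thread, which is why it probes JavaVM::GetEnv and special-cases JNI_EDETACHED before CallStaticVoidMethod. A minimal sketch of that attach-or-reuse pattern; the g_* globals are stand-ins for the members CameraWrapper caches at init time, not names from this repo:

```cpp
#include <jni.h>

// Hypothetical cached state; the real project keeps these on CameraWrapper.
static JavaVM*   g_vm            = nullptr;
static jclass    g_callbackClazz = nullptr;   // must be a GlobalRef
static jmethodID g_callbackFunc  = nullptr;   // static void onFaceCount(int cameraIdx, int faceCount)

// Call a static Java method from a thread that may not be attached to the JVM.
void fireFaceCountSketch(int cameraIdx, int faceCount)
{
    JNIEnv* env = nullptr;
    bool attachedHere = false;

    int getEnvStat = g_vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        // Pure native thread: attach it before making any JNI call.
        if (g_vm->AttachCurrentThread(&env, nullptr) != JNI_OK)
            return;
        attachedHere = true;
    }
    else if (getEnvStat != JNI_OK)
    {
        return; // JNI_EVERSION or JNI_ERR
    }

    env->CallStaticVoidMethod(g_callbackClazz, g_callbackFunc, cameraIdx, faceCount);
    if (env->ExceptionCheck())
        env->ExceptionClear();   // don't leave a pending exception on this thread

    if (attachedHere)
        g_vm->DetachCurrentThread();
}
```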
VisitFace/RtspNativeCodec/app/src/main/cpp/CameraWrapperServer.cpp
@@ -11,6 +11,8 @@
 #include <PL_ColorConv.h>
 #include <PL_AndroidMediaCodecEncoder.h>
 #include <PL_RTSPServer2.h>
+#include <iostream>
+
 CameraWrapper::~CameraWrapper()
 {
     stop();
@@ -95,8 +97,8 @@
     PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient);
     PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder);
     PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
-    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
-    //PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
+    //PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
+    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
     PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
     PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale);
     PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);
@@ -162,16 +164,16 @@
     //    return false;
     //}
 
-    PL_Paint_Config plPaintCfg;
-    plPaintCfg.fontPath = fontPath;
-    plPaintCfg.plplCtx = &plplContext;
-    PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
-    ret = plPaint->init(&plPaintCfg);
-    if (!ret)
-    {
-        LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
-        return false;
-    }
+//    PL_Paint_Config plPaintCfg;
+//    plPaintCfg.fontPath = fontPath;
+//    plPaintCfg.plplCtx = &plplContext;
+//    PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
+//    ret = plPaint->init(&plPaintCfg);
+//    if (!ret)
+//    {
+//        LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL;
+//        return false;
+//    }
 
     //PL_AndroidSurfaceViewRender* asvRender = (PL_AndroidSurfaceViewRender*)pipeLineRender->push_elem("PL_AndroidSurfaceViewRender");
     //ret = asvRender->init(&asvrConfig);
@@ -242,20 +244,20 @@
     //}
 
 #ifdef USE_ST_SDK
-    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo use multi
+//    PL_SensetimeFaceTrack *sfTrack = (PL_SensetimeFaceTrack *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrack");//#todo use multi
+//    ret = sfTrack->init(&sftConfig);
+//    if (!ret)
+//    {
+//        LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
+//        return false;
+//    }
+    PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
     ret = sfTrack->init(&sftConfig);
     if (!ret)
     {
         LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
         return false;
     }
-    //PL_SensetimeFaceTrackMultiTrd *sfTrack = (PL_SensetimeFaceTrackMultiTrd *) pipeLineDecoderDetector->push_elem("PL_SensetimeFaceTrackMitiTrd");
-    //ret = sfTrack->init(&sftConfig);
-    //if (!ret)
-    //{
-    //    LOG_ERROR << "pipeLineDecoderDetector.sfTrack.init error" << LOG_ENDL;
-    //    return false;
-    //}
 #endif
 
     return ret;
@@ -380,15 +382,36 @@
         return false;
 
     cameraWrapper.faceCache.cachePm(*pm);
 
     //remote call start; for generality, the following steps are not wrapped inside RtspFaceDetectClient
-    RtspFaceDetectClient* client = getRtspFaceDetectClient();
-    if (client == nullptr)
-        return false;
-    //#todo improve the encapsulation
     try
     {
-        LOG_INFO <<"try start"<< LOG_ENDL;
-        auto request = client->fireFaceCountListenerRequest();
-        request.setCameraIndex(cameraWrapper.cameraIdx);
-        request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
-        auto sendAct = request.send();
-        sendAct.wait(client->getWaitScope());
+        // RtspFaceDetect::Client* rClient = getRtspFaceDetectClient()->getClient();
+        // auto& waitScope = getRtspFaceDetectClient()->getWaitScope();
+        RtspFaceDetectClient* client = getRtspFaceDetectClient();
+        RtspFaceDetect::Client rClient = client->getEzRpcClient()->getMain<RtspFaceDetect>();
+        auto& waitScope = client->getWaitScope();
+        auto request = rClient.fireFaceCountListenerRequest();
+        request.setCameraIndex(cameraWrapper.cameraIdx);
+        request.setFaceCount(cameraWrapper.faceCache.getFaceCount(*pm));
+        LOG_INFO <<"call client : i have face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+        auto sendAct = request.send();
+        sendAct.wait(waitScope);
     }
     catch (const kj::Exception& e)
     {
+        LOG_INFO <<"catch!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+        LOG_ERROR << "catch!!!" <<e.getDescription().cStr() << LOG_ENDL;
         std::cout << e.getDescription().cStr() << std::endl;
         return false;
     }
+    catch (std::exception e){
+        LOG_ERROR << "catch!!!" <<e.what() << LOG_ENDL;
+    }
     //remote call end
     return true;
 }
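The fireFaceCount change above is the standard Cap'n Proto ez-rpc client sequence: bootstrap the interface with getMain, build a request, send it, then block on the wait scope. A stripped-down sketch of that round trip, assuming only the project's RemoteMethod.capnp schema (RtspFaceDetect, fireFaceCountListener, cameraIndex, faceCount); the address and port are placeholders, and the real code reuses one cached EzRpcClient instead of constructing a new one per call:

```cpp
#include <capnp/ez-rpc.h>
#include "RemoteMethod.capnp.h"   // defines RtspFaceDetect

// Sketch only: one-shot notification call.
bool notifyFaceCount(int cameraIdx, int faceCount)
{
    try
    {
        capnp::EzRpcClient rpc("192.168.1.97", 8111);            // placeholder endpoint
        auto& waitScope = rpc.getWaitScope();
        RtspFaceDetect::Client cap = rpc.getMain<RtspFaceDetect>();

        auto request = cap.fireFaceCountListenerRequest();
        request.setCameraIndex(cameraIdx);
        request.setFaceCount(faceCount);
        request.send().wait(waitScope);                          // synchronous round trip
        return true;
    }
    catch (const kj::Exception& e)
    {
        // Network and RPC failures surface as kj::Exception.
        return false;
    }
}
```

An EzRpcClient and its WaitScope are tied to the thread that created them, which is one reason the wrapper classes in the following files create the client lazily and hand out the same instance.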
VisitFace/RtspNativeCodec/app/src/main/cpp/RemoteFuncClient.cpp
@@ -6,44 +6,94 @@
 #include "RemoteFunc.h"
 #include "RtspNativeCodecJNIClient.h"
-RemoteServer<RtspFaceDetectImpl> remoteServer("127.0.0.1", 8111);
+#include <string>
+#include <iostream>
+using namespace std;
+
+//#todo change how the service thread's constructor parameters are passed in
+RemoteServer<RtspFaceDetectImpl> remoteServer("192.168.1.97", 8111);
+
+static RtspFaceViewClient* rtspFaceViewClient = nullptr;
 
 void startRemoteServer()
 {
+    LOG_ERROR << "pansen : this is clinet" << std::endl;
     remoteServer.start();
+//#todo move this call to another place
+//    try
+//    {
+//        RtspFaceViewClient rtspFaceViewClient = RtspFaceViewClient();
+//        RtspFaceView::Client& rClient = rtspFaceViewClient.getClient();
+//
+//        auto& waitScope = rtspFaceViewClient.getWaitScope();
+//        //auto request = rClient.abcdef();
+//
+//        auto sendAct = request.send();
+//
+//        sendAct.wait(waitScope);
+//    }
+//    catch (const kj::Exception& e)
+//    {
+//        cout << e.getDescription().cStr() << endl;
+//        return;
+//    }
+//#end
 }
 
-RtspFaceViewClient* getRtspFaceViewClient()
+RtspFaceViewClient * getRtspFaceViewClient()
 {
-    if (rtspFaceViewClient != nullptr)
-    {
-        try
-        {
-            rtspFaceViewClient = new RtspFaceViewClient;
-        }
-        catch(...)
-        {
-            delete rtspFaceViewClient;
-            rtspFaceViewClient = nullptr;
-            LOG_ERROR << "aaaaaaaa"<<LOG_ENDL;//#todo
-        }
-    }
+    if(rtspFaceViewClient == nullptr)
+        rtspFaceViewClient = new RtspFaceViewClient;
     return rtspFaceViewClient;
 }
 
 RtspFaceViewClient::RtspFaceViewClient() :
-    rpcClient("127.0.0.1", 8112),
-    RtspFaceView::Client(rpcClient.getMain<RtspFaceView>())
+    rpcClient(nullptr)
 {}
 
+RtspFaceView::Client& RtspFaceViewClient::getClient()
+{
+    if (client == nullptr)
+    {
+        try
+        {
+            client = new RtspFaceView::Client(getEzRpcClient()->getMain<RtspFaceView>());
+        }
+        catch (const kj::Exception& e)
+        {
+            delete client;
+            client = nullptr;
+            throw e;
+        }
+    }
+    return *client;
+}
+
 ::kj::WaitScope &RtspFaceViewClient::getWaitScope()
 {
-    return rpcClient.getWaitScope();
+    return getEzRpcClient()->getWaitScope();
 }
 
+capnp::EzRpcClient * RtspFaceViewClient::getEzRpcClient()
+{
+    if (rpcClient == nullptr)
+    {
+        try
+        {
+            //#todo ip modify
+            rpcClient = new capnp::EzRpcClient("192.168.1.94", 8112);
+        }
+        catch (const kj::Exception& e)
+        {
+            delete rpcClient;
+            rpcClient = nullptr;
+            throw e;
+        }
+    }
+    return rpcClient;
+}
+
 ::kj::Promise<void> RtspFaceDetectImpl::fireFaceCountListener(FireFaceCountListenerContext context)
 {
     auto results = context.getResults();
+    LOG_ERROR << "!!!!!!!!!!!!!!!!!!!!!WHO find face!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" << LOG_ENDL;
     remoteFireFaceCountListener(context.getParams(), results);
     context.setResults(results);
     return kj::READY_NOW;
 }
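The new getters do lazy construction by hand (null check, new, catch, rethrow). If the goal is just create-on-first-use, a C++11 function-local static gives the same behaviour with less machinery; a sketch reusing the names from this file, assuming the constructor stays as cheap as the new `rpcClient(nullptr)` version (the Cap'n Proto objects it hands out must still only be touched from one thread):

```cpp
#include "RemoteFuncClient.h"   // declares RtspFaceViewClient and getRtspFaceViewClient

// Sketch: C++11 guarantees thread-safe, one-time construction of a
// function-local static, replacing the hand-rolled nullptr checks.
RtspFaceViewClient* getRtspFaceViewClient()
{
    static RtspFaceViewClient instance;   // built on first call, never deleted
    return &instance;
}
```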
VisitFace/RtspNativeCodec/app/src/main/cpp/RemoteFuncClient.h
@@ -5,26 +5,29 @@
 #ifndef RTSPNATIVECODEC_REMOTEMETHODCLIENT_H
 #define RTSPNATIVECODEC_REMOTEMETHODCLIENT_H
 
 #include <string>
 #include <capnp/message.h>
 #include <capnp/serialize-packed.h>
 #include <capnp/ez-rpc.h>
 #include "RemoteMethod.capnp.h"
 #include "CameraWrapperServer.h"
 
-void startRemoteServer();
-class RtspFaceViewClient : RtspFaceView::Client
+// class RtspFaceViewClient
+class RtspFaceViewClient
 {
 public:
     RtspFaceViewClient();
+    RtspFaceView::Client& getClient();
     ~RtspFaceViewClient() {};
 
     ::kj::WaitScope &getWaitScope();
+    capnp::EzRpcClient* getEzRpcClient();
 
 private:
-    capnp::EzRpcClient rpcClient;
+    RtspFaceView::Client *client;
+    capnp::EzRpcClient* rpcClient;
 };
 
+void startRemoteServer();
 RtspFaceViewClient* getRtspFaceViewClient();
 
 class RtspFaceDetectImpl final : public RtspFaceDetect::Server
VisitFace/RtspNativeCodec/app/src/main/cpp/RemoteFuncServer.cpp
@@ -2,46 +2,100 @@
 // Created by pansen on 2017/8/4.
 //
 #include "AppConfig.h"
-#include "RemoteFunc.h"
 #include "RemoteFuncServer.h"
+#include "RemoteFunc.h"
 #include "RtspNativeCodecJNIServer.h"
-RemoteServer<RtspFaceViewImpl> remoteServer("127.0.0.1", 8112);
+#include <string>
+#include <iostream>
+using namespace std;
+
+//#todo change how the service thread's constructor parameters are passed in
+RemoteServer<RtspFaceViewImpl> remoteServer("192.168.1.94", 8112);
+
+static RtspFaceDetectClient* rtspFaceDetectClient = nullptr;
 
 void startRemoteServer()
 {
-    ;
+    AppConfig config;
+    config.loadConfig("data/config.cfg");
 
-    remoteServer.start();
+    remoteServer.start();
+//    //#todo move this call to another place
+//    try
+//    {
+//        RtspFaceDetectClient rtspFaceDetectClient = RtspFaceDetectClient();
+//        RtspFaceDetect::Client& rClient = rtspFaceDetectClient.getClient();
+//
+//        auto& waitScope = rtspFaceDetectClient.getWaitScope();
+//        auto request = rClient.fireFaceCountListenerRequest();
+//        auto sendAct = request.send();
+//        sendAct.wait(waitScope);
+//    }
+//    catch (const kj::Exception& e)
+//    {
+//        cout << e.getDescription().cStr() << endl;
+//        return;
+//    }
+//#end
 }
 
-RtspFaceDetectClient* getRtspFaceDetectClient()
+RtspFaceDetectClient * getRtspFaceDetectClient()
 {
-    if (rtspFaceDetectClient != nullptr)
-    {
-        try
-        {
-            rtspFaceDetectClient = new RtspFaceDetectClient;
-        }
-        catch(...)
-        {
-            delete rtspFaceDetectClient;
-            rtspFaceDetectClient = nullptr;
-            LOG_ERROR << "aaaaaaaa"<<LOG_ENDL;//#todo
-        }
-    }
-    return rtspFaceDetectClient;
+    if(rtspFaceDetectClient == nullptr)
+        rtspFaceDetectClient = new RtspFaceDetectClient;
+    return rtspFaceDetectClient;
 }
 
-//#todo
+//#todo revise the constructor
 RtspFaceDetectClient::RtspFaceDetectClient() :
-    rpcClient("127.0.0.1", 8111),
-    RtspFaceDetect::Client(rpcClient.getMain<RtspFaceDetect>())
-{}
+    rpcClient(nullptr)
+{
+}
 
+RtspFaceDetect::Client* RtspFaceDetectClient::getClient()
+{
+    LOG_INFO << "new client @ " << (int*)client << LOG_ENDL;
+    if (client == nullptr)
+    {
+        LOG_INFO << "new client 2" << LOG_ENDL;
+        try
+        {
+            client = new RtspFaceDetect::Client(getEzRpcClient()->getMain<RtspFaceDetect>());
+        }
+        catch (const kj::Exception& e)
+        {
+            LOG_ERROR << "new client error" << LOG_ENDL;
+            delete client;
+            client = nullptr;
+            throw e;
+        }
+    }
+    return client;
+}
+
 ::kj::WaitScope &RtspFaceDetectClient::getWaitScope()
 {
-    return rpcClient.getWaitScope();
+    return getEzRpcClient()->getWaitScope();
 }
 
+capnp::EzRpcClient * RtspFaceDetectClient::getEzRpcClient()
+{
+    if (rpcClient == nullptr)
+    {
+        try
+        {
+            //#todo ip test
+            //rpcClient = new capnp::EzRpcClient("192.168.1.89", 8111); //client
+            rpcClient = new capnp::EzRpcClient("192.168.1.97", 8111);
+        }
+        catch (const kj::Exception& e)
+        {
+            delete rpcClient;
+            rpcClient = nullptr;
+            throw e;
+        }
+    }
+    return rpcClient;
+}
+
 ::kj::Promise<void> RtspFaceViewImpl::getFaceList(GetFaceListContext context)
@@ -49,6 +103,8 @@
     auto results = context.getResults();
     remoteGetFaceList(context.getParams(), results);
     context.setResults(results);
+    LOG_ERROR <<"getFaceList : !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+
     return kj::READY_NOW;
 }
 ::kj::Promise<void> RtspFaceViewImpl::getFaceImages(GetFaceImagesContext context)
@@ -56,6 +112,8 @@
     auto results = context.getResults();
     remoteGetFaceListImage(context.getParams(), results);
     context.setResults(results);
+    LOG_ERROR <<"getFaceImages : !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+
     return kj::READY_NOW;
 }
 ::kj::Promise<void> RtspFaceViewImpl::setFaceLabel(SetFaceLabelContext context)
@@ -63,4 +121,6 @@
     auto results = context.getResults();
     remoteSetFaceLabel(context.getParams(), results);
     context.setResults(results);
+    LOG_ERROR <<"setFaceLabel : !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+
     return kj::READY_NOW;
 }
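RemoteServer<RtspFaceViewImpl> is this project's own wrapper, but the Cap'n Proto serving loop it presumably drives is small. A sketch of that loop using the stock EzRpcServer API; the bind address and port are placeholders, and RtspFaceViewImpl is the implementation class from this file:

```cpp
#include <capnp/ez-rpc.h>
#include "RemoteFuncServer.h"   // declares RtspFaceViewImpl

// Sketch: serve RtspFaceView on the calling thread until the process exits.
void serveRtspFaceView(const char* bindAddress, unsigned int port)
{
    capnp::EzRpcServer server(kj::heap<RtspFaceViewImpl>(), bindAddress, port);
    auto& waitScope = server.getWaitScope();
    kj::NEVER_DONE.wait(waitScope);   // park this thread in the RPC event loop
}
```

A wrapper like RemoteServer would typically run this on its own std::thread so startRemoteServer() can return immediately.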
VisitFace/RtspNativeCodec/app/src/main/cpp/RemoteFuncServer.h
@@ -5,27 +5,32 @@
 #ifndef RTSPNATIVECODEC_REMOTEMETHODSERVER_H
 #define RTSPNATIVECODEC_REMOTEMETHODSERVER_H
 
 #include <string>
 #include <capnp/message.h>
 #include <capnp/serialize-packed.h>
 #include <capnp/ez-rpc.h>
 #include "RemoteMethod.capnp.h"
 
-class RtspFaceDetectClient : public RtspFaceDetect::Client
+//Client in the C++ module, used to notify the Java module when a face is detected
+class RtspFaceDetectClient
 {
 public:
     RtspFaceDetectClient();
+    RtspFaceDetect::Client* getClient();
     ~RtspFaceDetectClient() {};
 
     ::kj::WaitScope &getWaitScope();
+    capnp::EzRpcClient* getEzRpcClient();
 
 private:
-    capnp::EzRpcClient rpcClient;
+    RtspFaceDetect::Client* client;
+    capnp::EzRpcClient* rpcClient;
 };
 
 void startRemoteServer();
 RtspFaceDetectClient* getRtspFaceDetectClient();
 
+//Server side of the C++ module, used to answer calls from the Java side and pass values back
 class RtspFaceViewImpl final : public RtspFaceView::Server
 {
 public:
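Each override of RtspFaceViewImpl follows the same Cap'n Proto server shape: read context.getParams(), fill context.getResults(), return kj::READY_NOW. A sketch with dummy payload data; the method and field names are the ones this project's schema already uses, while the real getFaceList copies the protobuf produced by faceCache:

```cpp
#include <cstring>
#include "RemoteMethod.capnp.h"   // generated RtspFaceView interface

// Sketch of a server-side method: synchronous work, immediately-ready promise.
class RtspFaceViewSketch final : public RtspFaceView::Server
{
public:
    ::kj::Promise<void> getFaceList(GetFaceListContext context) override
    {
        int cameraIdx = context.getParams().getCameraIndex();
        (void)cameraIdx;                      // would select that camera's face cache

        kj::byte dummy[4] = {1, 2, 3, 4};     // placeholder payload
        auto out = context.getResults().initFaceList(sizeof(dummy));
        std::memcpy(out.begin(), dummy, sizeof(dummy));
        return kj::READY_NOW;
    }
};
```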
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNIClient.cpp
@@ -14,11 +14,13 @@
 #include <face_daemon_proto.h>
 #include <stdlib.h>
+#include <iostream>
 
 #include "DebugNetwork.h"
 #include "TeleWrapper.h"
 #include "cpu_sched_test.h"
+#include "RemoteFuncClient.h"
 
 //#include <mediastreamer2/include/mediastreamer2/msjava.h>
 
 std::stringstream logss;
@@ -36,6 +38,7 @@
 const size_t _faceImagesSize = MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT * 3;
 uint8_t _faceImages[_faceImagesSize]; // android stack is small
 
+
 void remoteFireFaceCountListener(const RtspFaceDetect::FireFaceCountListenerParams::Reader& params, RtspFaceDetect::FireFaceCountListenerResults::Builder& results){
     int count = params.getFaceCount();
     int index = params.getCameraIndex();
@@ -46,20 +49,30 @@
 void Java_cn_com_basic_face_util_RtspFaceNative_init(JNIEnv *env, jclass clazz)
 {
-    g_logger.set_level(INFO);
+    g_logger.set_level(INFO);
 
-    cpu_sched();
+    cpu_sched();
 
-    for (size_t i = 0; i < CAMERA_COUNT; i++)
-    {
-        g_CameraWrappers[i].cameraIdx = i + 1;
-#ifdef USE_ST_SDK
-        PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
-#endif
-        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
-        g_CameraWrappers[i].pipeLineRender = new PipeLine;
-        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
-    }
+    for (size_t i = 0; i < CAMERA_COUNT; i++)
+    {
+        g_CameraWrappers[i].cameraIdx = i + 1;
+        g_TeleWrapper.start();
+        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
+        g_CameraWrappers[i].pipeLineRender = new PipeLine;
+        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
+    }
 
     g_TeleWrapper.start();
+
+    //pansen test Client start
+    LOG_ERROR << "pansen test Client start" << std::endl;
+    startRemoteServer();
+    LOG_ERROR << "pansen test Client end" << std::endl;
+    //pansen test Client end
 }
@@ -200,13 +213,13 @@
         cameraWrapper.asvrConfig.outputOriginFrame = true;
         cameraWrapper.asvrConfig.outputRenderFrame = true;
         cameraWrapper.asvrConfig.scaleToWidth = 0;
-        cameraWrapper.asvrConfig.scaleToHeight = 0;
-        cameraWrapper.asvrConfig.directlyDisplay = true;
+        cameraWrapper.asvrConfig.scaleToHeight = 0;
+        cameraWrapper.asvrConfig.directlyDisplay = true;
     }
 
-    cameraWrapper.fontPath = "/data/msyh.ttc";
+    cameraWrapper.fontPath = "/data/msyh.ttc";
 
-    bool ret = cameraWrapper.initPl();
+    bool ret = cameraWrapper.initPl();
     if (ret)
         return (cameraWrapper.start() ? JNI_TRUE : JNI_FALSE);
     else
@@ -356,11 +369,34 @@
     size_t buffSize = sizeof(buffer);
 
     bool ret = false;
 #ifdef USE_ST_SDK
     uint8_t *pBufPB = buffer + sizeof(EVPHeader) + sizeof(FDP_FaceDetectPB);
     buffSize = sizeof(buffer) - sizeof(EVPHeader) - sizeof(FDP_FaceDetectPB);
     ret = cameraWrapper.faceCache.getFaceListPb(pBufPB, buffSize);
 #endif
+    // ps add -----8.11-----
+    try
+    {
+        LOG_ERROR <<"client : i want getFaceList!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+        RtspFaceViewClient* client = getRtspFaceViewClient();
+        RtspFaceView::Client rClient = client->getEzRpcClient()->getMain<RtspFaceView>();
+        auto& waitScope =client->getWaitScope();
+        auto request = rClient.getFaceListRequest();
+        request.setCameraIndex(cameraIdx);
+        auto sendAct = request.send();
+        auto result =sendAct.wait(waitScope);
+        ret = result.getFaceList().size() > 0;
+        LOG_DEBUG << "@@@ getFaceList get data" << LOG_ENDL;
+        memcpy(pBufPB,result.getFaceList().begin(),buffSize);
+    }
+    catch (const kj::Exception& e)
+    {
+        LOG_ERROR <<e.getDescription().cStr()<< LOG_ENDL;
+        ret = false;
+    }
+    //-------end-------
+    LOG_ERROR <<"client : i want getFaceList end!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+    //ret = cameraWrapper.faceCache.getFaceListPb(pBufPB, buffSize);
 
     if (!ret)
     {
@@ -405,71 +441,6 @@
     return buffSize;
 }
 
-jobject _Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages(JNIEnv *env, jclass clazz, jint cameraIdx, jobject faceImages)
-{
-    LOG_DEBUG << "@@@ _Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages" << LOG_ENDL;
-    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
-    cameraIdx -= 1;
-    CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]);
-
-    std::vector<NativeImgIdx> imgIdxes;
-    memset(_faceImages, 0, _faceImagesSize);
-
-    bool ret = false;
-#ifdef USE_ST_SDK
-    size_t faceImagesSize =_faceImagesSize;
-    ret = cameraWrapper.faceCache.getFaceListImage(imgIdxes, _faceImages,faceImagesSize);
-#endif
-
-    if (imgIdxes.size() == 0)
-    {
-        LOG_INFO << "No face image captured" << LOG_ENDL;
-        return nullptr;
-    }
-
-    jclass jcRefByteArray = env->FindClass("cn/com/basic/face/util/RefByteArray");
-    jfieldID jfRefByteArray_arr = env->GetFieldID(jcRefByteArray, "arr", "[B");
-
-    jbyteArray jbaFaceImages = env->NewByteArray(_faceImagesSize);
-    LOG_DEBUG << "_faceImagesSize=" << _faceImagesSize << LOG_ENDL;
-    env->SetByteArrayRegion(jbaFaceImages, 0, _faceImagesSize, (const jbyte *) _faceImages);
-    env->SetObjectField(faceImages, jfRefByteArray_arr, jbaFaceImages);
-
-    jclass jcArrayList = env->FindClass("java/util/ArrayList");
-    jmethodID jmArrayList_ctor = env->GetMethodID(jcArrayList, "<init>", "()V");
-    jmethodID jmArrayList_add = env->GetMethodID(jcArrayList, "add", "(Ljava/lang/Object;)Z");
-
-    jclass jcNativeImgIdx = env->FindClass("cn/com/basic/face/util/NativeImgIdx");
-    jmethodID jmNativeImgIdx_ctor = env->GetMethodID(jcNativeImgIdx, "<init>", "()V");
-    jfieldID jfNativeImgIdx_offset = env->GetFieldID(jcNativeImgIdx, "offset", "I");
-    jfieldID jfNativeImgIdx_size = env->GetFieldID(jcNativeImgIdx, "size", "I");
-    jfieldID jfNativeImgIdx_type = env->GetFieldID(jcNativeImgIdx, "type", "I");
-    jfieldID jfNativeImgIdx_width = env->GetFieldID(jcNativeImgIdx, "width", "I");
-    jfieldID jfNativeImgIdx_height = env->GetFieldID(jcNativeImgIdx, "height", "I");
-
-    jobject jobjArrayList_ImgIdxes = env->NewObject(jcArrayList, jmArrayList_ctor);
-
-    for (size_t i = 0; i < imgIdxes.size(); i++)
-    {
-        jobject jobjNativeImgIdx = env->NewObject(jcNativeImgIdx, jmNativeImgIdx_ctor);
-        env->SetIntField(jobjNativeImgIdx, jfNativeImgIdx_offset, imgIdxes[i].offset);
-        env->SetIntField(jobjNativeImgIdx, jfNativeImgIdx_size, imgIdxes[i].size);
-        env->SetIntField(jobjNativeImgIdx, jfNativeImgIdx_type, imgIdxes[i].type);
-        env->SetIntField(jobjNativeImgIdx, jfNativeImgIdx_width, imgIdxes[i].width);
-        env->SetIntField(jobjNativeImgIdx, jfNativeImgIdx_height, imgIdxes[i].height);
-
-        env->CallBooleanMethod(jobjArrayList_ImgIdxes, jmArrayList_add, jobjNativeImgIdx);
-
-        LOG_DEBUG << "imgIdx " << i << ":" << std::string(imgIdxes[i]) << LOG_ENDL;
-    }
-
-    return jobjArrayList_ImgIdxes;
-}
 
 jobject Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages(JNIEnv *env, jclass clazz, jint cameraIdx)
 {
@@ -480,12 +451,40 @@
     std::vector<NativeImgIdx> imgIdxes;
 
     size_t _faceImagesSize = MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT * 3;
     uint8_t *_faceImages = new uint8_t[_faceImagesSize]; // android stack is small
     memset(_faceImages, 0, _faceImagesSize);
 
     bool ret = false;
 #ifdef USE_ST_SDK
+    //#ps add --------8.11-------
+    LOG_ERROR <<"client : i want getFaceImages!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+    try
+    {
+        RtspFaceViewClient* client = getRtspFaceViewClient();
+        RtspFaceView::Client rClient = client->getEzRpcClient()->getMain<RtspFaceView>();
+        auto& waitScope =client->getWaitScope();
+        auto request = rClient.getFaceImagesRequest();
+        request.setCameraIndex(cameraIdx);
+        auto sendAct = request.send();
+        auto result = sendAct.wait(waitScope);
+        if(result.getImgIndexData().size() >0 && result.getImgData().size()>0)
+        {
+            ret =true;
+            LOG_DEBUG << "@@@ getFaceImages get data" << LOG_ENDL;
+            memcpy(_faceImages,result.getImgData().begin(),result.getImgData().size());
+            memcpy(&imgIdxes ,result.getImgIndexData().begin() ,result.getImgIndexData().size());
+        }
+    }
+    catch (const kj::Exception& e)
+    {
+        std::cout << e.getDescription().cStr() << std::endl;
+        ret = false;
+        LOG_ERROR << "Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages try fail" << LOG_ENDL;
+    }
+    LOG_ERROR <<"client : i want getFaceImages end!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+    //-------end-------
     ret = cameraWrapper.faceCache.getFaceListImage(imgIdxes, _faceImages, _faceImagesSize);
 #endif
@@ -625,44 +624,46 @@
 bool Java_cn_com_basic_face_util_RtspFaceNative_teleCall(JNIEnv *env, jclass clazz, jstring phone)
 {
-    std::string _phone;
-    {
-        const char *utfFunc = env->GetStringUTFChars(phone, NULL);
-        _phone = utfFunc;
-        env->ReleaseStringUTFChars(phone, utfFunc);
-    }
+    std::string _phone;
+    {
+        const char *utfFunc = env->GetStringUTFChars(phone, NULL);
+        _phone = utfFunc;
+        env->ReleaseStringUTFChars(phone, utfFunc);
+    }
 
-    const char *phoneNum = _phone.c_str();
-    TeleTask task;
-    task.command= TeleTask::CALL;
-    task.param = phoneNum;
-    LOG_INFO << "Java_cn_com_basic_face_util_RtspFaceNative_telCall::jni" << LOG_ENDL;
-    // g_TeleWrapper.pushTask(task);
+    const char *phoneNum = _phone.c_str();
+    TeleTask task;
+    task.command= TeleTask::CALL;
+    task.param = phoneNum;
+    LOG_INFO << "Java_cn_com_basic_face_util_RtspFaceNative_telCall::jni" << LOG_ENDL;
+    // g_TeleWrapper.pushTask(task);
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_teleHang(JNIEnv *env, jclass clazz)
 {
-    TeleTask task;
-    task.command =TeleTask::HANGUP;
-    // g_TeleWrapper.pushTask(task);
+    TeleTask task;
+    task.command =TeleTask::HANGUP;
+    // g_TeleWrapper.pushTask(task);
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_teleShutdown(JNIEnv *env, jclass clazz)
 {
-    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL;
-    g_TeleWrapper.stop();
+    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL;
+    g_TeleWrapper.stop();
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label)
 {
     LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel" << LOG_ENDL;
+    LOG_ERROR <<"ps ------------start"<< LOG_ENDL;
     assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
     cameraIdx -= 1;
     CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]);
 
     const jchar* jchars = env->GetStringChars(label, nullptr);
     std::wstring wstr;
+    LOG_ERROR <<"ps ------------4"<< LOG_ENDL;
     int lenth = env->GetStringLength(label);
     for (int i = 0; i < lenth; ++i)
     {
@@ -670,7 +671,39 @@
         wchar_t wt = (wchar_t)jc;
         wstr.push_back(wt);
     }
     env->ReleaseStringChars(label,jchars);
-    cameraWrapper.setFaceLabel(stTrackId, wstr);
+    // ps add ------8.11---------
+    LOG_ERROR <<"client : i want setFacelable!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+    try
+    {
+        // auto& waitScope = getRtspFaceViewClient()->getWaitScope();
+        // RtspFaceView::Client& rClient = getRtspFaceViewClient()->getClient();
+        RtspFaceViewClient* client = getRtspFaceViewClient();
+        RtspFaceView::Client rClient = client->getEzRpcClient()->getMain<RtspFaceView>();
+        auto& waitScope =client->getWaitScope();
+        auto request = rClient.setFaceLabelRequest();
+        request.setCameraIndex(cameraIdx);
+        const capnp::Data::Builder &labelData = request.initLabel(wstr.size());
+        LOG_ERROR <<"ps ------------3"<< LOG_ENDL;
+        memcpy((char*)labelData.begin(), wstr.c_str(), wstr.size());
+        request.setLabel(labelData);
+        request.setTrackId(stTrackId);
+        LOG_ERROR <<"ps ------------2"<< LOG_ENDL;
+        auto sendAct = request.send();
+        sendAct.wait(waitScope);
+    }
+    catch (const kj::Exception& e)
+    {
+        std::cout << e.getDescription().cStr() << std::endl;
+        return;
+    }
+    LOG_ERROR <<"client : i want setFacelable end!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
+    //-------end-------
+    //cameraWrapper.setFaceLabel(stTrackId, wstr);
 }
 
 } // extern C
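One catch in the new Java_..._getFaceImages path above: `memcpy(&imgIdxes, ...)` copies the raw imgIndexData bytes over the std::vector<NativeImgIdx> object itself rather than into its element storage. A sketch of the copy that appears to be intended, assuming imgIndexData is a packed array of trivially copyable NativeImgIdx records (the helper name is made up for illustration):

```cpp
#include <cstring>
#include <type_traits>
#include <vector>

// Sketch: size the vector from the byte count, then copy into its storage.
template <typename Pod>
void copyPackedInto(std::vector<Pod>& out, const unsigned char* bytes, size_t byteCount)
{
    static_assert(std::is_trivially_copyable<Pod>::value, "raw copy needs a trivially copyable type");
    out.resize(byteCount / sizeof(Pod));
    if (!out.empty())
        std::memcpy(out.data(), bytes, out.size() * sizeof(Pod));
}

// Usage with the capnp result from this file:
//   auto idxData = result.getImgIndexData();
//   copyPackedInto(imgIdxes, idxData.begin(), idxData.size());
```

The companion memcpy into _faceImages should likewise be clamped to _faceImagesSize so an oversized reply cannot overflow the buffer.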
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNIServer.cpp
@@ -20,6 +20,7 @@
 #include "TeleWrapper.h"
 #include "cpu_sched_test.h"
+#include "RemoteFuncServer.h"
 
 //#include <mediastreamer2/include/mediastreamer2/msjava.h>
@@ -57,6 +58,7 @@
     memcpy(imgIndexData.begin(), imgIdxes.data(), imgIdxesSize);
     auto imgData = results.initImgData(_faceImagesSize);
     memcpy(imgData.begin(), _faceImages, _faceImagesSize);
+    LOG_ERROR <<"c++ server : remoteGetFaceListImage!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"<< LOG_ENDL;
 }
 
 void remoteGetFaceList(const RtspFaceView::GetFaceListParams::Reader& params, RtspFaceView::GetFaceListResults::Builder& results){
@@ -80,9 +82,13 @@
 void Java_cn_com_basic_face_util_RtspFaceNative_init(JNIEnv *env, jclass clazz)
 {
-    g_logger.set_level(INFO);
+    g_logger.set_level(VERBOSE);
     cpu_sched();
+
+#ifdef USE_ST_SDK
+    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
+#endif
 
     for (size_t i = 0; i < CAMERA_COUNT; i++)
     {
@@ -94,6 +100,9 @@
     }
 
     g_TeleWrapper.start();
+
+    startRemoteServer();
+    LOG_ERROR << "@@@ this is test Server end" << LOG_ENDL;
 }
@@ -354,8 +363,8 @@
         cameraWrapper.asvrConfig.outputOriginFrame = true;
         cameraWrapper.asvrConfig.outputRenderFrame = true;
         cameraWrapper.asvrConfig.scaleToWidth = 0;
-        cameraWrapper.asvrConfig.scaleToHeight = 0;
-        cameraWrapper.asvrConfig.directlyDisplay = true;
+        cameraWrapper.asvrConfig.scaleToHeight = 0;
+        cameraWrapper.asvrConfig.directlyDisplay = true;
     }
 
     {
@@ -374,9 +383,9 @@
         cameraWrapper.sftConfig.score_min = 0.4f;
     }
 
-    cameraWrapper.fontPath = "/data/msyh.ttc";
+    cameraWrapper.fontPath = "/data/msyh.ttc";
 
-    bool ret = cameraWrapper.initPl();
+    bool ret = cameraWrapper.initPl();
     if (ret)
         return (cameraWrapper.start() ? JNI_TRUE : JNI_FALSE);
     else
@@ -594,34 +603,34 @@
 bool Java_cn_com_basic_face_util_RtspFaceNative_teleCall(JNIEnv *env, jclass clazz, jstring phone)
 {
-    std::string _phone;
-    {
-        const char *utfFunc = env->GetStringUTFChars(phone, NULL);
-        _phone = utfFunc;
-        env->ReleaseStringUTFChars(phone, utfFunc);
-    }
+    std::string _phone;
+    {
+        const char *utfFunc = env->GetStringUTFChars(phone, NULL);
+        _phone = utfFunc;
+        env->ReleaseStringUTFChars(phone, utfFunc);
+    }
 
-    const char *phoneNum = _phone.c_str();
-    TeleTask task;
-    task.command= TeleTask::CALL;
-    task.param = phoneNum;
-    LOG_INFO << "Java_cn_com_basic_face_util_RtspFaceNative_telCall::jni" << LOG_ENDL;
-    // g_TeleWrapper.pushTask(task);
+    const char *phoneNum = _phone.c_str();
+    TeleTask task;
+    task.command= TeleTask::CALL;
+    task.param = phoneNum;
+    LOG_INFO << "Java_cn_com_basic_face_util_RtspFaceNative_telCall::jni" << LOG_ENDL;
+    // g_TeleWrapper.pushTask(task);
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_teleHang(JNIEnv *env, jclass clazz)
 {
-    TeleTask task;
-    task.command =TeleTask::HANGUP;
-    // g_TeleWrapper.pushTask(task);
+    TeleTask task;
+    task.command =TeleTask::HANGUP;
+    // g_TeleWrapper.pushTask(task);
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_teleShutdown(JNIEnv *env, jclass clazz)
 {
-    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL;
-    g_TeleWrapper.stop();
+    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL;
+    g_TeleWrapper.stop();
 }
 
 void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label)
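Both JNI translation units hand-roll GetStringUTFChars / ReleaseStringUTFChars (teleCall above) and GetStringChars / ReleaseStringChars (setFaceLabel). A small RAII wrapper, sketched here rather than taken from the repo, keeps the release call from being lost on early returns:

```cpp
#include <jni.h>
#include <string>

// Sketch: scoped wrapper so ReleaseStringUTFChars always runs.
class ScopedUtfChars
{
public:
    ScopedUtfChars(JNIEnv* env, jstring s)
        : env_(env), jstr_(s), chars_(env->GetStringUTFChars(s, nullptr)) {}
    ~ScopedUtfChars() { if (chars_) env_->ReleaseStringUTFChars(jstr_, chars_); }

    ScopedUtfChars(const ScopedUtfChars&) = delete;
    ScopedUtfChars& operator=(const ScopedUtfChars&) = delete;

    const char* c_str() const { return chars_; }

private:
    JNIEnv*     env_;
    jstring     jstr_;
    const char* chars_;
};

// Usage in a function like teleCall:
//   ScopedUtfChars phoneChars(env, phone);
//   std::string _phone = phoneChars.c_str() ? phoneChars.c_str() : "";
```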
VisitFace/RtspNativeCodec/app/src/main/java/com/example/nativecodec/NativeCodec.java
@@ -95,13 +95,17 @@
         RtspFaceNative.init();
 
         for (int i=0 ;i < 32; i+=2){
-            RtspFaceNative.setFaceLabel(1, i, "中文名字");
-            RtspFaceNative.setFaceLabel(1, i+1, "abcd");
-            RtspFaceNative.setFaceLabel(2, i, "中文名字");
-            RtspFaceNative.setFaceLabel(2, i+1, "abcd");
+//            RtspFaceNative.setFaceLabel(1, i, "中文名字");
+//            RtspFaceNative.setFaceLabel(1, i+1, "abcd");
+//            RtspFaceNative.setFaceLabel(2, i, "中文名字");
+//            RtspFaceNative.setFaceLabel(2, i+1, "abcd");
         }
 
-        RtspFaceNative.setLocalIP("192.168.1.93"); //server
+        RtspFaceNative.setLocalIP("192.168.1.94"); //client
+        //RtspFaceNative.setLocalIP("192.168.1.97");
 
         mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
@@ -294,9 +298,10 @@
             //mCreated = createStreamingMediaPlayer(getResources().getAssets(), mSourceString);
             //#todo ok
             //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin@192.168.1.188:554/cam/realmonitor?channel=1&subtype=2");
-            mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin12345@192.168.1.202:554/h264/ch1/main/av_stream");
+            //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin12345@192.168.1.202:554/h264/ch1/main/av_stream");
             //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.22/h264");
-            //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.201:554/h264/ch1/main/av_stream");
+            mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.201:554/h264/ch1/main/av_stream");
             //mCreated = RtspFaceNative.createPlayer(1, "rtsp://192.168.1.93:8554/");
             //mCreated = createPlayer(1, "rtsp://192.168.1.56:8554");
             //byte[] arr = new byte[5];
@@ -492,8 +497,8 @@
     /** Load jni .so on initialization */
     static {
-        System.loadLibrary("rtspface_client");
-        //System.loadLibrary("rtspface_server");
+        // System.loadLibrary("rtspface_client");
+        System.loadLibrary("rtspface_server");
 
         //System.loadLibrary("opencv_java3");
     }