From 4b914a5d7e3d7971cb3e3ed49047fa331bd74da3 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Tue, 13 Jun 2017 17:19:34 +0800
Subject: [PATCH] milestone 1

---
 VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp | 106 ++++++++++++++++++++++++++++++++++++++--------------
 1 file changed, 77 insertions(+), 29 deletions(-)

diff --git a/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp b/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
index fa09ee6..2607ae2 100644
--- a/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
+++ b/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
@@ -24,6 +24,8 @@
 //std::fstream logss("/storage/emulated/0/log.log", std::fstream::out);
 Logger g_logger(logss);
 
+static std::string g_stface_license_str;
+
 CameraWrapper g_CameraWrappers[CAMERA_COUNT];
 
 extern "C"
@@ -40,9 +42,11 @@
     for (size_t i = 0; i < CAMERA_COUNT; i++)
     {
         g_CameraWrappers[i].cameraIdx = i + 1;
-
-        g_CameraWrappers[i].pipeLine = new PipeLine;
-        PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine));
+
+        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
+        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
+        g_CameraWrappers[i].pipeLineRender = new PipeLine;
+        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
     }
 }
 
@@ -60,17 +64,17 @@
     env->ReleaseStringUTFChars(ipaddr, utf8);
 }
 
-static jclass _jcOutputSurface = nullptr;
-static jmethodID _jmOutputSurface_ctor = nullptr;
-static jmethodID _jmOutputSurface_getSurface = nullptr;
-static jmethodID _jmOutputSurface_readPixels = nullptr;
-static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
-static jmethodID _jmOutputSurface_drawImage = nullptr;
-static jmethodID _jmOutputSurface_makeCurrent = nullptr;
-
-static jobject _jobjOutputSurface = nullptr;
-static jobject _jobjSurface = nullptr;
-static JNIEnv *_env = nullptr;
+//static jclass _jcOutputSurface = nullptr;
+//static jmethodID _jmOutputSurface_ctor = nullptr;
+//static jmethodID _jmOutputSurface_getSurface = nullptr;
+//static jmethodID _jmOutputSurface_readPixels = nullptr;
+//static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
+//static jmethodID _jmOutputSurface_drawImage = nullptr;
+//static jmethodID _jmOutputSurface_makeCurrent = nullptr;
+//
+//static jobject _jobjOutputSurface = nullptr;
+//static jobject _jobjSurface = nullptr;
+//static JNIEnv *_env = nullptr;
 
 void readPixels(void* surface, uint8_t* buf, size_t& maxSize)
 {
@@ -192,14 +196,14 @@
     CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);
 
     // obtain a native windowRender from a Java surface
-    //if (cameraWrapper.windowDecode != nullptr)
-    //{
-    //    ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
-    //    cameraWrapper.windowDecode = NULL;
-    //}
-    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceDecoder);
-    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceDecoder);
-    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceDecoder));
+    if (cameraWrapper.windowDecode != nullptr)
+    {
+        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
+        cameraWrapper.windowDecode = NULL;
+    }
+    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceRender);
+    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceRender);
+    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));
 
     if (cameraWrapper.windowRender != nullptr)
     {
@@ -207,6 +211,7 @@
         cameraWrapper.windowRender = NULL;
     }
     cameraWrapper.windowRender = ANativeWindow_fromSurface(env, surfaceRender);
+    cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender; // support reset
     //cameraWrapper.windowRender = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));
 
     LOGP(DEBUG, "@@@ setsurface %p,%p", cameraWrapper.windowDecode, cameraWrapper.windowRender);
@@ -220,22 +225,49 @@
 
     CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);
 
+    int w=0,h=0;//#todo
+
     {
         const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
         cameraWrapper.rtspConfig.rtspURL = utf8Uri;
        env->ReleaseStringUTFChars(uri, utf8Uri);
 
-        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
+        if(cameraWrapper.rtspConfig.rtspURL.find("/main/") != std::string::npos || cameraWrapper.rtspConfig.rtspURL.find("subtype=0") != std::string::npos)//#todo temp use
+        {
+            w=1920;h=1080;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264/") != std::string::npos && cameraWrapper.rtspConfig.rtspURL.find("/sub/") != std::string::npos)
+        {
+            w=640;h=480;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264_2") != std::string::npos)
+        {
+            w=640;h=480;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264") != std::string::npos)
+        {
+            w = 1920;h = 1080;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("subtype=2") != std::string::npos)
+        {
+            w=1280;h=720;
+        }
+        else
+        {
+            w=640;h=480;
+        }
+
+        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
         cameraWrapper.rtspConfig.aux = true; // ffmpeg need aux, but live555 not
         cameraWrapper.rtspConfig.verbosityLevel = 1;
         cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0;
-        cameraWrapper.rtspConfig.args = nullptr;
+        cameraWrapper.rtspConfig.args = nullptr;
     }
 
     {
         cameraWrapper.amcdConfig.ak_mime = "video/avc";
-        cameraWrapper.amcdConfig.ak_width = 1920;
-        cameraWrapper.amcdConfig.ak_height = 1080; // nvida yuv420p,1088; Amlogic yuv420p,1080
+        cameraWrapper.amcdConfig.ak_width = w;
+        cameraWrapper.amcdConfig.ak_height = h; // nvida yuv420p,1088; Amlogic yuv420p,1080; RK3288 NV12
         //cameraWrapper.amcdConfig.windowSurfaceDecode = cameraWrapper.windowDecode; //cameraWrapper.windowDecode;
         //cameraWrapper.amcdConfig.windowSurfaceRender = cameraWrapper.windowRender; //cameraWrapper.windowRender;
         cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;
@@ -254,8 +286,8 @@
         cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender;
         cameraWrapper.asvrConfig.outputOriginFrame = true;
         cameraWrapper.asvrConfig.outputRenderFrame = true;
-        cameraWrapper.asvrConfig.scaleToWidth = 640;
-        cameraWrapper.asvrConfig.scaleToHeight = 480;
+        cameraWrapper.asvrConfig.scaleToWidth = 0;
+        cameraWrapper.asvrConfig.scaleToHeight = 0;
     }
 
     {
@@ -264,9 +296,14 @@
         cameraWrapper.sftConfig.draw_face_rect = false;
         cameraWrapper.sftConfig.draw_face_feature_point = false;
         cameraWrapper.sftConfig.generate_face_feature = true;
+        cameraWrapper.sftConfig.generate_face_point = false;
         cameraWrapper.sftConfig.explode_feature_rect_x = 30;
         cameraWrapper.sftConfig.explode_feature_rect_y = 40;
-        cameraWrapper.sftConfig.doTrackPerFrame = 50;
+        cameraWrapper.sftConfig.clamp_feature_rect = true;
+        cameraWrapper.sftConfig.doTrackPerFrame = 1;
+        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
+        cameraWrapper.sftConfig.visionConeAngle = 35.0f;
+        cameraWrapper.sftConfig.score_min = 0.2f;
     }
 
     bool ret = cameraWrapper.initPl();
@@ -601,4 +638,15 @@
 //    dbgReadTexture();
 //}
 
+void Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense(JNIEnv *env, jclass clazz, jstring lic)
+{
+    LOG_WARN << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense" << LOG_ENDL;
+
+    const char *utfLic = env->GetStringUTFChars(lic, NULL);
+    g_stface_license_str = utfLic;
+    env->ReleaseStringUTFChars(lic, utfLic);
+
+    //LOG_ERROR << g_stface_license_str << LOG_ERROR;
+}
+
 }
--
Gitblit v1.8.0
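
Editorial note (not part of the patch): the largest hunk above guesses the decoder resolution from substrings of the RTSP URL, branch by branch, and feeds the result into amcdConfig.ak_width / ak_height; the commit itself flags this as "#todo temp use". Purely for reference, the sketch below collects the same ordered checks into one standalone helper. The helper name guessResolutionFromUrl and its free-standing form are hypothetical; the width/height pairs simply mirror the values hard-coded in the commit.

// Sketch only -- not part of the commit. Same check order as the patch;
// guessResolutionFromUrl is a hypothetical name introduced here.
#include <string>
#include <utility>

static std::pair<int, int> guessResolutionFromUrl(const std::string& url)
{
    // URLs carrying "/main/" or "subtype=0" are treated as the 1080p main stream.
    if (url.find("/main/") != std::string::npos || url.find("subtype=0") != std::string::npos)
        return {1920, 1080};
    // "/h264/.../sub/" and "/h264_2" are treated as VGA sub streams.
    if (url.find("/h264/") != std::string::npos && url.find("/sub/") != std::string::npos)
        return {640, 480};
    if (url.find("/h264_2") != std::string::npos)
        return {640, 480};
    if (url.find("/h264") != std::string::npos)
        return {1920, 1080};
    if (url.find("subtype=2") != std::string::npos)
        return {1280, 720};
    return {640, 480}; // fallback, same as the patch's else branch
}

A call site matching the patch could then read, for example, "auto wh = guessResolutionFromUrl(cameraWrapper.rtspConfig.rtspURL); w = wh.first; h = wh.second;", keeping the URL-to-resolution assumption in one place until the real stream dimensions are taken from the decoder.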