From 4b914a5d7e3d7971cb3e3ed49047fa331bd74da3 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Tue, 13 Jun 2017 17:19:34 +0800
Subject: [PATCH] milestone 1

---
 VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp | 254 ++++++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 230 insertions(+), 24 deletions(-)

diff --git a/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp b/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
index 834affc..2607ae2 100644
--- a/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
+++ b/VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNI.cpp
@@ -17,10 +17,14 @@
 #include "DebugNetwork.h"
 
+//#include <mediastreamer2/include/mediastreamer2/msjava.h>
+
 std::stringstream logss;
 //std::fstream logss("/storage/sdcard/log.log", std::fstream::out);
 //std::fstream logss("/storage/emulated/0/log.log", std::fstream::out);
 Logger g_logger(logss);
+
+static std::string g_stface_license_str;
 
 CameraWrapper g_CameraWrappers[CAMERA_COUNT];
 
@@ -38,9 +42,11 @@
     for (size_t i = 0; i < CAMERA_COUNT; i++)
     {
         g_CameraWrappers[i].cameraIdx = i + 1;
-
-        g_CameraWrappers[i].pipeLine = new PipeLine;
-        PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine));
+
+        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
+        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
+        g_CameraWrappers[i].pipeLineRender = new PipeLine;
+        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
     }
 }
 
@@ -58,8 +64,130 @@
     env->ReleaseStringUTFChars(ipaddr, utf8);
 }
 
+//static jclass _jcOutputSurface = nullptr;
+//static jmethodID _jmOutputSurface_ctor = nullptr;
+//static jmethodID _jmOutputSurface_getSurface = nullptr;
+//static jmethodID _jmOutputSurface_readPixels = nullptr;
+//static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
+//static jmethodID _jmOutputSurface_drawImage = nullptr;
+//static jmethodID _jmOutputSurface_makeCurrent = nullptr;
+//
+//static jobject _jobjOutputSurface = nullptr;
+//static jobject _jobjSurface = nullptr;
+//static JNIEnv *_env = nullptr;
+
+void readPixels(void* surface, uint8_t* buf, size_t& maxSize)
+{
+    //ANativeWindow* window = (ANativeWindow*)surface;
+
+    CameraWrapper& cameraWrapper(g_CameraWrappers[0]);
+
+    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
+    if (getEnvStat == JNI_EDETACHED)
+    {
+        //LOG_WARN << "GetEnv: not attached" << std::endl;
+        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
+            LOG_WARN << "Failed to attach" << LOG_ENDL;
+        else
+            getEnvStat = JNI_OK;
+    }
+    else if (getEnvStat == JNI_OK)
+    {
+    }
+    else if (getEnvStat == JNI_EVERSION)
+        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
+    else if (getEnvStat == JNI_ERR)
+        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
+
+    if (maxSize==0)
+    {
+        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_makeCurrent);
+    }
+    else
+    {
+        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_awaitNewImage);
+        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_drawImage);
+//
+        //jbyteArray jbarrBuffer = (jbyteArray)cameraWrapper.javaEnv->CallObjectMethod(_jobjOutputSurface, _jmOutputSurface_readPixels);
+        //if (jbarrBuffer != nullptr)
+        //    cameraWrapper.javaEnv->GetByteArrayRegion(jbarrBuffer, 0, 640 * 480 * 2, (jbyte*)buf);
+    }
+
+    if (cameraWrapper.javaEnv->ExceptionCheck())
+        cameraWrapper.javaEnv->ExceptionDescribe();
+
+    cameraWrapper.javaVM->DetachCurrentThread();
+}
+
+
+/*
+static void* createDecodeSurface()
+{
+    CameraWrapper& cameraWrapper(g_CameraWrappers[0]);
+
+    //OutputSurface outputSurface = null;
+    //outputSurface = new OutputSurface(mWidth, mHeight);
+    //outputSurface.getSurface();
+    //outputSurface.release();
+
+
+    //int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
+    //if (getEnvStat == JNI_EDETACHED)
+    //{
+    //    //LOG_WARN << "GetEnv: not attached" << std::endl;
+    //    if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
+    //        LOG_WARN << "Failed to attach" << LOG_ENDL;
+    //    else
+    //        getEnvStat = JNI_OK;
+    //}
+    //else if (getEnvStat == JNI_OK)
+    //{
+    //}
+    //else if (getEnvStat == JNI_EVERSION)
+    //    LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
+    //else if (getEnvStat == JNI_ERR)
+    //    LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
+
+    //void* ret = nullptr;
+    //{
+    //    jclass jcOutputSurface = _jcOutputSurface = cameraWrapper.javaEnv->FindClass("com/example/nativecodec/OutputSurface");
+    //    _jcOutputSurface = static_cast<jclass>( cameraWrapper.javaEnv->NewGlobalRef( jcOutputSurface ));
+//
+    //    jmethodID jmOutputSurface_ctor = _jmOutputSurface_ctor = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "<init>", "(II)V");
+    //    jmethodID jmOutputSurface_getSurface = _jmOutputSurface_getSurface = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "getSurface", "()Landroid/view/Surface;");
+    //    jmethodID jmOutputSurface_readPixels = _jmOutputSurface_readPixels = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "readPixels", "()[B");
+//
+    //    jmethodID jmOutputSurface_awaitNewImage = _jmOutputSurface_awaitNewImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "awaitNewImage", "()V");
+    //    jmethodID jmOutputSurface_drawImage = _jmOutputSurface_drawImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "drawImage", "()V");
+    //    jmethodID jmOutputSurface_makeCurrent = _jmOutputSurface_makeCurrent = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "makeCurrent", "()V");
+//
+    //    jobject jobjOutputSurface = _jobjOutputSurface = cameraWrapper.javaEnv->NewObject(jcOutputSurface , jmOutputSurface_ctor, 1920, 1088);
+    //    _jobjOutputSurface = static_cast<jobject>( cameraWrapper.javaEnv->NewGlobalRef( jobjOutputSurface ));
+//
+    //    jobject jobjSurface = _jobjSurface = cameraWrapper.javaEnv->CallObjectMethod(jobjOutputSurface, jmOutputSurface_getSurface);
+    //    ret = ANativeWindow_fromSurface(cameraWrapper.javaEnv, jobjSurface);
+    //    cameraWrapper.amcdConfig.readPixels_callback = readPixels;
+    //}
+
+    //if (cameraWrapper.javaEnv->ExceptionCheck())
+    //    cameraWrapper.javaEnv->ExceptionDescribe();
+//
+    //cameraWrapper.javaVM->DetachCurrentThread();
+
+    //return ret;
+}
+*/
+
+static void* getSurfaceHolderGetSurface(JNIEnv* env, jobject jobjSurfaceHolder)
+{
+    jclass jcSurfaceHolder = env->FindClass("android/view/SurfaceHolder");
+    jmethodID jmSurfaceHolder_getSurface = env->GetMethodID(jcSurfaceHolder, "getSurface", "()Landroid/view/Surface;");
+    jobject surface = env->CallObjectMethod(jobjSurfaceHolder, jmSurfaceHolder_getSurface);
+    return surface;
+}
+
 // set the surface
-void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surface)
+void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surfaceRender)
 {
     LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setSurface" << LOG_ENDL;
     assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
@@ -67,14 +195,26 @@
     CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);
 
-    // obtain a native window from a Java surface
-    if (cameraWrapper.window)
-    {
-        ANativeWindow_release((ANativeWindow*)(cameraWrapper.window));
-        cameraWrapper.window = NULL;
+    // obtain a native windowRender from a Java surface
+    if (cameraWrapper.windowDecode != nullptr)
+    {
+        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
+        cameraWrapper.windowDecode = NULL;
     }
-    cameraWrapper.window = ANativeWindow_fromSurface(env, surface);
-    LOGP(DEBUG, "@@@ setsurface %p", cameraWrapper.window);
+    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceRender);
+    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceRender);
+    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));
+
+    if (cameraWrapper.windowRender != nullptr)
+    {
+        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowRender));
+        cameraWrapper.windowRender = NULL;
+    }
+    cameraWrapper.windowRender = ANativeWindow_fromSurface(env, surfaceRender);
+    cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender; // support reset
+    //cameraWrapper.windowRender = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));
+
+    LOGP(DEBUG, "@@@ setsurface %p,%p", cameraWrapper.windowDecode, cameraWrapper.windowRender);
 }
 
 jboolean Java_cn_com_basic_face_util_RtspFaceNative_createPlayer(JNIEnv* env, jclass clazz, jint cameraIdx, jstring uri)
@@ -85,36 +225,69 @@
     CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);
 
+    int w=0,h=0;//#todo
+
     {
         const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
         cameraWrapper.rtspConfig.rtspURL = utf8Uri;
         env->ReleaseStringUTFChars(uri, utf8Uri);
 
-        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
+        if(cameraWrapper.rtspConfig.rtspURL.find("/main/") != std::string::npos || cameraWrapper.rtspConfig.rtspURL.find("subtype=0") != std::string::npos)//#todo temp use
+        {
+            w=1920;h=1080;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264/") != std::string::npos && cameraWrapper.rtspConfig.rtspURL.find("/sub/") != std::string::npos)
+        {
+            w=640;h=480;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264_2") != std::string::npos)
+        {
+            w=640;h=480;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264") != std::string::npos)
+        {
+            w = 1920;h = 1080;
+        }
+        else if (cameraWrapper.rtspConfig.rtspURL.find("subtype=2") != std::string::npos)
+        {
+            w=1280;h=720;
+        }
+        else
+        {
+            w=640;h=480;
+        }
+
+        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
         cameraWrapper.rtspConfig.aux = true; // ffmpeg need aux, but live555 not
         cameraWrapper.rtspConfig.verbosityLevel = 1;
         cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0;
-        cameraWrapper.rtspConfig.args = nullptr;
+        cameraWrapper.rtspConfig.args = nullptr;
     }
 
     {
         cameraWrapper.amcdConfig.ak_mime = "video/avc";
-        cameraWrapper.amcdConfig.ak_width = 640;
-        cameraWrapper.amcdConfig.ak_height = 480;
-        cameraWrapper.amcdConfig.windowSurface = nullptr; // cameraWrapper.window
-        cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;
+        cameraWrapper.amcdConfig.ak_width = w;
+        cameraWrapper.amcdConfig.ak_height = h; // nvida yuv420p,1088; Amlogic yuv420p,1080; RK3288 NV12
+        //cameraWrapper.amcdConfig.windowSurfaceDecode = cameraWrapper.windowDecode; //cameraWrapper.windowDecode;
+        //cameraWrapper.amcdConfig.windowSurfaceRender = cameraWrapper.windowRender; //cameraWrapper.windowRender;
+        cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;
+        cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = false;
         cameraWrapper.amcdConfig.generateDecodedDataPerFrame = 1;
-        cameraWrapper.amcdConfig.renderFromOutputBuffIdx = false;//true
-        cameraWrapper.amcdConfig.renderFromOutputBuff = false;
+
+        //cameraWrapper.amcdConfig.createSurface_callback = createDecodeSurface;
+        //cameraWrapper.amcdConfig.readPixels_callback = readPixels;
+
 #ifndef USE_ST_SDK
-        cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = true;
+cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = true;//#todo
 #endif
     }
 
     {
-        cameraWrapper.asvrConfig.windowSurface = cameraWrapper.window;
+        cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender;
         cameraWrapper.asvrConfig.outputOriginFrame = true;
         cameraWrapper.asvrConfig.outputRenderFrame = true;
+        cameraWrapper.asvrConfig.scaleToWidth = 0;
+        cameraWrapper.asvrConfig.scaleToHeight = 0;
     }
 
     {
@@ -122,8 +295,15 @@
        cameraWrapper.sftConfig.detect_face_cnt_limit = MAX_FACE;
         cameraWrapper.sftConfig.draw_face_rect = false;
         cameraWrapper.sftConfig.draw_face_feature_point = false;
-        cameraWrapper.sftConfig.generate_face_feature = true;
-        cameraWrapper.sftConfig.doTrackPerFrame = 50;
+        cameraWrapper.sftConfig.generate_face_feature = true;
+        cameraWrapper.sftConfig.generate_face_point = false;
+        cameraWrapper.sftConfig.explode_feature_rect_x = 30;
+        cameraWrapper.sftConfig.explode_feature_rect_y = 40;
+        cameraWrapper.sftConfig.clamp_feature_rect = true;
+        cameraWrapper.sftConfig.doTrackPerFrame = 1;
+        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
+        cameraWrapper.sftConfig.visionConeAngle = 35.0f;
+        cameraWrapper.sftConfig.score_min = 0.2f;
     }
 
     bool ret = cameraWrapper.initPl();
@@ -144,6 +324,8 @@
     cameraWrapper.stop();
 }
 
+
+//#todo remove
 //see: https://github.com/BelledonneCommunications/mediastreamer2/blob/master/src/android/android_mediacodec.cpp
 static int handle_java_exception(JNIEnv *env) {
     if (env->ExceptionCheck()) {
@@ -153,7 +335,7 @@
     }
     return 0;
 }
-
+//#todo remove
 static bool _loadClass(JNIEnv *env, const char *className, jclass *_class) {
     *_class = env->FindClass(className);
     if(handle_java_exception(env) == -1 || *_class == NULL) {
@@ -163,6 +345,7 @@
     return true;
 }
 
+//#todo remove
 static bool _getStaticMethodID(JNIEnv *env, jclass _class, const char *name, const char *sig, jmethodID *method) {
     *method = env->GetStaticMethodID(_class, name, sig);
     if(handle_java_exception(env) == -1 || *method == NULL) {
@@ -184,6 +367,8 @@
     // set for multithread callback
     env->GetJavaVM(&(cameraWrapper.javaVM));
     cameraWrapper.javaEnv = env;
+
+    //ms_set_jvm(cameraWrapper.javaVM);
 
     std::string _className;
     {
@@ -443,4 +628,25 @@
 #endif
 }
 
+jstring Java_cn_com_basic_face_util_RtspFaceNative_dbgGetLog(JNIEnv *env, jclass clazz)
+{
+    return env->NewStringUTF(g_dbgLog.c_str());
+}
+
+//void Java_cn_com_basic_face_util_RtspFaceNative_dbgReadTexture(JNIEnv* env, jclass clazz)
+//{
+//    dbgReadTexture();
+//}
+
+void Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense(JNIEnv *env, jclass clazz, jstring lic)
+{
+    LOG_WARN << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense" << LOG_ENDL;
+
+    const char *utfLic = env->GetStringUTFChars(lic, NULL);
+    g_stface_license_str = utfLic;
+    env->ReleaseStringUTFChars(lic, utfLic);
+
+    //LOG_ERROR << g_stface_license_str << LOG_ERROR;
+}
+
 }
--
Gitblit v1.8.0
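
Note on the JNI threading pattern in this patch: readPixels() is meant to be called from a native (non-Java-created) thread, so before touching any Java object it obtains a JNIEnv through JavaVM::GetEnv, falls back to JavaVM::AttachCurrentThread when the thread is not yet attached, and detaches the thread when it is done; the cached JavaVM* comes from env->GetJavaVM(&(cameraWrapper.javaVM)) in createPlayer ("set for multithread callback"). The sketch below is not code from this repository; it is a minimal illustration of that same pattern as an RAII helper, assuming an Android JNI build (jni.h, JNI_VERSION_1_6). The name ScopedJniEnv is hypothetical.

#include <jni.h>

// Minimal sketch, not project code: obtain a JNIEnv for the current native
// thread, attaching it to the VM if needed, and detach again at end of scope.
struct ScopedJniEnv
{
    JavaVM* vm;
    JNIEnv* env = nullptr;
    bool attached = false;

    explicit ScopedJniEnv(JavaVM* vmIn) : vm(vmIn)
    {
        // Ask the VM for the JNIEnv already bound to this thread, if any.
        jint rc = vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6);
        if (rc == JNI_EDETACHED)
        {
            // Typical for a decoder/callback thread: attach it now.
            if (vm->AttachCurrentThread(&env, nullptr) == JNI_OK)
                attached = true;
            else
                env = nullptr; // attach failed; caller must check env
        }
        else if (rc != JNI_OK)
        {
            env = nullptr; // e.g. JNI_EVERSION
        }
    }

    ~ScopedJniEnv()
    {
        // Detach only threads this helper attached itself, so threads that were
        // already attached to the VM keep their attachment.
        if (attached)
            vm->DetachCurrentThread();
    }
};

A callback would use it as: ScopedJniEnv jni(cameraWrapper.javaVM); if (jni.env != nullptr) { /* call into Java, then ExceptionCheck() */ }. Detaching only self-attached threads is the main design choice here; readPixels() in the patch detaches unconditionally, which also detaches threads that were attached before the call.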