#include "RtspNativeCodecJNI.h" #include "CameraWrapper.h" #include #include //#include "looper.h" #include #include #include #include std::stringstream logss; //std::fstream logss("/storage/sdcard/log.log", std::fstream::out); //std::fstream logss("/storage/emulated/0/log.log", std::fstream::out); Logger g_logger(logss); CameraWrapper g_CameraWrappers[CAMERA_COUNT]; extern "C" { void Java_cn_com_basic_face_util_RtspFaceNative_init(JNIEnv *env, jclass clazz) { g_logger.set_level(VERBOSE); PipeLine::register_global_elem_creator("PL_RTSPClient", create_PL_RTSPClient); PipeLine::register_global_elem_creator("PL_AndroidMediaCodecDecoder", create_PL_AndroidMediaCodecDecoder); #ifdef USE_ST_SDK PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack); #endif for (size_t i = 0; i < CAMERA_COUNT; i++) { g_CameraWrappers[i].cameraIdx = i + 1; g_CameraWrappers[i].pipeLine = new PipeLine; PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine)); } } // set the surface void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surface) { LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setSurface" << LOG_ENDL; assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT); cameraIdx -= 1; CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]); // obtain a native window from a Java surface if (cameraWrapper.window) { ANativeWindow_release((ANativeWindow*)(cameraWrapper.window)); cameraWrapper.window = NULL; } cameraWrapper.window = ANativeWindow_fromSurface(env, surface); LOGP(DEBUG, "@@@ setsurface %p", cameraWrapper.window); } jboolean Java_cn_com_basic_face_util_RtspFaceNative_createPlayer(JNIEnv* env, jclass clazz, jint cameraIdx, jstring uri) { LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_createPlayer" << LOG_ENDL; assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT); cameraIdx -= 1; CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]); { const char *utf8Uri = env->GetStringUTFChars(uri, NULL); cameraWrapper.rtspConfig.rtspURL = utf8Uri; env->ReleaseStringUTFChars(uri, utf8Uri); cameraWrapper.rtspConfig.progName = "RtspNativeCodec"; cameraWrapper.rtspConfig.aux = true; // ffmpeg need aux, but live555 not cameraWrapper.rtspConfig.verbosityLevel = 1; cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0; cameraWrapper.rtspConfig.args = nullptr; } { cameraWrapper.amcdConfig.ak_mime = "video/avc"; cameraWrapper.amcdConfig.ak_width = 1920; cameraWrapper.amcdConfig.ak_height = 1080; cameraWrapper.amcdConfig.windowSurface = cameraWrapper.window = nullptr;//#todo cameraWrapper.amcdConfig.releaseOutputBuffIdx = true; cameraWrapper.amcdConfig.generateDecodedDataPerFrame = 1; #ifndef USE_ST_SDK cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = true; #endif } { cameraWrapper.sftConfig.point_size = 21; cameraWrapper.sftConfig.detect_face_cnt_limit = MAX_FACE; cameraWrapper.sftConfig.draw_face_rect = false; cameraWrapper.sftConfig.draw_face_feature_point = false; cameraWrapper.sftConfig.generate_face_feature = true; } bool ret = cameraWrapper.initPl(); if (ret) return (cameraWrapper.start() ? 
    if (ret)
        return (cameraWrapper.start() ? JNI_TRUE : JNI_FALSE);
    else
        return JNI_FALSE;
}

// shut down the native media system
void Java_cn_com_basic_face_util_RtspFaceNative_shutdown(JNIEnv* env, jclass clazz, jint cameraIdx)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_shutdown" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    cameraWrapper.stop();
}

//see: https://github.com/BelledonneCommunications/mediastreamer2/blob/master/src/android/android_mediacodec.cpp
static int handle_java_exception(JNIEnv *env)
{
    if (env->ExceptionCheck())
    {
        env->ExceptionDescribe();
        env->ExceptionClear();
        return -1;
    }
    return 0;
}

static bool _loadClass(JNIEnv *env, const char *className, jclass *_class)
{
    *_class = env->FindClass(className);
    if (handle_java_exception(env) == -1 || *_class == NULL)
    {
        LOGP(ERROR, "Could not load Java class [%s]", className);
        return false;
    }
    return true;
}

static bool _getStaticMethodID(JNIEnv *env, jclass _class, const char *name, const char *sig, jmethodID *method)
{
    *method = env->GetStaticMethodID(_class, name, sig);
    if (handle_java_exception(env) == -1 || *method == NULL)
    {
        LOGP(ERROR, "Could not get static method %s[%s]", name, sig);
        return false;
    }
    return true;
}

jboolean Java_cn_com_basic_face_util_RtspFaceNative_setFaceCallback(JNIEnv* env, jclass clazz, jint cameraIdx, jstring className, jstring funcName)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setFaceCallback" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    //jclass cls = env->GetObjectClass(clazz);

    // set for multithreaded callback
    env->GetJavaVM(&(cameraWrapper.javaVM));
    cameraWrapper.javaEnv = env;

    std::string _className;
    {
        const char *utfFunc = env->GetStringUTFChars(className, NULL);
        _className = utfFunc;
        env->ReleaseStringUTFChars(className, utfFunc);
    }
    std::string _funcName;
    {
        const char *utfFunc = env->GetStringUTFChars(funcName, NULL);
        _funcName = utfFunc;
        env->ReleaseStringUTFChars(funcName, utfFunc);
    }

    if (!_loadClass(env, _className.c_str(), &(cameraWrapper.faceCallbackClazz)))
        return JNI_FALSE;
    cameraWrapper.faceCallbackClazz = static_cast<jclass>(env->NewGlobalRef(cameraWrapper.faceCallbackClazz)); //#todo need release?
    if (!_getStaticMethodID(env, cameraWrapper.faceCallbackClazz, _funcName.c_str(), "(II)V", &(cameraWrapper.faceCallbackFunc)))
        return JNI_FALSE;

    // call test (sync)
    cameraWrapper.javaEnv->CallStaticVoidMethod(cameraWrapper.faceCallbackClazz, cameraWrapper.faceCallbackFunc, cameraWrapper.cameraIdx, 0);

    return JNI_TRUE;
}

void Java_cn_com_basic_face_util_RtspFaceNative_lockFace(JNIEnv* env, jclass clazz, jint cameraIdx)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_lockFace" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    cameraWrapper.lockFace();
}

void Java_cn_com_basic_face_util_RtspFaceNative_releaseFace(JNIEnv* env, jclass clazz, jint cameraIdx)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_releaseFace" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    cameraWrapper.releaseFace();
}

jint Java_cn_com_basic_face_util_RtspFaceNative_getFaceList(JNIEnv* env, jclass clazz, jint cameraIdx, jbyteArray faceListPb)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_getFaceList" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    // Y channel of YUV420p, packed in protobuf
    uint8_t buffer[MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT]; // #todo optimize: avoid copying here, write into the jbyteArray directly
    size_t buffSize = sizeof(buffer);
    const size_t arrSize = env->GetArrayLength(faceListPb);

    bool ret = false;
#ifdef USE_ST_SDK
    ret = cameraWrapper.faceCache.getFaceListPb(buffer, buffSize);
#endif

    if (!ret)
    {
        LOG_INFO << "No face captured" << LOG_ENDL;
        return 0;
    }
    else
    {
        LOG_INFO << "Face captured" << LOG_ENDL;
        buffSize = std::min(buffSize, arrSize);
        env->SetByteArrayRegion(faceListPb, 0, buffSize, (const jbyte*)buffer);
        return buffSize;
    }
}

jint Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages(JNIEnv* env, jclass clazz, jint cameraIdx, jintArray faceImagesIdx, jbyteArray faceImages)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_getFaceImages" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    cameraIdx -= 1;
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    size_t count = 0;
    int _faceImagesIdx[MAX_FACE] = {-1};
    uint8_t _faceImages[MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT];
    size_t _faceImagesSize = sizeof(_faceImages);
    bool ret = false;

    const size_t faceImagesIdxArrSize = env->GetArrayLength(faceImagesIdx); // count of int
    const size_t faceImagesArrSize = env->GetArrayLength(faceImages);

#ifdef USE_ST_SDK
    ret = cameraWrapper.faceCache.getFaceListImage(_faceImagesIdx, count, _faceImages, _faceImagesSize);
#endif

    if (!ret || count == 0)
    {
        LOG_INFO << "No face image captured" << LOG_ENDL;
        return 0;
    }

    int _faceImagesIdxCount = std::min(count, faceImagesIdxArrSize);
    env->SetIntArrayRegion(faceImagesIdx, 0, _faceImagesIdxCount, (const jint*)_faceImagesIdx);

    _faceImagesSize = std::min(_faceImagesSize, faceImagesArrSize);
    env->SetByteArrayRegion(faceImages, 0, _faceImagesSize, (const jbyte*)_faceImages);

    return count;
}

}
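
// A minimal sketch of the Java-side native declarations implied by the JNI function
// names and signatures above. This is an assumption for reference only: the real
// cn.com.basic.face.util.RtspFaceNative class may declare additional members, and the
// callback method name shown is hypothetical (only its "(II)V" signature is fixed here).
//
//   package cn.com.basic.face.util;
//
//   public class RtspFaceNative {
//       public static native void init();
//       public static native void setSurface(int cameraIdx, android.view.Surface surface);
//       public static native boolean createPlayer(int cameraIdx, String uri);
//       public static native void shutdown(int cameraIdx);
//       public static native boolean setFaceCallback(int cameraIdx, String className, String funcName);
//       public static native void lockFace(int cameraIdx);
//       public static native void releaseFace(int cameraIdx);
//       public static native int getFaceList(int cameraIdx, byte[] faceListPb);
//       public static native int getFaceImages(int cameraIdx, int[] faceImagesIdx, byte[] faceImages);
//
//       // The class/method registered via setFaceCallback must expose a static
//       // method matching the "(II)V" signature, e.g.:
//       //   public static void faceCallback(int cameraIdx, int value) { ... }
//   }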