
//std::fstream logss("/storage/emulated/0/log.log", std::fstream::out);
Logger g_logger(logss);

static std::string g_stface_license_str;

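// One wrapper per camera: each CameraWrapper bundles the per-camera pipelines and
// configuration used below (CAMERA_COUNT is assumed to be defined elsewhere in this module).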
CameraWrapper g_CameraWrappers[CAMERA_COUNT];

extern "C"

    for (size_t i = 0; i < CAMERA_COUNT; i++)
    {
        g_CameraWrappers[i].cameraIdx = i + 1;

        g_CameraWrappers[i].pipeLine = new PipeLine;
        PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine));

        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
        g_CameraWrappers[i].pipeLineRender = new PipeLine;
        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
    }
}


    env->ReleaseStringUTFChars(ipaddr, utf8);
}

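// Cached JNI handles for the Java-side OutputSurface helper: its class, constructor and
// method IDs, plus the live objects and the JNIEnv they were obtained on. Judging by the
// method names (getSurface/awaitNewImage/drawImage/readPixels/makeCurrent), this is
// presumably a SurfaceTexture-backed OutputSurface used to read decoded frames back from
// the GPU.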
static jclass _jcOutputSurface = nullptr;
static jmethodID _jmOutputSurface_ctor = nullptr;
static jmethodID _jmOutputSurface_getSurface = nullptr;
static jmethodID _jmOutputSurface_readPixels = nullptr;
static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
static jmethodID _jmOutputSurface_drawImage = nullptr;
static jmethodID _jmOutputSurface_makeCurrent = nullptr;

static jobject _jobjOutputSurface = nullptr;
static jobject _jobjSurface = nullptr;
static JNIEnv *_env = nullptr;

void readPixels(void* surface, uint8_t* buf, size_t& maxSize)
{
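    // Copies the pixels of the most recently rendered frame on the given surface into buf;
    // maxSize presumably carries the buffer capacity in and the number of bytes written out
    // (inferred from the signature and the cached _jmOutputSurface_readPixels above).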

    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    // obtain native windows from the Java surfaces
    //if (cameraWrapper.windowDecode != nullptr)
    //{
    //    ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
    //    cameraWrapper.windowDecode = NULL;
    //}
    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceDecoder);
    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceDecoder);
    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceDecoder));
    if (cameraWrapper.windowDecode != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
        cameraWrapper.windowDecode = NULL;
    }
    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    if (cameraWrapper.windowRender != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowRender));
        cameraWrapper.windowRender = NULL;
    }
    cameraWrapper.windowRender = ANativeWindow_fromSurface(env, surfaceRender);
    cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender; // support reset
    //cameraWrapper.windowRender = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    LOGP(DEBUG, "@@@ setsurface %p,%p", cameraWrapper.windowDecode, cameraWrapper.windowRender);
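    // Note: ANativeWindow_fromSurface acquires a reference on the native window, which is
    // why any previously held window is released above before a new one is obtained;
    // skipping ANativeWindow_release here would leak a window reference on every reset.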


    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    int w = 0, h = 0; //#todo

    {
        const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
        cameraWrapper.rtspConfig.rtspURL = utf8Uri;
        env->ReleaseStringUTFChars(uri, utf8Uri);

        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
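        // Guess the stream resolution from the RTSP URL before any SDP/codec information is
        // available: "/main/" or "subtype=0" is treated as the 1080p main stream, "/sub/"
        // style paths and "subtype=2" as lower-resolution sub streams, and anything
        // unrecognized falls back to 640x480. The patterns look like common IP-camera URL
        // conventions (assumption based on the strings below).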
        if (cameraWrapper.rtspConfig.rtspURL.find("/main/") != std::string::npos || cameraWrapper.rtspConfig.rtspURL.find("subtype=0") != std::string::npos) //#todo temp use
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264/") != std::string::npos && cameraWrapper.rtspConfig.rtspURL.find("/sub/") != std::string::npos)
        {
            w = 640; h = 480;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264_2") != std::string::npos)
        {
            w = 640; h = 480;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264") != std::string::npos)
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("subtype=2") != std::string::npos)
        {
            w = 1280; h = 720;
        }
        else
        {
            w = 640; h = 480;
        }

        cameraWrapper.rtspConfig.aux = true; // FFmpeg needs the aux data, but live555 does not
        cameraWrapper.rtspConfig.verbosityLevel = 1;
        cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0;
        cameraWrapper.rtspConfig.args = nullptr;
    }

    {
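        // Android MediaCodec decoder configuration: "video/avc" is the MediaCodec MIME type
        // for H.264, and the width/height guessed from the URL above are supplied because the
        // decoder's video input format needs dimensions before decoding starts (how the
        // decoder stage consumes these fields is assumed from context).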
        cameraWrapper.amcdConfig.ak_mime = "video/avc";
        cameraWrapper.amcdConfig.ak_width = w;
        cameraWrapper.amcdConfig.ak_height = h; // NVIDIA yuv420p, 1088; Amlogic yuv420p, 1080; RK3288 NV12
        //cameraWrapper.amcdConfig.windowSurfaceDecode = cameraWrapper.windowDecode;
        //cameraWrapper.amcdConfig.windowSurfaceRender = cameraWrapper.windowRender;
        cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;

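        // Renderer/scaler configuration: frames are rendered into the ANativeWindow obtained
        // from the render surface above, the original and rendered frames are both passed
        // downstream, and a scaleToWidth/scaleToHeight of 0 presumably disables scaling.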
        cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender;
        cameraWrapper.asvrConfig.outputOriginFrame = true;
        cameraWrapper.asvrConfig.outputRenderFrame = true;
        cameraWrapper.asvrConfig.scaleToWidth = 0;
        cameraWrapper.asvrConfig.scaleToHeight = 0;
    }

    {
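        // Face tracking/feature configuration (presumably the SenseTime "STFace" SDK, given
        // the sftConfig/license naming): feature vectors are generated but rectangle and
        // landmark drawing is disabled, and doTrackPerFrame = 1 presumably runs the tracker
        // on every frame.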
        cameraWrapper.sftConfig.draw_face_rect = false;
        cameraWrapper.sftConfig.draw_face_feature_point = false;
        cameraWrapper.sftConfig.generate_face_feature = true;
        cameraWrapper.sftConfig.generate_face_point = false;
        cameraWrapper.sftConfig.explode_feature_rect_x = 30;
        cameraWrapper.sftConfig.explode_feature_rect_y = 40;
        cameraWrapper.sftConfig.clamp_feature_rect = true;
        cameraWrapper.sftConfig.doTrackPerFrame = 1;
        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
        cameraWrapper.sftConfig.visionConeAngle = 35.0f;
        cameraWrapper.sftConfig.score_min = 0.2f;
    }

    bool ret = cameraWrapper.initPl();
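    // initPl presumably assembles and starts the processing pipeline; a minimal sketch of
    // checking the result, reusing the LOG_WARN/LOG_ENDL macros already used in this file:
    if (!ret)
        LOG_WARN << "@@@ cameraWrapper.initPl failed" << LOG_ENDL;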

    // dbgReadTexture();
//}

void Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense(JNIEnv *env, jclass clazz, jstring lic)
{
    LOG_WARN << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense" << LOG_ENDL;

    const char *utfLic = env->GetStringUTFChars(lic, NULL);
    g_stface_license_str = utfLic;
    env->ReleaseStringUTFChars(lic, utfLic);
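    // The license text is only cached in g_stface_license_str here; it is presumably copied
    // into sftConfig.license_str (see the commented-out assignment above) when a pipeline is
    // (re)initialized.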

    //LOG_ERROR << g_stface_license_str << LOG_ENDL;
}

}