#include "RtspNativeCodecJNI.h"
#include "CameraWrapper.h"
#include "CaptureCamera.h"
#include <logger.h>
#include <Logger/src/logger.hpp>

#include "DebugNetwork.h"

#include <mediastreamer2/include/mediastreamer2/msjava.h>

// Standard and NDK headers used directly in this file
#include <sstream>
#include <string>
#include <cassert>
#include <android/native_window_jni.h>
// The global logger writes to an in-memory stringstream; the commented-out
// lines below are file-based alternatives.
std::stringstream logss;
//std::fstream logss("/storage/sdcard/log.log", std::fstream::out);
//std::fstream logss("/storage/emulated/0/log.log", std::fstream::out);
Logger g_logger(logss);

static std::string g_stface_license_str;

CameraWrapper g_CameraWrappers[CAMERA_COUNT];

CaptureCamera g_CaptureCamera;

extern "C"
{
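
// Each CameraWrapper owns its own PipeLine instances (decoder+detector, analyzer, render,
// plus the older combined pipeLine), presumably so the decode, analysis and render stages
// can be built up and torn down independently for every camera.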

    for (size_t i = 0; i < CAMERA_COUNT; i++)
    {
        // Camera indices exposed to Java are 1-based
        g_CameraWrappers[i].cameraIdx = i + 1;

        g_CameraWrappers[i].pipeLine = new PipeLine;
        PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine));

        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
        g_CameraWrappers[i].pipeLineRender = new PipeLine;
        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
    }
}


    env->ReleaseStringUTFChars(ipaddr, utf8);
}

// Cached JNI handles for the Java-side OutputSurface helper class
static jclass _jcOutputSurface = nullptr;
static jmethodID _jmOutputSurface_ctor = nullptr;
static jmethodID _jmOutputSurface_getSurface = nullptr;
static jmethodID _jmOutputSurface_readPixels = nullptr;
static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
static jmethodID _jmOutputSurface_drawImage = nullptr;
static jmethodID _jmOutputSurface_makeCurrent = nullptr;

static jobject _jobjOutputSurface = nullptr;
static jobject _jobjSurface = nullptr;
static JNIEnv *_env = nullptr;
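
// readPixels presumably copies decoded pixels back from the Java OutputSurface helper
// (via the cached _jmOutputSurface_readPixels method ID above) into buf, with the copied
// size reported through maxSize.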
void readPixels(void* surface, uint8_t* buf, size_t& maxSize)
{

}

// set the surface (the render surface is also used for decoding)
void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surfaceRender)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setSurface" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx - 1]); // cameraIdx is 1-based (see init loop)

    if (cameraWrapper.windowDecode != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
        cameraWrapper.windowDecode = NULL;
    }
    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    if (cameraWrapper.windowRender != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowRender));
        cameraWrapper.windowRender = NULL;
    }
    cameraWrapper.windowRender = ANativeWindow_fromSurface(env, surfaceRender);
    cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender; // support reset
    //cameraWrapper.windowRender = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    LOGP(DEBUG, "@@@ setsurface %p,%p", cameraWrapper.windowDecode, cameraWrapper.windowRender);
}

    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx]);

    int w = 0, h = 0; // #todo

    {
        const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
        cameraWrapper.rtspConfig.rtspURL = utf8Uri;
        env->ReleaseStringUTFChars(uri, utf8Uri);

        // #todo temporary: guess the stream resolution from well-known URL patterns
        if (cameraWrapper.rtspConfig.rtspURL.find("/main/") != std::string::npos || cameraWrapper.rtspConfig.rtspURL.find("subtype=0") != std::string::npos)
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264/") != std::string::npos && cameraWrapper.rtspConfig.rtspURL.find("/sub/") != std::string::npos)
        {
            w = 640; h = 480;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264_2") != std::string::npos)
        {
            w = 800; h = 600;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264") != std::string::npos)
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("subtype=2") != std::string::npos)
        {
            w = 1280; h = 720;
        }
        else
        {
            w = 640; h = 480;
        }

        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
        cameraWrapper.rtspConfig.aux = true; // the ffmpeg client needs the AUX data, live555 does not
        cameraWrapper.rtspConfig.verbosityLevel = 1;
        cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0;
        cameraWrapper.rtspConfig.args = nullptr;
    }
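
    // The branches above only guess the frame size from vendor-style URL substrings
    // (e.g. "/main/" or "subtype=0" map to 1920x1080); anything unrecognized falls back
    // to 640x480.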

    {
        cameraWrapper.amcdConfig.ak_mime = "video/avc";
        cameraWrapper.amcdConfig.ak_width = w;
        cameraWrapper.amcdConfig.ak_height = h; // NVIDIA: yuv420p, height 1088; Amlogic: yuv420p, 1080; RK3288: NV12
        //cameraWrapper.amcdConfig.windowSurfaceDecode = cameraWrapper.windowDecode;
        //cameraWrapper.amcdConfig.windowSurfaceRender = cameraWrapper.windowRender;
        cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;

        cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender;
        cameraWrapper.asvrConfig.outputOriginFrame = true;
        cameraWrapper.asvrConfig.outputRenderFrame = true;
        cameraWrapper.asvrConfig.scaleToWidth = 0;  // was 640
        cameraWrapper.asvrConfig.scaleToHeight = 0; // was 480
    }
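
    // asvrConfig presumably drives the Android surface renderer: with outputOriginFrame and
    // outputRenderFrame both enabled it forwards the unmodified decoded frames (for analysis)
    // as well as the rendered ones, and a scaleToWidth/Height of 0 leaves the frames unscaled.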

    {

        cameraWrapper.sftConfig.draw_face_rect = false;
        cameraWrapper.sftConfig.draw_face_feature_point = false;
        cameraWrapper.sftConfig.generate_face_feature = true;
        cameraWrapper.sftConfig.generate_face_point = false;
        cameraWrapper.sftConfig.explode_feature_rect_x = 30;
        cameraWrapper.sftConfig.explode_feature_rect_y = 40;
        cameraWrapper.sftConfig.clamp_feature_rect = true;
        cameraWrapper.sftConfig.doTrackPerFrame = 1; // was 50
        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
        cameraWrapper.sftConfig.visionConeAngle = 35.0f;
        cameraWrapper.sftConfig.score_min = 0.2f;
    }
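
    // STFace tracker settings: on-frame drawing is disabled and feature generation enabled;
    // explode_feature_rect_x/y presumably enlarge the detected face rectangle before feature
    // extraction, clamp_feature_rect keeps it inside the frame, and doTrackPerFrame = 1
    // presumably runs tracking on every frame.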

    bool ret = cameraWrapper.initPl();

    evpHeader->size = sizeof(EVPHeader) + sizeof(FDP_FaceDetectPB) + buffSize;
    buffSize = evpHeader->size;

    // Construct the payload in place right after the header (placement new)
    FDP_FaceDetectPB* fdpFaceDetectPB = new (buffer + sizeof(EVPHeader)) FDP_FaceDetectPB(dbId, cameraIdx + 1);

    // Convert both structures to network byte order
    evpHeader->hton();
    fdpFaceDetectPB->hton();

    // dbgReadTexture();
//}

void Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense(JNIEnv *env, jclass clazz, jstring lic)
{
    LOG_WARN << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense" << LOG_ENDL;

    const char *utfLic = env->GetStringUTFChars(lic, NULL);
    g_stface_license_str = utfLic;
    env->ReleaseStringUTFChars(lic, utfLic);

    //LOG_ERROR << g_stface_license_str << LOG_ENDL;
}

void Java_cn_com_basic_face_util_RtspFaceNative_setCaptureSurface(JNIEnv *env, jclass clazz, jobject surfaceCapture)
{
    if (g_CaptureCamera.windowRender != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(g_CaptureCamera.windowRender));
        g_CaptureCamera.windowRender = NULL;
    }
    g_CaptureCamera.windowRender = ANativeWindow_fromSurface(env, surfaceCapture);
}

bool Java_cn_com_basic_face_util_RtspFaceNative_startCapturePlayer(JNIEnv *env, jclass clazz, jstring uri)
{
    std::string _uri;
    {
        const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
        _uri = utf8Uri;
        env->ReleaseStringUTFChars(uri, utf8Uri);
    }

    if (_uri.empty())
        return false;
    if (g_CaptureCamera.windowRender == nullptr)
        return false;

    g_CaptureCamera.cameraUri = _uri;
    return g_CaptureCamera.start();
}
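
// start() presumably opens the local capture camera at cameraUri and renders into the
// surface set by setCaptureSurface(), returning false on failure; hence the early-out
// checks above when the URI is empty or no capture surface has been provided.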

void Java_cn_com_basic_face_util_RtspFaceNative_stopCapturePlayer(JNIEnv *env, jclass clazz)
{
    // #todo not implemented
}

jobject Java_cn_com_basic_face_util_RtspFaceNative_getCaptureImages(JNIEnv* env, jclass clazz)
{
    // #todo not implemented; return null so the JNI call is well-defined
    return nullptr;
}

}