#include "RtspNativeCodecJNI.h"
#include "CameraWrapper.h"
#include "CaptureCamera.h"
#include <Logger/src/logger.hpp>

#include <stdlib.h>
#include <sstream>   // std::stringstream (log sink below)
#include <cstring>   // memcpy in jstringTostring
#include <cassert>
#include <jni.h>
#include <android/native_window_jni.h> // ANativeWindow_fromSurface / ANativeWindow_release

#include "DebugNetwork.h"
#include "serial.h"
//#include <mediastreamer2/include/mediastreamer2/msjava.h>

// Global log sink: all LOG_* output is collected in this in-memory stream.
std::stringstream logss;
//std::fstream logss("/storage/sdcard/log.log", std::fstream::out);
//std::fstream logss("/storage/emulated/0/log.log", std::fstream::out);
Logger g_logger(logss);

static std::string g_stface_license_str;

// One wrapper per camera; the Java side passes 1-based camera indices.
CameraWrapper g_CameraWrappers[CAMERA_COUNT];

CaptureCamera g_CaptureCamera;

extern "C"
{
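
// JNI entry points exported to the Java class cn.com.basic.face.util.RtspFaceNative,
// plus the helpers they use. Camera indices coming in from Java are 1-based.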

    for (size_t i = 0; i < CAMERA_COUNT; i++)
    {
        g_CameraWrappers[i].cameraIdx = i + 1;

        g_CameraWrappers[i].pipeLine = new PipeLine;
        PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLine));

        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
        g_CameraWrappers[i].pipeLineRender = new PipeLine;
        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
    }
}


    env->ReleaseStringUTFChars(ipaddr, utf8);
}

void createHiddenSurface(JNIEnv *env, int cameraIdx);

//static jclass _jcOutputSurface = nullptr;
//static jmethodID _jmOutputSurface_ctor = nullptr;
//static jmethodID _jmOutputSurface_getSurface = nullptr;
//static jmethodID _jmOutputSurface_readPixels = nullptr;
//static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
//static jmethodID _jmOutputSurface_drawImage = nullptr;
//static jmethodID _jmOutputSurface_makeCurrent = nullptr;
//
//static jobject _jobjOutputSurface = nullptr;
//static jobject _jobjSurface = nullptr;
//static JNIEnv *_env = nullptr;

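// readPixels: callback meant to pull decoded pixels back from the hidden OutputSurface.
// It may run on a non-Java thread, so it attaches to the JVM before making JNI calls;
// maxSize == 0 is used as a "makeCurrent only" signal. The actual OutputSurface calls
// are currently commented out, so this is effectively a stub.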
void readPixels(void* surface, uint8_t* buf, size_t& maxSize)
{
    CameraWrapper& cameraWrapper(g_CameraWrappers[0]);
    //ANativeWindow* window = (ANativeWindow*)surface;

    // Attach to the JVM if this thread is not already attached.
    bool attachedHere = false;
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
        {
            getEnvStat = JNI_OK;
            attachedHere = true;
        }
    }
    else if (getEnvStat == JNI_OK)
    {
    }
    else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;

    if (maxSize == 0)
    {
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_makeCurrent);
    }
    else
    {
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_awaitNewImage);
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_drawImage);
        //
        //jbyteArray jbarrBuffer = (jbyteArray)cameraWrapper.javaEnv->CallObjectMethod(_jobjOutputSurface, _jmOutputSurface_readPixels);
        //if (jbarrBuffer != nullptr)
        //    cameraWrapper.javaEnv->GetByteArrayRegion(jbarrBuffer, 0, 640 * 480 * 2, (jbyte*)buf);
    }

    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();

    // Only detach if we attached in this call; detaching a Java-owned thread would break it.
    if (attachedHere)
        cameraWrapper.javaVM->DetachCurrentThread();
}

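// The disabled block below sketches how a hidden decode surface could be created from the
// Java helper com/example/nativecodec/OutputSurface: construct it via JNI, fetch its Surface,
// wrap it in an ANativeWindow, and register readPixels as the decoder callback.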

/*
static void* createDecodeSurface()
{
    CameraWrapper& cameraWrapper(g_CameraWrappers[0]);

    //OutputSurface outputSurface = null;
    //outputSurface = new OutputSurface(mWidth, mHeight);
    //outputSurface.getSurface();
    //outputSurface.release();

    jclass jcOutputSurface = env->FindClass("com/example/nativecodec/OutputSurface");
    jmethodID jmOutputSurface_ctor = env->GetMethodID(jcOutputSurface, "<init>", "(II)V");
    jmethodID jmOutputSurface_getSurface = env->GetMethodID(jcOutputSurface, "getSurface", "()Landroid/view/Surface;");

    jobject jobjOutputSurface = env->NewObject(jcOutputSurface, jmOutputSurface_ctor, 1920, 1088);
    jobject jobjSurface = env->CallObjectMethod(jobjOutputSurface, jmOutputSurface_getSurface);
    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, jobjSurface);
    //int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    //if (getEnvStat == JNI_EDETACHED)
    //{
    //    //LOG_WARN << "GetEnv: not attached" << std::endl;
    //    if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
    //        LOG_WARN << "Failed to attach" << LOG_ENDL;
    //    else
    //        getEnvStat = JNI_OK;
    //}
    //else if (getEnvStat == JNI_OK)
    //{
    //}
    //else if (getEnvStat == JNI_EVERSION)
    //    LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    //else if (getEnvStat == JNI_ERR)
    //    LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;

    //void* ret = nullptr;
    //{
    //    jclass jcOutputSurface = _jcOutputSurface = cameraWrapper.javaEnv->FindClass("com/example/nativecodec/OutputSurface");
    //    _jcOutputSurface = static_cast<jclass>(cameraWrapper.javaEnv->NewGlobalRef(jcOutputSurface));
    //
    //    jmethodID jmOutputSurface_ctor = _jmOutputSurface_ctor = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "<init>", "(II)V");
    //    jmethodID jmOutputSurface_getSurface = _jmOutputSurface_getSurface = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "getSurface", "()Landroid/view/Surface;");
    //    jmethodID jmOutputSurface_readPixels = _jmOutputSurface_readPixels = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "readPixels", "()[B");
    //
    //    jmethodID jmOutputSurface_awaitNewImage = _jmOutputSurface_awaitNewImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "awaitNewImage", "()V");
    //    jmethodID jmOutputSurface_drawImage = _jmOutputSurface_drawImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "drawImage", "()V");
    //    jmethodID jmOutputSurface_makeCurrent = _jmOutputSurface_makeCurrent = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "makeCurrent", "()V");
    //
    //    jobject jobjOutputSurface = _jobjOutputSurface = cameraWrapper.javaEnv->NewObject(jcOutputSurface, jmOutputSurface_ctor, 1920, 1088);
    //    _jobjOutputSurface = static_cast<jobject>(cameraWrapper.javaEnv->NewGlobalRef(jobjOutputSurface));
    //
    //    jobject jobjSurface = _jobjSurface = cameraWrapper.javaEnv->CallObjectMethod(jobjOutputSurface, jmOutputSurface_getSurface);
    //    ret = ANativeWindow_fromSurface(cameraWrapper.javaEnv, jobjSurface);
    //    cameraWrapper.amcdConfig.readPixels_callback = readPixels;
    //}

    //if (cameraWrapper.javaEnv->ExceptionCheck())
    //    cameraWrapper.javaEnv->ExceptionDescribe();
    //
    //cameraWrapper.javaVM->DetachCurrentThread();

    //return ret;
}
*/

// Helper: SurfaceHolder.getSurface() via JNI; returns the Surface as a local jobject.
static void* getSurfaceHolderGetSurface(JNIEnv* env, jobject jobjSurfaceHolder)
{
    jclass jcSurfaceHolder = env->FindClass("android/view/SurfaceHolder");
    jmethodID jmSurfaceHolder_getSurface = env->GetMethodID(jcSurfaceHolder, "getSurface", "()Landroid/view/Surface;");
    jobject surface = env->CallObjectMethod(jobjSurfaceHolder, jmSurfaceHolder_getSurface);
    return surface;
}

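// setSurface: called from Java with the render Surface for one camera. Any previously held
// ANativeWindow is released before the new one is acquired; the same Surface is currently
// used for both windowDecode and windowRender.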
// set the surface
void Java_cn_com_basic_face_util_RtspFaceNative_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surfaceRender)
{
    LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setSurface" << LOG_ENDL;
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);
    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx - 1]); // cameraIdx is 1-based

    // obtain a native window from the Java surface, releasing any previous one
    if (cameraWrapper.windowDecode != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowDecode));
        cameraWrapper.windowDecode = NULL;
    }
    cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = getSurfaceHolderGetSurface(env, surfaceRender);
    //cameraWrapper.windowDecode = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    if (cameraWrapper.windowRender != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(cameraWrapper.windowRender));
        cameraWrapper.windowRender = NULL;
    }
    cameraWrapper.windowRender = ANativeWindow_fromSurface(env, surfaceRender);
    cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender; // support reset
    //cameraWrapper.windowRender = ANativeWindow_fromSurface(env, (jobject)getSurfaceHolderGetSurface(env, surfaceRender));

    //createHiddenSurface(env, cameraIdx);

    LOGP(DEBUG, "@@@ setsurface %p,%p", cameraWrapper.windowDecode, cameraWrapper.windowRender);
}

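// createPlayer: stores the RTSP URL for the camera, guesses the stream resolution from the
// URL pattern (main/sub-stream naming of the cameras in use), then fills the RTSP, decoder,
// render and face-tracker configs and initializes the pipeline.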
jboolean Java_cn_com_basic_face_util_RtspFaceNative_createPlayer(JNIEnv* env, jclass clazz, jint cameraIdx, jstring uri)
{
    assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT);

    CameraWrapper& cameraWrapper(g_CameraWrappers[cameraIdx - 1]); // cameraIdx is 1-based

    int w = 0, h = 0; //#todo

    {
        const char *utf8Uri = env->GetStringUTFChars(uri, NULL);
        cameraWrapper.rtspConfig.rtspURL = utf8Uri;
        env->ReleaseStringUTFChars(uri, utf8Uri);

        //#todo temp use: guess the stream resolution from the RTSP URL
        if (cameraWrapper.rtspConfig.rtspURL.find("/main/") != std::string::npos || cameraWrapper.rtspConfig.rtspURL.find("subtype=0") != std::string::npos)
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264/") != std::string::npos && cameraWrapper.rtspConfig.rtspURL.find("/sub/") != std::string::npos)
        {
            w = 640; h = 480;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264_2") != std::string::npos)
        {
            w = 800; h = 600;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("/h264") != std::string::npos)
        {
            w = 1920; h = 1080;
        }
        else if (cameraWrapper.rtspConfig.rtspURL.find("subtype=2") != std::string::npos)
        {
            w = 1280; h = 720;
        }
        else
        {
            w = 640; h = 480;
        }

        cameraWrapper.rtspConfig.progName = "RtspNativeCodec";
        cameraWrapper.rtspConfig.aux = true; // ffmpeg needs aux, but live555 does not
        cameraWrapper.rtspConfig.verbosityLevel = 1;
        cameraWrapper.rtspConfig.tunnelOverHTTPPortNum = 0;
        cameraWrapper.rtspConfig.args = nullptr;
    }

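    // Decoder configuration (amcdConfig): H.264 ("video/avc") at the resolution guessed above.
    // The decode/render surfaces are left null here; display goes through asvrConfig.windowSurface below.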
    {
        cameraWrapper.amcdConfig.ak_mime = "video/avc";
        cameraWrapper.amcdConfig.ak_width = w;
        cameraWrapper.amcdConfig.ak_height = h; // nvidia yuv420p,1088; Amlogic yuv420p,1080; RK3288 NV12
        cameraWrapper.amcdConfig.windowSurfaceDecode = nullptr; //cameraWrapper.windowDecode;
        cameraWrapper.amcdConfig.windowSurfaceRender = nullptr; //cameraWrapper.windowRender;
        //cameraWrapper.amcdConfig.windowSurfaceDecode = cameraWrapper.windowDecode;
        //cameraWrapper.amcdConfig.windowSurfaceRender = cameraWrapper.windowRender;
        cameraWrapper.amcdConfig.releaseOutputBuffIdx = true;
        cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = false;
        cameraWrapper.amcdConfig.generateDecodedDataPerFrame = 1;

        //cameraWrapper.amcdConfig.createSurface_callback = createDecodeSurface;
        //cameraWrapper.amcdConfig.readPixels_callback = readPixels;

#ifndef USE_ST_SDK
        cameraWrapper.amcdConfig.releaseOutputBuffIdxInPay = true; //#todo
#endif
    }

    {
        cameraWrapper.asvrConfig.windowSurface = cameraWrapper.windowRender;
        cameraWrapper.asvrConfig.outputOriginFrame = true;
        cameraWrapper.asvrConfig.outputRenderFrame = true;
        cameraWrapper.asvrConfig.scaleToWidth = 0;
        cameraWrapper.asvrConfig.scaleToHeight = 0;
    }

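    // Face tracker configuration (sftConfig, ST face SDK): no on-frame drawing, face features
    // generated on every tracked frame, detection rectangles expanded and clamped to the frame.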
    {

        cameraWrapper.sftConfig.draw_face_rect = false;
        cameraWrapper.sftConfig.draw_face_feature_point = false;
        cameraWrapper.sftConfig.generate_face_feature = true;
        cameraWrapper.sftConfig.generate_face_point = false;
        cameraWrapper.sftConfig.explode_feature_rect_x = 30;
        cameraWrapper.sftConfig.explode_feature_rect_y = 40;
        cameraWrapper.sftConfig.clamp_feature_rect = true;
        cameraWrapper.sftConfig.doTrackPerFrame = 1;
        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
        cameraWrapper.sftConfig.visionConeAngle = 35.0f;
        cameraWrapper.sftConfig.score_min = 0.2f;
    }

    bool ret = cameraWrapper.initPl();

    cameraWrapper.stop();
}


//#todo remove
//see: https://github.com/BelledonneCommunications/mediastreamer2/blob/master/src/android/android_mediacodec.cpp
static int handle_java_exception(JNIEnv *env) {
    if (env->ExceptionCheck()) {
        env->ExceptionDescribe();
        env->ExceptionClear();
        return -1;
    }
    return 0;
}

//#todo remove
static bool _loadClass(JNIEnv *env, const char *className, jclass *_class) {
    *_class = env->FindClass(className);
    if (handle_java_exception(env) == -1 || *_class == NULL) {
        LOG_WARN << "Could not load class " << className << LOG_ENDL;
        return false;
    }
    return true;
}

//#todo remove
static bool _getStaticMethodID(JNIEnv *env, jclass _class, const char *name, const char *sig, jmethodID *method) {
    *method = env->GetStaticMethodID(_class, name, sig);
    if (handle_java_exception(env) == -1 || *method == NULL) {
        LOG_WARN << "Could not get static method " << name << LOG_ENDL;
        return false;
    }
    return true;
}

    // set for multithread callback
    env->GetJavaVM(&(cameraWrapper.javaVM));
    cameraWrapper.javaEnv = env;

    //ms_set_jvm(cameraWrapper.javaVM);

    std::string _className;
    {

    evpHeader->size = sizeof(EVPHeader) + sizeof(FDP_FaceDetectPB) + buffSize;
    buffSize = evpHeader->size;

    FDP_FaceDetectPB* fdpFaceDetectPB = new (buffer + sizeof(EVPHeader)) FDP_FaceDetectPB(dbId, cameraIdx + 1);

    evpHeader->hton();
    fdpFaceDetectPB->hton();

    return env->NewStringUTF(g_dbgLog.c_str());
}

//void Java_cn_com_basic_face_util_RtspFaceNative_dbgReadTexture(JNIEnv* env, jclass clazz)
//{
//    dbgReadTexture();
//}

void Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense(JNIEnv *env, jclass clazz, jstring lic)
{
    LOG_WARN << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_dbgSetStfaceLicense" << LOG_ENDL;

    const char *utfLic = env->GetStringUTFChars(lic, NULL);
    g_stface_license_str = utfLic;
    env->ReleaseStringUTFChars(lic, utfLic);

    //LOG_ERROR << g_stface_license_str << LOG_ERROR;
}

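// Capture camera (g_CaptureCamera): preview surface management and player start/stop,
// independent of the RTSP camera wrappers above.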
void Java_cn_com_basic_face_util_RtspFaceNative_setCaptureSurface(JNIEnv *env, jclass clazz, jobject surfaceCapture)
{
    if (g_CaptureCamera.windowRender != nullptr)
    {
        ANativeWindow_release((ANativeWindow*)(g_CaptureCamera.windowRender));
        g_CaptureCamera.windowRender = NULL;
    }
    g_CaptureCamera.windowRender = ANativeWindow_fromSurface(env, surfaceCapture);
}

bool Java_cn_com_basic_face_util_RtspFaceNative_startCapturePlayer(JNIEnv *env, jclass clazz, jstring uri)
{
    std::string _uri;
    {
        const char *utfFunc = env->GetStringUTFChars(uri, NULL);
        _uri = utfFunc;
        env->ReleaseStringUTFChars(uri, utfFunc);
    }

    if (_uri.empty())
        return false;
    if (g_CaptureCamera.windowRender == nullptr)
        return false;

    g_CaptureCamera.cameraUri = _uri;
    return g_CaptureCamera.start();
}

void Java_cn_com_basic_face_util_RtspFaceNative_stopCapturePlayer(JNIEnv *env, jclass clazz)
{
    //#todo not implemented
}

jobject Java_cn_com_basic_face_util_RtspFaceNative_getCaptureImages(JNIEnv* env, jclass clazz)
{
    //#todo not implemented
    return NULL;
}

} // extern "C"

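// The helpers below drive an external telephone dialer attached on /dev/ttyS1 at 2400 baud
// using two-character ASCII commands. They sit outside the extern "C" block above; the Java_*
// entry points among them are presumably declared with C linkage in RtspNativeCodecJNI.h.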

// Crude busy-wait delay used to pace the serial dialer commands.
void Delay(unsigned int nDelay)
{
    volatile unsigned int k = 0; // volatile so the loop is not optimized away
    for (unsigned int i = 0; i < nDelay; i++)
        for (unsigned int j = 0; j < 6144; j++)
            k++;
}

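// callNum: sends one dialed digit; '1'..'9' and '0' map to the two-letter commands "DA".."DJ".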
void callNum(int fd, char phone)
{
    switch (phone)
    {
    case '1':
        serialWriteString(fd, "DA"); // 1
        break;
    case '2':
        serialWriteString(fd, "DB"); // 2
        break;
    case '3':
        serialWriteString(fd, "DC"); // 3
        break;
    case '4':
        serialWriteString(fd, "DD"); // 4
        break;
    case '5':
        serialWriteString(fd, "DE"); // 5
        break;
    case '6':
        serialWriteString(fd, "DF"); // 6
        break;
    case '7':
        serialWriteString(fd, "DG"); // 7
        break;
    case '8':
        serialWriteString(fd, "DH"); // 8
        break;
    case '9':
        serialWriteString(fd, "DI"); // 9
        break;
    case '0':
        serialWriteString(fd, "DJ"); // 0
        break;
    }
}

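// call: "AA" is sent first (presumably off-hook / start dialing), then each digit of the
// number with a busy-wait delay in between. Note the serial fd is not closed here.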
void call(const char* phone)
{
    int fd = serialOpen("/dev/ttyS1", 2400);
    serialWriteString(fd, "AA");
    Delay(10000);
    while (*phone)
    {
        Delay(10000);
        callNum(fd, *(phone++));
    }
}


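// jstringTostring: converts a jstring to a heap-allocated UTF-8 C string by calling
// String.getBytes("utf-8") through JNI; the caller owns the returned buffer and must free() it.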
char* jstringTostring(JNIEnv* env, jstring jstr)
{
    char* rtn = NULL;
    jclass clsstring = env->FindClass("java/lang/String");
    jstring strencode = env->NewStringUTF("utf-8");
    jmethodID mid = env->GetMethodID(clsstring, "getBytes", "(Ljava/lang/String;)[B");
    jbyteArray barr = (jbyteArray)env->CallObjectMethod(jstr, mid, strencode);
    jsize alen = env->GetArrayLength(barr);
    jbyte* ba = env->GetByteArrayElements(barr, NULL);
    if (alen > 0)
    {
        rtn = (char*)malloc(alen + 1);
        memcpy(rtn, ba, alen);
        rtn[alen] = 0;
    }
    env->ReleaseByteArrayElements(barr, ba, 0);
    return rtn;
}

void Java_cn_com_basic_face_util_RtspFaceNative_telCall(JNIEnv* env, jclass clazz, jstring phone)
{
    char* phoneStr = jstringTostring(env, phone);
    if (phoneStr != NULL)
    {
        call(phoneStr);
        free(phoneStr);
    }
}

void Java_cn_com_basic_face_util_RtspFaceNative_Hang(JNIEnv* env, jclass clazz)
{
    // send the hang-up command ("BA") to the dialer
    int fd = serialOpen("/dev/ttyS1", 2400);
    serialWriteString(fd, "BA");
}