houxiao
2017-08-11 c6968ba2fbd5e9f4e32fd5f62fd587f9a7d2ebc1
VisitFace/RtspNativeCodec/app/src/main/cpp/RtspNativeCodecJNIClient.cpp
@@ -10,7 +10,6 @@
#include <media/NdkMediaExtractor.h>
#include <pthread.h>
#include <ev_proto.h>
#include <face_daemon_proto.h>
@@ -20,7 +19,6 @@
#include "TeleWrapper.h"
#include "cpu_sched_test.h"
//#include <mediastreamer2/include/mediastreamer2/msjava.h>
std::stringstream logss;
@@ -36,30 +34,32 @@
TeleWrapper g_TeleWrapper;
const size_t _faceImagesSize = MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT * 3;
uint8_t _faceImages[_faceImagesSize]; // android stack is small
void remoteFireFaceCountListener(const RtspFaceDetect::FireFaceCountListenerParams::Reader& params,
                                 RtspFaceDetect::FireFaceCountListenerResults::Builder& results)
{
    int count = params.getFaceCount();
    int index = params.getCameraIndex();
    if (index < 0 || (size_t)index >= CAMERA_COUNT) // ignore out-of-range camera indices
        return;
    g_CameraWrappers[index].fireFaceCount(count);
}
extern "C"
{
void Java_cn_com_basic_face_util_RtspFaceNative_init(JNIEnv *env, jclass clazz)
{
    g_logger.set_level(INFO);
    cpu_sched();
#ifdef USE_ST_SDK
    PipeLine::register_global_elem_creator("PL_SensetimeFaceTrackMitiTrd", create_PL_SensetimeFaceTrackMultiTrd);
#endif
    for (size_t i = 0; i < CAMERA_COUNT; i++)
    {
        g_CameraWrappers[i].cameraIdx = i + 1;
        g_CameraWrappers[i].pipeLineDecoderDetector = new PipeLine;
        g_CameraWrappers[i].pipeLineAnalyzer = new PipeLine;
        g_CameraWrappers[i].pipeLineRender = new PipeLine;
        //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector));
    }
    g_TeleWrapper.start();
}
void
@@ -75,126 +75,6 @@
        g_CameraWrappers[i].rtspConfig.requestStreamingOverTcp = true;
    }
    env->ReleaseStringUTFChars(ipaddr, utf8);
}
//static jclass _jcOutputSurface = nullptr;
//static jmethodID _jmOutputSurface_ctor = nullptr;
//static jmethodID _jmOutputSurface_getSurface = nullptr;
//static jmethodID _jmOutputSurface_readPixels = nullptr;
//static jmethodID _jmOutputSurface_awaitNewImage = nullptr;
//static jmethodID _jmOutputSurface_drawImage = nullptr;
//static jmethodID _jmOutputSurface_makeCurrent = nullptr;
//
//static jobject _jobjOutputSurface = nullptr;
//static jobject _jobjSurface = nullptr;
//static JNIEnv *_env = nullptr;
void readPixels(void *surface, uint8_t *buf, size_t &maxSize)
{
    //ANativeWindow* window = (ANativeWindow*)surface;
    CameraWrapper &cameraWrapper(g_CameraWrappers[0]);
    bool attachedHere = false;
    int getEnvStat = cameraWrapper.javaVM->GetEnv((void **) &(cameraWrapper.javaEnv),
                                                  JNI_VERSION_1_6);
    if (getEnvStat == JNI_EDETACHED)
    {
        //LOG_WARN << "GetEnv: not attached" << std::endl;
        if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
            LOG_WARN << "Failed to attach" << LOG_ENDL;
        else
        {
            getEnvStat = JNI_OK;
            attachedHere = true;
        }
    } else if (getEnvStat == JNI_EVERSION)
        LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    else if (getEnvStat == JNI_ERR)
        LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    if (maxSize == 0)
    {
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_makeCurrent);
    } else
    {
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_awaitNewImage);
        //cameraWrapper.javaEnv->CallVoidMethod(_jobjOutputSurface, _jmOutputSurface_drawImage);
//
        //jbyteArray jbarrBuffer = (jbyteArray)cameraWrapper.javaEnv->CallObjectMethod(_jobjOutputSurface, _jmOutputSurface_readPixels);
        //if (jbarrBuffer != nullptr)
        //    cameraWrapper.javaEnv->GetByteArrayRegion(jbarrBuffer, 0, 640 * 480 * 2, (jbyte*)buf);
    }
    if (cameraWrapper.javaEnv->ExceptionCheck())
        cameraWrapper.javaEnv->ExceptionDescribe();
    // Only detach if this function attached the thread itself; detaching a thread
    // that was already attached elsewhere would break the caller.
    if (attachedHere)
        cameraWrapper.javaVM->DetachCurrentThread();
}
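// Illustrative sketch only, not wired into this build: the JNI round trip that the
// commented-out lines above describe. The OutputSurface object and method IDs are
// hypothetical parameters here, named after the commented-out cache in
// createDecodeSurface() below; this is an assumption, not the shipped implementation.
static void readPixelsViaOutputSurface(JNIEnv *env, jobject jobjOutputSurface,
                                       jmethodID jmAwaitNewImage, jmethodID jmDrawImage,
                                       jmethodID jmReadPixels,
                                       uint8_t *buf, size_t &maxSize)
{
    // Wait for the decoder to publish a frame, then draw it into the OutputSurface.
    env->CallVoidMethod(jobjOutputSurface, jmAwaitNewImage);
    env->CallVoidMethod(jobjOutputSurface, jmDrawImage);
    // readPixels() returns the frame as a Java byte[]; copy at most maxSize bytes out.
    jbyteArray jbarrBuffer = (jbyteArray) env->CallObjectMethod(jobjOutputSurface, jmReadPixels);
    if (jbarrBuffer == nullptr)
    {
        maxSize = 0;
        return;
    }
    jsize len = env->GetArrayLength(jbarrBuffer);
    if ((size_t) len < maxSize)
        maxSize = (size_t) len;
    env->GetByteArrayRegion(jbarrBuffer, 0, (jsize) maxSize, (jbyte *) buf);
    env->DeleteLocalRef(jbarrBuffer);
}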
/*
static void* createDecodeSurface()
{
    CameraWrapper& cameraWrapper(g_CameraWrappers[0]);
    //OutputSurface outputSurface = null;
    //outputSurface = new OutputSurface(mWidth, mHeight);
    //outputSurface.getSurface();
    //outputSurface.release();
    //int getEnvStat = cameraWrapper.javaVM->GetEnv((void **)&(cameraWrapper.javaEnv), JNI_VERSION_1_6);
    //if (getEnvStat == JNI_EDETACHED)
    //{
    //    //LOG_WARN << "GetEnv: not attached" << std::endl;
    //    if (cameraWrapper.javaVM->AttachCurrentThread(&(cameraWrapper.javaEnv), NULL) != 0)
    //        LOG_WARN << "Failed to attach" << LOG_ENDL;
    //    else
    //        getEnvStat = JNI_OK;
    //}
    //else if (getEnvStat == JNI_OK)
    //{
    //}
    //else if (getEnvStat == JNI_EVERSION)
    //    LOG_WARN << "GetEnv: version not supported" << LOG_ENDL;
    //else if (getEnvStat == JNI_ERR)
    //    LOG_WARN << "GetEnv: JNI_ERR" << LOG_ENDL;
    //void* ret = nullptr;
    //{
    //    jclass jcOutputSurface = _jcOutputSurface = cameraWrapper.javaEnv->FindClass("com/example/nativecodec/OutputSurface");
    //    _jcOutputSurface = static_cast<jclass>( cameraWrapper.javaEnv->NewGlobalRef( jcOutputSurface ));
//
    //    jmethodID jmOutputSurface_ctor = _jmOutputSurface_ctor = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "<init>", "(II)V");
    //    jmethodID jmOutputSurface_getSurface = _jmOutputSurface_getSurface = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "getSurface", "()Landroid/view/Surface;");
    //    jmethodID jmOutputSurface_readPixels = _jmOutputSurface_readPixels = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "readPixels", "()[B");
//
    //    jmethodID jmOutputSurface_awaitNewImage = _jmOutputSurface_awaitNewImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "awaitNewImage", "()V");
    //    jmethodID jmOutputSurface_drawImage = _jmOutputSurface_drawImage = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "drawImage", "()V");
    //    jmethodID jmOutputSurface_makeCurrent = _jmOutputSurface_makeCurrent = cameraWrapper.javaEnv->GetMethodID(jcOutputSurface, "makeCurrent", "()V");
//
    //    jobject jobjOutputSurface = _jobjOutputSurface = cameraWrapper.javaEnv->NewObject(jcOutputSurface , jmOutputSurface_ctor, 1920, 1088);
    //    _jobjOutputSurface = static_cast<jobject>( cameraWrapper.javaEnv->NewGlobalRef( jobjOutputSurface ));
//
    //    jobject jobjSurface = _jobjSurface = cameraWrapper.javaEnv->CallObjectMethod(jobjOutputSurface, jmOutputSurface_getSurface);
    //    ret = ANativeWindow_fromSurface(cameraWrapper.javaEnv, jobjSurface);
    //    cameraWrapper.amcdConfig.readPixels_callback = readPixels;
    //}
    //if (cameraWrapper.javaEnv->ExceptionCheck())
    //    cameraWrapper.javaEnv->ExceptionDescribe();
//
    //cameraWrapper.javaVM->DetachCurrentThread();
    //return  ret;
}
*/
static void *getSurfaceHolderGetSurface(JNIEnv *env, jobject jobjSurfaceHolder)
{
    jclass jcSurfaceHolder = env->FindClass("android/view/SurfaceHolder");
    jmethodID jmSurfaceHolder_getSurface = env->GetMethodID(jcSurfaceHolder, "getSurface", "()Landroid/view/Surface;");
    jobject surface = env->CallObjectMethod(jobjSurfaceHolder, jmSurfaceHolder_getSurface);
    return surface;
}
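// Minimal follow-up sketch (an assumption, not part of this change): converting the
// Surface returned by getSurfaceHolderGetSurface() into an ANativeWindow* that the
// NdkMediaCodec decoder can render into, mirroring the ANativeWindow_fromSurface
// call in the commented-out createDecodeSurface() above.
#include <android/native_window_jni.h> // would normally sit with the other includes at the top
static ANativeWindow *surfaceHolderToNativeWindow(JNIEnv *env, jobject jobjSurfaceHolder)
{
    jobject jobjSurface = (jobject) getSurfaceHolderGetSurface(env, jobjSurfaceHolder);
    if (jobjSurface == nullptr)
        return nullptr;
    // ANativeWindow_fromSurface acquires a reference; pair it with
    // ANativeWindow_release() once the window is no longer needed.
    return ANativeWindow_fromSurface(env, jobjSurface);
}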
// set the surface
@@ -324,22 +204,6 @@
        cameraWrapper.asvrConfig.directlyDisplay = true;
    }
    {
        cameraWrapper.sftConfig.point_size = 21;
        cameraWrapper.sftConfig.detect_face_cnt_limit = MAX_FACE;
        cameraWrapper.sftConfig.draw_face_rect = false;
        cameraWrapper.sftConfig.draw_face_feature_point = false;
        cameraWrapper.sftConfig.generate_face_feature = true;
        cameraWrapper.sftConfig.generate_face_point = false;
        cameraWrapper.sftConfig.explode_feature_rect_x = 30;
        cameraWrapper.sftConfig.explode_feature_rect_y = 40;
        cameraWrapper.sftConfig.clamp_feature_rect = true;
        cameraWrapper.sftConfig.doTrackPerFrame = 1;
        //cameraWrapper.sftConfig.license_str = g_stface_license_str;
        cameraWrapper.sftConfig.visionConeAngle = 30.0f;
        cameraWrapper.sftConfig.score_min = 0.4f;
    }
    cameraWrapper.fontPath = "/data/msyh.ttc";
    bool ret = cameraWrapper.initPl();
@@ -444,6 +308,17 @@
                                                cameraWrapper.cameraIdx, 0);
}
JNIEXPORT jboolean JNICALL
Java_cn_com_basic_face_util_RtspFaceNative_addFaceCountListener(JNIEnv *env, jclass type,
                                                                jint cameraIdx, jstring host_,
                                                                jint port) {
    const char *host = env->GetStringUTFChars(host_, 0);
    //TODO
    env->ReleaseStringUTFChars(host_, host);
    return JNI_FALSE; // not implemented yet; a jboolean JNI function must return a value
}
void
Java_cn_com_basic_face_util_RtspFaceNative_lockFace(JNIEnv *env, jclass clazz, jint cameraIdx)
{
@@ -540,20 +415,19 @@
    CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]);
    std::vector<NativeImgIdx> imgIdxes;
    size_t _faceImagesSize = MAX_FACE * MAX_FACE_WIDTH * MAX_FACE_HEIGHT * 3;
    uint8_t *_faceImages = new uint8_t[_faceImagesSize]; // android stack is small
    memset(_faceImages, 0, _faceImagesSize);
    bool ret = false;
#ifdef USE_ST_SDK
    size_t faceImagesSize = _faceImagesSize;
    ret = cameraWrapper.faceCache.getFaceListImage(imgIdxes, _faceImages, faceImagesSize);
#endif
    if (imgIdxes.size() == 0)
    {
        LOG_INFO << "No face image captured" << LOG_ENDL;
        delete[] _faceImages;
        return nullptr;
    }
@@ -564,7 +438,6 @@
    LOG_DEBUG << "_faceImagesSize=" << _faceImagesSize << LOG_ENDL;
    env->SetByteArrayRegion(jbaFaceImages, 0, _faceImagesSize, (const jbyte *) _faceImages);
    env->SetObjectField(faceImages, jfRefByteArray_arr, jbaFaceImages);
    delete[] _faceImages;
    jclass jcArrayList = env->FindClass("java/util/ArrayList");
    jmethodID jmArrayList_ctor = env->GetMethodID(jcArrayList, "<init>", "()V");
@@ -619,7 +492,6 @@
    if (imgIdxes.size() == 0)
    {
        LOG_INFO << "No face image captured" << LOG_ENDL;
        delete[] _faceImages;
        return nullptr;
    }
@@ -656,9 +528,6 @@
        LOG_DEBUG << "imgIdx " << i << ":" << std::string(imgIdxes[i]) << LOG_ENDL;
    }
    delete[] _faceImages;
    _faceImages = nullptr;
    return jobjArrayList_Imgs;
}