#include <jni.h>
|
#include <string>
|
#include <stdio.h>
|
#include <stdlib.h>
|
#include <string.h>
|
#include <sys/time.h>
|
#include <time.h>
|
#include "turbojpeg.h"
|
#include "libyuv.h"
|
|
#define LOG_TAG "turbojpeg"
|
#define LOGW(...) __android_log_write(ANDROID_LOG_WARN,LOG_TAG,__VA_ARGS__)
|
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
|
|
#include "FiStdDefEx.h"
|
#include "THFaceImage_i.h"
|
//#include "jpeglib.h"
|
#include <setjmp.h>
|
|
#include "FaceDetectHelper.h"
|
#include "LogUtil.h"
|
|
#include <memory>
|
#include <opencv2/opencv.hpp>
|
#include "THFeature_i.h"
|
|
#include <map>
|
|
#include <memory>
|
#include <unordered_map>
|
#include <stdio.h>
|
#include <stdlib.h>
|
#include <string.h>
|
#include <sys/time.h>
|
#include <time.h>
|
|
#include "RgbDetect.h"
|
#include "GrayDetect.h"
|
#include "LivenessDetect.h"
|
|
#include "FiStdDefEx.h"
|
//#include "THFaceLive_i.h"
|
|
#include <memory>
|
#include "THFeature_i.h"
|
|
#include "FaceDetectHelper.h"
|
#include "LogUtil.h"
|
#include <memory>
|
#include <sstream>
|
#include "TrackingTrigger.h"
|
#include <map>
|
|
#include <memory>
|
#include "THFeature_i.h"
|
#include <sys/time.h>
|
#include <time.h>
|
#include <iostream>
|
#include <string>
|
#include <cstring>
|
#include <android/log.h>
|
#include <android/native_window.h>
|
#include <android/native_window_jni.h>
|
|
//定义TAG之后,我们可以在LogCat通过TAG过滤出NDK打印的日志
|
#define TAG "jni ndk "
|
// 定义info信息
|
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,TAG,__VA_ARGS__)
|
// 定义debug信息
|
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
|
// 定义error信息
|
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,TAG,__VA_ARGS__)
|
extern "C" {
|
//#include <libavcodec/avcodec.h>
|
//#include <libavformat/avformat.h>
|
//#include <libavcodec/jni.h>
|
//#include <libswscale/swscale.h>
|
//#include <libswresample/swresample.h>
|
}
|
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"testff",__VA_ARGS__)
|
|
//using namespace std;
|
|
// JNI entry: initializes the face-detection SDK with the model directory
// passed from Java. Returns JNI_TRUE on success.
extern "C" JNIEXPORT jboolean
JNICALL
Java_com_basic_security_utils_FaceId_initSdk(
        JNIEnv *env, jobject, jstring jModelPath) {
    // Borrow the UTF-8 view of the Java string; it must be released or the
    // backing buffer leaks on every call (fix: the original never released it).
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    jboolean success = FaceDetectHelper::getInstance()->initSdk(modelPath);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    return success;
}
|
|
// Reads the int field `fieldName` (JNI signature "I") from the Java object
// `detectedResult` via reflection.
// fix: parameter made const — callers pass string literals, which do not
// legally bind to `char *` in standard C++.
jint getIntValue(JNIEnv *env, jobject detectedResult, const char *fieldName) {
    jclass detectedResultClass = env->GetObjectClass(detectedResult);
    jfieldID fieldId = env->GetFieldID(detectedResultClass, fieldName, "I");
    return env->GetIntField(detectedResult, fieldId);
}
|
|
// Reads the boolean field `fieldName` (JNI signature "Z") from the Java
// object `detectedResult` via reflection.
// fix: parameter made const — callers pass string literals, which do not
// legally bind to `char *` in standard C++.
jboolean getBooleanValue(JNIEnv *env, jobject detectedResult, const char *fieldName) {
    jclass detectedResultClass = env->GetObjectClass(detectedResult);
    jfieldID fieldId = env->GetFieldID(detectedResultClass, fieldName, "Z");
    return env->GetBooleanField(detectedResult, fieldId);
}
|
|
// Module-level scratch state shared by the detectFace JNI entry point.
// NOTE(review): nothing here is synchronized — this assumes detection is
// driven from a single thread; confirm against the Java caller.
namespace main {
// Per-face extracted feature blobs; 2560 matches featureSize below.
BYTE features[10][2560];
int featureSize = 2560;
// De-duplicates/track-assigns faces across frames (0.8 threshold).
TrackingTrigger trackingTrigger(0.8);
ScoredRect sr;
//ScoredRect scoredRect;
// Last track id observed per face slot (-1 means untracked).
long track_id[10];
// Last liveness flag per face slot (currently always 0 — see detectFace).
bool liveness[10];
string featureNames[10];

// Timestamp (ms) of the last successful feature extraction.
double prevExtractFeatureTime = 0;

// Frame buffers sized for 640x480 24-bit input.
// NOTE(review): detectFace copies the incoming Java array here without a
// bounds check — assumes frames never exceed 640*480*3 bytes; verify.
unsigned char *rgbBuffer = new unsigned char[640 * 480 * 3];
unsigned char *grayBuffer = new unsigned char[640 * 480 * 3];
// Detection outputs for the RGB and gray cameras respectively.
THFI_FacePos rgbFps1[10];
THFI_FacePos grayFps1[10];

// get current system time in milliseconds (with sub-millisecond fraction)
double msecond() {
struct timeval tv;
gettimeofday(&tv, 0);
return (tv.tv_sec * 1.0e3 + tv.tv_usec * 1.0e-3);
}
}
|
|
void setIntValue(JNIEnv *env, jobject detectedResult, char *fieldName, jint value) {
|
jclass detectedResultClass = env->GetObjectClass(detectedResult);
|
jfieldID isRgbFieldId = env->GetFieldID(detectedResultClass, fieldName, "I");
|
env->SetIntField(detectedResult, isRgbFieldId, value);
|
}
|
|
// JNI entry: runs THFI face detection on the frame carried inside the Java
// DetectedResult object. For RGB frames it also runs tracking and optional
// feature extraction, and writes the resulting "featureCount" back into the
// Java object; for gray frames it only runs detection.
// Side effects: updates main::features / track_id / liveness /
// prevExtractFeatureTime, and fills main::rgbBuffer or main::grayBuffer.
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_detectFace(
JNIEnv *env, jobject, jobject detectedResult) {
// Pull detection parameters out of the Java object via reflection.
jboolean isRgb = getBooleanValue(env, detectedResult, "isRgb");
int width = getIntValue(env, detectedResult, "width");
int height = getIntValue(env, detectedResult, "height");
jboolean shouldExtractFeature = getBooleanValue(env, detectedResult, "shouldExtractFeature");
jclass detectedResultClass = env->GetObjectClass(detectedResult);
jfieldID bgrDataFieldId = env->GetFieldID(detectedResultClass, "bgrData", "[B");
jobject bgrData = env->GetObjectField(detectedResult, bgrDataFieldId);
jbyteArray bgrDataByteArray = reinterpret_cast<jbyteArray>(bgrData);
// NOTE(review): len is copied into a fixed 640*480*3 buffer below with no
// bounds check — assumes frames never exceed that size; confirm.
int len = env->GetArrayLength(bgrDataByteArray);

if (isRgb) {
// Copy the Java frame into the native RGB scratch buffer.
env->GetByteArrayRegion(bgrDataByteArray, 0, len,
reinterpret_cast<jbyte *>(main::rgbBuffer));
// Detect up to 10 faces; 24 is presumably bits-per-pixel and 360 an
// angle/rotation parameter — TODO confirm against THFaceImage docs.
int face_nums = THFI_DetectFace(0, main::rgbBuffer, 24, width, height, main::rgbFps1, 10,
360);
int featureCount = 0;
for (int i = 0; i < face_nums; i++) {
try {
THFI_FacePos fps;
fps = main::rgbFps1[i];
// Wrap the detection rectangle for the tracker; id starts at -1
// and is presumably assigned by triggerOnce — verify.
ScoredRect scoredRect{fps.fAngle.confidence, {
fps.rcFace.left,
fps.rcFace.top,
fps.rcFace.right,
fps.rcFace.bottom
}, -1, map<string, string>()};
bool not_duplicate = main::trackingTrigger.triggerOnce(scoredRect);
bool track_id_changed = (main::track_id[i] != scoredRect.id);
main::track_id[i] = scoredRect.id;
if (main::track_id[i] == 1) {
int m = 0; // debug breakpoint anchor; no effect
}
// id == -1 means the tracker rejected/deduplicated this face.
if (main::track_id[i] == -1) {
continue;
}
// Liveness detection is disabled; flag is hard-coded to 0.
int liveness = 0;
// THFL_Detect(0, main::rgbBuffer,
// main::grayBuffer,
// width, height,
// &main::rgbFps1[i], &main::grayFps1[i], 20);
main::liveness[i] = liveness;
int featureId = i;
double d1 = main::msecond();
// Extract a feature when the track changed or the last extraction
// was more than 2 s ago (2 * 1000 ms), and the caller asked for it.
if ((track_id_changed ||
(main::msecond() - main::prevExtractFeatureTime) > 2 * 1000)
&& shouldExtractFeature > 0) {
memset(main::features[i], 0, main::featureSize);
int ret = EF_Extract(0, main::rgbBuffer, width, height, 3, main::rgbFps1,
main::features[i]);
// ret == 1 is treated as success by this code.
if (ret == 1) {
featureCount++;
main::prevExtractFeatureTime = main::msecond();
} else {
featureId = -1;
}
}

// Construct a FacePosition Java object (currently unused — the
// result is never stored or returned; looks like work in progress).
jclass facePositionClass = env->FindClass("com/basic/security/utils/FacePosition");
jmethodID facePositionClassMethodID = env->GetMethodID(facePositionClass, "<init>",
"(I)V");
jobject facePositionObject = env->NewObject(facePositionClass,
facePositionClassMethodID, 5);

// jclass ArrayList_class = env->FindClass("java/util/ArrayList");
// jmethodID ArrayList_add_id = env->GetMethodID(ArrayList_class, "add", "(Ljava/lang/Object;)Z");

// ArrayList_add_id

} catch (exception &e) {
LogUtil::log(e.what());
}
}
// Report how many features were extracted back to the Java side.
setIntValue(env, detectedResult, "featureCount", featureCount);
} else {
// Gray-camera path: detection only, results kept in main::grayFps1.
env->GetByteArrayRegion(bgrDataByteArray, 0, len,
reinterpret_cast<jbyte *>(main::grayBuffer));
int face_nums = THFI_DetectFace(0, main::grayBuffer, 24, width, height, main::grayFps1, 10,
360);
}
int i = 10; // debug anchor; no effect
}
|
|
// JNI entry: runs detection (and optional feature extraction) on an RGB
// frame stored in a file; returns the helper's serialized face string.
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_rgbDetectFace(
        JNIEnv *env, jobject, jstring jModelPath, jstring jRgbFileName, jint width, int height,
        jstring baseFeatureName, int shouldExtractFeature, bool useGrayCamera,
        int detectFaceCount) {
    // fix: the borrowed UTF-8 strings were never released and leaked per call.
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    const char *rgbFileName = env->GetStringUTFChars(jRgbFileName, nullptr);
    const char *featureName = env->GetStringUTFChars(baseFeatureName, nullptr);
    string faces = FaceDetectHelper::getInstance()->detectRealFace(
            modelPath, rgbFileName, nullptr,
            width, height, featureName, shouldExtractFeature,
            useGrayCamera, detectFaceCount);
    env->ReleaseStringUTFChars(baseFeatureName, featureName);
    env->ReleaseStringUTFChars(jRgbFileName, rgbFileName);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    LogUtil::log(faces);
    return env->NewStringUTF(faces.c_str());
}
|
|
// JNI entry: variant of rgbDetectFace that also receives the raw BGR frame
// bytes from Java (detectRealFace2).
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_rgbDetectFace2(
        JNIEnv *env, jobject, jstring jModelPath, jstring jRgbFileName, jint width, int height,
        jstring baseFeatureName, int shouldExtractFeature, bool useGrayCamera,
        int detectFaceCount, jbyteArray bgrArray) {
    // fix: the borrowed UTF-8 strings were never released and leaked per call.
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    const char *rgbFileName = env->GetStringUTFChars(jRgbFileName, nullptr);
    const char *featureName = env->GetStringUTFChars(baseFeatureName, nullptr);
    string faces = FaceDetectHelper::getInstance()->detectRealFace2(env,
            modelPath, rgbFileName, nullptr,
            width, height, featureName, shouldExtractFeature,
            useGrayCamera, detectFaceCount, bgrArray);
    env->ReleaseStringUTFChars(baseFeatureName, featureName);
    env->ReleaseStringUTFChars(jRgbFileName, rgbFileName);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    LogUtil::log(faces);
    return env->NewStringUTF(faces.c_str());
}
|
|
|
// JNI entry: channel-aware variant of rgbDetectFace2 (detectRealFace4).
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_rgbDetectFace4(
        JNIEnv *env, jobject, jint channel, jstring jModelPath, jstring jRgbFileName, jint width, int height,
        jstring baseFeatureName, int shouldExtractFeature, bool useGrayCamera,
        int detectFaceCount, jbyteArray bgrArray) {
    // fix: the borrowed UTF-8 strings were never released and leaked per call.
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    const char *rgbFileName = env->GetStringUTFChars(jRgbFileName, nullptr);
    const char *featureName = env->GetStringUTFChars(baseFeatureName, nullptr);
    string faces = FaceDetectHelper::getInstance()->detectRealFace4(env,
            channel, modelPath, rgbFileName, nullptr,
            width, height, featureName, shouldExtractFeature,
            useGrayCamera, detectFaceCount, bgrArray);
    env->ReleaseStringUTFChars(baseFeatureName, featureName);
    env->ReleaseStringUTFChars(jRgbFileName, rgbFileName);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    LogUtil::log(faces);
    return env->NewStringUTF(faces.c_str());
}
|
|
// JNI entry: NV21-input, channel-aware detection variant (detectRealFace3).
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_rgbDetectFace3(
        JNIEnv *env, jobject, jint channel, jstring jModelPath, jint width, int height,
        jstring baseFeatureName, int shouldExtractFeature, bool useGrayCamera,
        int detectFaceCount, jbyteArray nv21Array) {
    // fix: the borrowed UTF-8 strings were never released and leaked per call.
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    const char *featureName = env->GetStringUTFChars(baseFeatureName, nullptr);
    string faces = FaceDetectHelper::getInstance()->detectRealFace3(channel, env,
            modelPath,
            width, height, featureName, shouldExtractFeature,
            useGrayCamera, detectFaceCount, nv21Array);
    env->ReleaseStringUTFChars(baseFeatureName, featureName);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    LogUtil::log(faces);
    return env->NewStringUTF(faces.c_str());
}
|
|
// JNI entry: extracts a face feature from a JPEG file and returns it as a
// Java byte[].
extern "C" JNIEXPORT jbyteArray
JNICALL
Java_com_basic_security_utils_FaceId_extractFeature(
        JNIEnv *env, jobject, jstring jpgFileName) {
    // fix: release the borrowed UTF-8 string (it leaked on every call).
    const char *fileName = env->GetStringUTFChars(jpgFileName, nullptr);
    jbyteArray feature = FaceDetectHelper::getInstance()->extractFeature(env, fileName);
    env->ReleaseStringUTFChars(jpgFileName, fileName);
    return feature;
}
|
|
// JNI entry: returns the serialized face position found in a JPEG file.
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_facePosition(
        JNIEnv *env, jobject, jstring jpgFileName) {
    // fix: release the borrowed UTF-8 string (it leaked on every call).
    const char *fileName = env->GetStringUTFChars(jpgFileName, nullptr);
    string facePosition = FaceDetectHelper::getInstance()->facePosition(env, fileName);
    env->ReleaseStringUTFChars(jpgFileName, fileName);
    return env->NewStringUTF(facePosition.c_str());
}
|
|
// JNI entry: extracts a face feature from a raw BGR frame already held in a
// Java byte[]; all work is delegated to FaceDetectHelper.
extern "C" JNIEXPORT jbyteArray
JNICALL
Java_com_basic_security_utils_FaceId_extractFeature1(
        JNIEnv *env, jobject, jbyteArray bgrByteArray, int width, int height) {
    FaceDetectHelper *helper = FaceDetectHelper::getInstance();
    return helper->extractFeature(env, bgrByteArray, width, height);
}
|
|
// JNI entry: gray-camera detection; feature extraction is disabled
// (nullptr feature name, shouldExtractFeature = 0).
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_grayDetectFace(
        JNIEnv *env, jobject, jstring jModelPath, jstring jGrayFileName, jint width, int height,
        int detectFaceCount) {
    // fix: the borrowed UTF-8 strings were never released and leaked per call.
    const char *modelPath = env->GetStringUTFChars(jModelPath, nullptr);
    const char *grayFileName = env->GetStringUTFChars(jGrayFileName, nullptr);
    string faces = FaceDetectHelper::getInstance()->detectRealFace(
            modelPath, nullptr,
            grayFileName, width, height,
            nullptr, 0, false, detectFaceCount);
    env->ReleaseStringUTFChars(jGrayFileName, grayFileName);
    env->ReleaseStringUTFChars(jModelPath, modelPath);
    LogUtil::log(faces);
    return env->NewStringUTF(faces.c_str());
}
|
|
// JNI entry: intended to crop the rectangle (x1,y1)-(x2,y2) out of the frame
// JPEG and save it to faceJpgPath. The actual crop/imwrite code is commented
// out upstream, so today this only loads the frame image; behavior kept.
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_cropFace(
        JNIEnv *env, jobject, jstring frameJpgPath, jstring faceJpgPath, int width, int height,
        int x1, int y1, int x2, int y2) {
    try {
        // fix: the borrowed UTF-8 path was never released and leaked per call.
        const char *framePath = env->GetStringUTFChars(frameJpgPath, nullptr);
        cv::Mat img = cv::imread(framePath);
        env->ReleaseStringUTFChars(frameJpgPath, framePath);
        if (!img.empty()) {
            // Cropping disabled upstream:
            /*cv::Rect roi;
            roi.x = x1;
            roi.y = y1;
            roi.width = x2 - x1;
            roi.height = y2 - y1;
            cv::Mat crop = img(roi);
            //env->GetStringUTFChars(faceJpgPath, 0)
            cv::String name;
            cv::imwrite(name, crop);
            */
        }
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
}
|
|
// Copies `len` bytes from a native buffer into a freshly allocated Java
// byte[] and returns it.
jbyteArray as_byte_array(JNIEnv *env, unsigned char *buf, int len) {
    jbyteArray result = env->NewByteArray(len);
    jbyte *signedView = reinterpret_cast<jbyte *>(buf);
    env->SetByteArrayRegion(result, 0, len, signedView);
    return result;
}
|
|
// Copies a Java byte[] into a heap buffer. Ownership transfers to the
// caller, who must delete[] the returned pointer.
unsigned char *as_unsigned_char_array(JNIEnv *env, jbyteArray array) {
    const int count = env->GetArrayLength(array);
    unsigned char *copy = new unsigned char[count];
    env->GetByteArrayRegion(array, 0, count, reinterpret_cast<jbyte *>(copy));
    return copy;
}
|
|
// Persistent id -> feature-blob store backing addFeatureToDb / compareFeatureInDb.
unordered_map<string, shared_ptr<vector<unsigned char>>> featureMap;
|
|
// JNI entry: copies the feature bytes from Java and stores them in
// featureMap under `id` (replacing any existing entry).
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_addFeatureToDb(
        JNIEnv *env, jobject, jstring id, jbyteArray featureBuffer) {
    try {
        // The original guard (`size() < 100 || 1 == 1`) was always true, so
        // there is intentionally no capacity limit; the dead guard and the
        // unused max_size local are removed.
        int len = env->GetArrayLength(featureBuffer);
        auto feature = make_shared<vector<unsigned char>>(len);
        env->GetByteArrayRegion(featureBuffer, 0, len,
                                reinterpret_cast<jbyte *>(feature->data()));
        const char *sid = env->GetStringUTFChars(id, nullptr);
        featureMap[sid] = feature;
        // fix: the borrowed UTF-8 string was never released (leak per call).
        env->ReleaseStringUTFChars(id, sid);
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
}
|
|
|
// Transient id -> feature-blob store backing addFeatureToTempDb / compareFeatureInTempDb.
unordered_map<string, shared_ptr<vector<unsigned char>>> tempFeatureMap;
|
|
// JNI entry: copies the feature bytes from Java and stores them in
// tempFeatureMap under `id` (replacing any existing entry).
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_addFeatureToTempDb(
        JNIEnv *env, jobject, jstring id, jbyteArray featureBuffer) {
    try {
        // The original guard (`size() < 100 || 1 == 1`) was always true, so
        // there is intentionally no capacity limit; the dead guard and the
        // unused max_size local are removed.
        int len = env->GetArrayLength(featureBuffer);
        auto feature = make_shared<vector<unsigned char>>(len);
        env->GetByteArrayRegion(featureBuffer, 0, len,
                                reinterpret_cast<jbyte *>(feature->data()));
        const char *sid = env->GetStringUTFChars(id, nullptr);
        tempFeatureMap[sid] = feature;
        // fix: the borrowed UTF-8 string was never released (leak per call).
        env->ReleaseStringUTFChars(id, sid);
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
}
|
|
// JNI entry: compares two feature blobs with EF_Compare and returns the
// score scaled by 100. Returns 0 when either array is null or on error.
extern "C" JNIEXPORT jfloat
JNICALL
Java_com_basic_security_utils_FaceId_compareFeature(
        JNIEnv *env, jobject, jbyteArray featureBuffer1, jbyteArray featureBuffer2) {
    float score = 0;
    try {
        if (!featureBuffer1 || !featureBuffer2) {
            return score;
        }
        // fix: copy both features into RAII vectors so nothing leaks if
        // EF_Compare or a JNI call throws — the original new[]'d two buffers
        // that leaked on the exception path.
        int len1 = env->GetArrayLength(featureBuffer1);
        int len2 = env->GetArrayLength(featureBuffer2);
        vector<unsigned char> buf1(len1);
        vector<unsigned char> buf2(len2);
        env->GetByteArrayRegion(featureBuffer1, 0, len1, reinterpret_cast<jbyte *>(buf1.data()));
        env->GetByteArrayRegion(featureBuffer2, 0, len2, reinterpret_cast<jbyte *>(buf2.data()));
        score = EF_Compare(buf1.data(), buf2.data()) * 100;
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
    return score;
}
|
|
|
// Scratch buffer for the probe feature in compareFeatureInDb (2560 matches main::featureSize).
unsigned char buf[2560];
|
|
// JNI entry: compares the probe feature against every entry in featureMap
// and returns "<id>,<score>" for the first match with score >= minScore,
// or ",0" when nothing matches.
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_compareFeatureInDb(
        JNIEnv *env, jobject, jbyteArray featureBuffer, jint minScore) {
    float score = 0;
    string result = ",0";
    try {
        // fix: clamp the copy to the actual array length instead of blindly
        // reading 2560 bytes — a shorter Java array would raise a pending
        // ArrayIndexOutOfBoundsException.
        int len = env->GetArrayLength(featureBuffer);
        if (len > 2560) {
            len = 2560;
        }
        env->GetByteArrayRegion(featureBuffer, 0, len, reinterpret_cast<jbyte *>(buf));
        for (auto const &featureItem : featureMap) {
            const string &id = featureItem.first;
            unsigned char *data = featureItem.second->data();
            printf("%s\n", id.c_str());
            score = EF_Compare(buf, data) * 100;
            if (score >= minScore) {
                result = "";
                result.append(id);
                result.append(",");
                result.append(to_string(score));
                break;
            }
        }
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
    return env->NewStringUTF(result.c_str());
}
|
|
|
// Scratch buffer for the probe feature in compareFeatureInTempDb (2560 matches main::featureSize).
unsigned char tempFeatureBuf[2560];
|
|
// JNI entry: compares the probe feature against every entry in
// tempFeatureMap and returns "<id>,<score>" for the first match with
// score >= minScore, or ",0" when nothing matches.
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_compareFeatureInTempDb(
        JNIEnv *env, jobject, jbyteArray featureBuffer, jint minScore) {
    float score = 0;
    string result = ",0";
    try {
        // fix: clamp the copy to the actual array length instead of blindly
        // reading 2560 bytes — a shorter Java array would raise a pending
        // ArrayIndexOutOfBoundsException.
        int len = env->GetArrayLength(featureBuffer);
        if (len > 2560) {
            len = 2560;
        }
        env->GetByteArrayRegion(featureBuffer, 0, len, reinterpret_cast<jbyte *>(tempFeatureBuf));
        for (auto const &featureItem : tempFeatureMap) {
            const string &id = featureItem.first;
            unsigned char *data = featureItem.second->data();
            printf("%s\n", id.c_str());
            score = EF_Compare(tempFeatureBuf, data) * 100;
            if (score >= minScore) {
                result = "";
                result.append(id);
                result.append(",");
                result.append(to_string(score));
                break;
            }
        }
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
    return env->NewStringUTF(result.c_str());
}
|
|
|
// JNI entry: removes the feature stored under `id` from tempFeatureMap.
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_removeFeatureFromTempDb(JNIEnv *env, jobject, jstring id) {
    try {
        // fix: release the borrowed UTF-8 string (it leaked on every call).
        const char *sid = env->GetStringUTFChars(id, nullptr);
        tempFeatureMap.erase(sid);
        env->ReleaseStringUTFChars(id, sid);
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
}
|
|
// JNI entry: removes the feature stored under `id` from featureMap.
extern "C" JNIEXPORT void
JNICALL
Java_com_basic_security_utils_FaceId_removeFeatureFromDb(JNIEnv *env, jobject, jstring id) {
    try {
        // fix: release the borrowed UTF-8 string (it leaked on every call).
        const char *sid = env->GetStringUTFChars(id, nullptr);
        featureMap.erase(sid);
        env->ReleaseStringUTFChars(id, sid);
    } catch (exception &e) {
        LogUtil::log(e.what());
    }
}
|
|
//static double r2d(AVRational r) {
|
// return r.num == 0 || r.den == 0 ? 0 : (double) r.num / (double) r.den;
|
//}
|
|
//当前时间戳 clock
|
long long GetNowMs() {
|
struct timeval tv;
|
gettimeofday(&tv, NULL);
|
int sec = tv.tv_sec % 360000;
|
long long t = sec * 1000 + tv.tv_usec / 1000;
|
return t;
|
}
|
|
// Standard JNI bootstrap hook, called when System.loadLibrary loads this
// .so; reports the JNI version the library requires.
extern "C"
JNIEXPORT
jint JNI_OnLoad(JavaVM *vm, void *res) {
// ffmpeg JavaVM registration, disabled along with the ffmpeg includes:
//av_jni_set_java_vm(vm, 0);
return JNI_VERSION_1_4;
}
|
|
|
extern "C"
|
JNIEXPORT void JNICALL
|
Java_com_basic_security_widget_H264Player_Open(JNIEnv *env, jobject instance, jstring url_,
|
jobject surface) {
|
const char *path = env->GetStringUTFChars(url_, 0);
|
// av_register_all();
|
// avformat_network_init();
|
// avcodec_register_all();
|
// AVFormatContext *ic = NULL;
|
// int re = avformat_open_input(&ic, path, 0, 0);
|
// if (re != 0) { return; }
|
// re = avformat_find_stream_info(ic, 0);
|
// if (re != 0) {}
|
// int fps = 0;
|
// int videoStream = 0;
|
// int audioStream = 1;
|
// for (int i = 0; i < ic->nb_streams; i++) {
|
// AVStream *as = ic->streams[i];
|
// if (as->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
// videoStream = i;
|
// fps = r2d(as->avg_frame_rate);
|
// } else if (as->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
|
// audioStream = i;
|
// }
|
// }
|
// audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
|
//
|
// AVCodec *codec = avcodec_find_decoder(ic->streams[videoStream]->codecpar->codec_id);
|
// codec = avcodec_find_decoder_by_name("h264_mediacodec");
|
// if (!codec) { return; }
|
// AVCodecContext *vc = avcodec_alloc_context3(codec);
|
// avcodec_parameters_to_context(vc, ic->streams[videoStream]->codecpar);
|
// vc->thread_count = 8;
|
// re = avcodec_open2(vc, 0, 0);
|
// if (re != 0) { return; }
|
// AVPacket *pkt = av_packet_alloc();
|
// AVFrame *frame = av_frame_alloc();
|
// long long start = GetNowMs();
|
// int frameCount = 0;
|
// SwsContext *vctx = NULL;
|
// int outWidth = 640;
|
// int outHeight = 480;
|
// char *rgb = new char[1920 * 1080 * 4];
|
// char *pcm = new char[48000 * 4 * 2];
|
// ANativeWindow *nwin = ANativeWindow_fromSurface(env, surface);
|
// ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
|
// ANativeWindow_Buffer wbuf;
|
//
|
// for (;;) {
|
// if (GetNowMs() - start >= 3000) {
|
// start = GetNowMs();
|
// frameCount = 0;
|
// }
|
//
|
// int re = av_read_frame(ic, pkt);
|
// if (re != 0) {
|
// int pos = 20 * r2d(ic->streams[videoStream]->time_base);
|
// av_seek_frame(ic, videoStream, pos, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
|
// continue;
|
// }
|
// AVCodecContext *cc = vc;
|
// re = avcodec_send_packet(cc, pkt);
|
// //清理
|
// int p = pkt->pts;
|
// av_packet_unref(pkt);
|
// if (re != 0) {
|
// continue;
|
// }
|
// for (;;) {
|
// re = avcodec_receive_frame(cc, frame);
|
// if (re != 0) {
|
// break;
|
// }
|
// if (cc == vc) {
|
// frameCount++;
|
// vctx = sws_getCachedContext(vctx,
|
// frame->width,
|
// frame->height,
|
// (AVPixelFormat) frame->format,
|
// outWidth,
|
// outHeight,
|
// AV_PIX_FMT_RGBA,
|
// SWS_FAST_BILINEAR,
|
// 0, 0, 0
|
// );
|
// if (!vctx) {}
|
// else {
|
// uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
|
// data[0] = (uint8_t *) rgb;
|
// int lines[AV_NUM_DATA_POINTERS] = {0};
|
// lines[0] = outWidth * 4;
|
// int h = sws_scale(vctx,
|
// (const uint8_t **) frame->data,
|
// frame->linesize, 0,
|
// frame->height,
|
// data, lines);
|
//// LOGW("sws_scale = %d",h);
|
// if (h > 0) {
|
// ofstream ofs;
|
// ofs.open("/sdcard/ffmpeg.rgb", ios::binary);
|
// ofs.write((char *) rgb, outWidth * outHeight * 4);
|
// ofs.close();
|
//
|
// ANativeWindow_lock(nwin, &wbuf, 0);
|
// uint8_t *dst = (uint8_t *) wbuf.bits;
|
// memcpy(dst, rgb, outWidth * outHeight * 4);
|
// ANativeWindow_unlockAndPost(nwin);
|
// }
|
// }
|
//
|
// } else {}
|
// }
|
// }
|
// delete rgb;
|
// delete pcm;
|
// avformat_close_input(&ic);
|
env->ReleaseStringUTFChars(url_, path);
|
|
}
|
|
|
// Reads an entire file into a malloc'd buffer.
// On success: returns the buffer (caller must free()) and sets *fsize to the
// file length. On failure: returns NULL and sets *fsize to 0.
// fixes: the original crashed on a missing file (no fopen NULL check),
// ignored fseek/ftell/fread failures, and took a non-const filename.
unsigned char* readFileToByteArray(const char* fileName, int* fsize) {
    *fsize = 0;
    FILE *f = fopen(fileName, "rb");
    if (f == NULL) {
        return NULL;
    }
    if (fseek(f, 0, SEEK_END) != 0) { fclose(f); return NULL; }
    long size = ftell(f);
    if (size < 0) { fclose(f); return NULL; }
    if (fseek(f, 0, SEEK_SET) != 0) { fclose(f); return NULL; }

    unsigned char *data = (unsigned char*) malloc((size_t) size);
    if (data == NULL) { fclose(f); return NULL; }
    if (size > 0 && fread(data, (size_t) size, 1, f) != 1) {
        free(data);
        fclose(f);
        return NULL;
    }
    fclose(f);
    *fsize = (int) size;
    return data;
}
|
|
// Ad-hoc smoke test for readFileToByteArray.
// fix: sFile1 was read while uninitialized (undefined behavior — a garbage
// pointer was passed to fopen). Initialize it and skip the read when no
// path is configured.
void test() {
    int nSize1 = 0;
    char* sFile1 = NULL;  // was uninitialized
    if (sFile1 != NULL) {
        readFileToByteArray(sFile1, &nSize1);
    }
}
|
|
|
#define MAXRECVSTRING 255 /* Longest string to receive */
|
|
/* External error handling function: logs the message to stdout. Despite the
 * name it does NOT exit, so callers continue after an error.
 * fix: parameter made const — every caller passes a string literal, which
 * does not legally bind to `char *` in standard C++. */
void DieWithError(const char *errorMessage) {
    printf("%s\n", errorMessage);
}
|
|
#include <stdio.h> /* for printf() and fprintf() */
|
#include <sys/socket.h> /* for socket(), connect(), sendto(), and recvfrom() */
|
#include <arpa/inet.h> /* for sockaddr_in and inet_addr() */
|
#include <stdlib.h> /* for atoi() and exit() */
|
#include <string.h> /* for memset() */
|
#include <unistd.h> /* for close() */
|
|
|
// State shared across receiveBroadcast() calls so the UDP socket is created
// and bound only once per process.
bool receiveBroadcastFirst = true; // true until the socket has been bound

int sock; // UDP socket; never closed (lives for the process)
struct sockaddr_in broadcastAddr; /* Broadcast Address */
char recvString[MAXRECVSTRING+1]; /* Buffer for received string */
|
// JNI entry: blocks until a single UDP datagram arrives on broadcastPort1
// and returns its payload as a Java string. The socket is created and bound
// on the first call and reused afterwards (see receiveBroadcastFirst).
extern "C" JNIEXPORT jstring
JNICALL
Java_com_basic_security_utils_FaceId_receiveBroadcast(JNIEnv *env, jobject, jchar broadcastPort1) { /* Socket */

    unsigned short broadcastPort = broadcastPort1; /* Port */
    int recvStringLen; /* Length of received string */

    if (receiveBroadcastFirst) {
        /* Create a best-effort datagram socket using UDP */
        if ((sock = socket(PF_INET, SOCK_DGRAM, IPPROTO_UDP)) < 0)
            DieWithError("socket() failed");

        /* Construct bind structure */
        memset(&broadcastAddr, 0, sizeof(broadcastAddr)); /* Zero out structure */
        broadcastAddr.sin_family = AF_INET; /* Internet address family */
        broadcastAddr.sin_addr.s_addr = htonl(INADDR_ANY); /* Any incoming interface */
        broadcastAddr.sin_port = htons(broadcastPort); /* Broadcast port */

        /* Bind to the broadcast port */
        if (::bind(sock, (struct sockaddr *) &broadcastAddr, sizeof(broadcastAddr)) < 0)
            DieWithError("bind() failed");
        receiveBroadcastFirst = false;
    }

    /* Receive a single datagram from the server (blocking). */
    recvStringLen = recvfrom(sock, recvString, MAXRECVSTRING, 0, NULL, 0);
    if (recvStringLen < 0) {
        DieWithError("recvfrom() failed");
        // fix: DieWithError does not exit, so the original went on to index
        // recvString with a negative length (undefined behavior). Report an
        // empty string instead.
        recvStringLen = 0;
    }

    recvString[recvStringLen] = '\0';
    printf("Received: %s\n", recvString); /* Print the received string */

    // close(sock);
    // exit(0);
    return env->NewStringUTF(recvString);
}
|
|
// Converts an NV21 frame (Y plane followed by an interleaved VU plane) into
// planar I420 using libyuv. Src_data holds the NV21 input; Dst_data receives
// the I420 output and must hold at least width*height*3/2 bytes.
void cnv212yuv420(unsigned char *Src_data, unsigned char *Dst_data,
                  int src_width, int src_height) {

    const int lumaBytes = src_width * src_height;
    const int chromaBytes = (src_width >> 1) * (src_height >> 1);

    // Source planes: Y first, then the interleaved VU plane.
    unsigned char *srcY = Src_data;
    unsigned char *srcVU = Src_data + lumaBytes;
    const int srcStrideY = src_width;
    const int srcStrideVU = src_width;

    // Destination planes: Y, U, V laid out back to back.
    unsigned char *dstY = Dst_data;
    unsigned char *dstU = Dst_data + lumaBytes;
    unsigned char *dstV = dstU + chromaBytes;
    const int dstStrideY = src_width;
    const int dstStrideChroma = src_width >> 1;

    libyuv::NV21ToI420(srcY, srcStrideY,
                       srcVU, srcStrideVU,
                       dstY, dstStrideY,
                       dstU, dstStrideChroma,
                       dstV, dstStrideChroma,
                       src_width, src_height);
}
|
|
// JNI entry: compresses an NV21 frame to JPEG via libjpeg-turbo.
//   jNv21     - input NV21 bytes
//   jI420     - scratch buffer; receives the I420 conversion of jNv21
//   subsample - TJSAMP_* chroma subsampling constant
//   jpeg      - destination buffer for the JPEG bytes (pre-sized by caller)
//   flags     - TJFLAG_* bitmask forwarded to tjCompressFromYUV
// Returns the compressed size in bytes on success, 1001 if tjInitCompress
// fails, or tjCompressFromYUV's negative return code on compression failure.
//
// NOTE(review): jpegBuf points into a pinned Java array, but
// tjCompressFromYUV may realloc/replace the buffer it is given unless
// TJFLAG_NOREALLOC is passed, and with NOREALLOC it expects compressSize to
// be pre-set to the destination capacity (here it is uninitialized) — verify
// `flags` usage against the turbojpeg API docs.
extern "C"
JNIEXPORT jint JNICALL Java_com_basic_security_utils_FaceId_yuv2jpeg
(JNIEnv * env, jclass jclazz, jbyteArray jNv21,jbyteArray jI420, jint width, jint height,
jint subsample, jbyteArray jpeg,jint quality,jint flags){

tjhandle handle = NULL;
int padding = 1; // 1 or 4 both work, but it must not be 0
int ret = 0;

if((handle=tjInitCompress())==NULL){
LOGI("tjInitCompress error");
return 1001;
}

// if(env->GetArrayLength(jpeg) < (jsize)tjBufSizeYUV2(width, padding, height, subsample)){
// LOGI("Destination buffer is not large enough");
// return 1002;
// }

// Pin the Java arrays (no copies); no JNI calls are allowed until released.
unsigned char *srcBuf = (unsigned char *)env->GetPrimitiveArrayCritical(jNv21, 0);
unsigned char *i420Buf = (unsigned char *)env->GetPrimitiveArrayCritical(jI420, 0);

// Convert NV21 -> I420 in place in the scratch array.
cnv212yuv420(srcBuf,i420Buf,width,height);

unsigned char *jpegBuf = (unsigned char *)env->GetPrimitiveArrayCritical(jpeg, 0);
unsigned long compressSize;

ret = tjCompressFromYUV(handle, i420Buf, width, padding, height,
subsample, &jpegBuf, &compressSize, quality, flags);

tjDestroy(handle);
env->ReleasePrimitiveArrayCritical(jNv21, srcBuf, 0);
env->ReleasePrimitiveArrayCritical(jI420, i420Buf, 0);
env->ReleasePrimitiveArrayCritical(jpeg, jpegBuf, 0);
// tjFree(jpegBuf);
// tjFree(srcBuf);
jpegBuf=srcBuf=NULL;

if (ret < 0) {
LOGI("compress to jpeg failed: %d\n",ret);
return ret;
}
// NOTE(review): unsigned long -> int narrowing; fine for realistic JPEG
// sizes but worth confirming.
ret = compressSize;
return ret;
}
|