package com.basic.security.utils;

import android.content.ContextWrapper;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.text.TextUtils;
import android.util.DisplayMetrics;

import com.basic.security.activity.MainActivity;
import com.basic.security.base.BaseApplication;
import com.basic.security.manager.BaseSettingManager;
import com.basic.security.manager.DetectResultManager;
import com.basic.security.manager.FaceTitleAndTipsManager;
import com.basic.security.utils.socket.CameraYuvSocketServer;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
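
/**
 * Processes camera frames for one camera (RGB or gray/IR, selected by {@code rgb_gray_camera}):
 * invokes the native FaceId detection/feature-extraction entry points, parses the delimited
 * result string into a {@link DetectedResult}, and notifies the registered {@link DetectListener}s.
 */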
public class ProcessImageAndDrawResults {

    public static int shouldExtractFeatureFirstTimeInWaitFace = 2;
    static List<DetectListener> detectListeners = new ArrayList<>();
    public byte[] originalCameraData;
    public int rgb_gray_camera;
    public int mStopping;
    public int mStopped;
    public byte[] mYUVData;
    public byte[] mRGBData;
    public int mImageWidth, mImageHeight;
    public boolean rotated;
    public long lastTimeDetectedFace = 0;
    public long lastTimeSuccessExtractedFeature = 0;
    // detection result of the previous frame, used in parseResult to decide whether the result changed
    public DetectedResult previewDetectResult = new DetectedResult();
    // buffers handed to the feature-extraction pass (copies or direct references), guarded by copyForFeatureLock
    public byte[] bgrArrayCopyForFeature = null;
    public byte[] nv21ArrayCopyForFeature = null;
    public Object copyForFeatureLock = new Object();
    BitmapHolder bitmapHolder = new BitmapHolder();
    int shoudExractFeatureCount = 0;
    int shouldExtractFeature = 0;
    boolean shouldExtractFeatureThreadFinished = true;

    public ProcessImageAndDrawResults(int rgb_gray_camera) {
        this.rgb_gray_camera = rgb_gray_camera;
    }

    public static void addDetectedListener(DetectListener detectListener) {
        detectListeners.add(detectListener);
    }

    public void setRgb_gray_camera(int rgb_gray_camera) {
        this.rgb_gray_camera = rgb_gray_camera;
    }
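
    /**
     * Entry point for a full frame from this camera. Detection for the RGB camera is
     * serialized through {@code BaseApplication.detectLock}; frames are ignored while
     * {@code mRGBData} is null.
     */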
    public void start(Bitmap frameBitmap, byte[] bgrArray, byte[] nv21Array) {
        if (mRGBData == null) {
            return;
        }
        if (rgb_gray_camera == Constants.RGB_CAMERA) {
            BaseApplication.getApplication().detectLock.lock();
        }
        try {
            detectFace(rgb_gray_camera, frameBitmap, bgrArray, nv21Array);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (rgb_gray_camera == Constants.RGB_CAMERA) {
                BaseApplication.getApplication().detectLock.unlock();
            }
        }
    }
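
    /**
     * Variant of {@link #start} that only receives the NV21 buffer; delegates to
     * {@link #detectFace1} under the same locking rules.
     */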
    public void start1(byte[] nv21Array) {
        if (rgb_gray_camera == Constants.RGB_CAMERA) {
            BaseApplication.getApplication().detectLock.lock();
        }
        try {
            detectFace1(rgb_gray_camera, nv21Array);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (rgb_gray_camera == Constants.RGB_CAMERA) {
                BaseApplication.getApplication().detectLock.unlock();
            }
        }
    }
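
    /**
     * Crops the face region out of the frame: the detected rectangle is enlarged by 80%,
     * clamped to the image bounds, cut out via {@link BitmapHolder}, and returned as JPEG
     * bytes. Decodes {@code frameJpgPath} when no {@code frameBitmap} is supplied.
     */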
    private byte[] cropImage(int x1, int y1, int x2, int y2, String frameJpgPath, Bitmap frameBitmap) {
        float ratio = 0.8f;
        int enlargeWidth = (int) (ratio * (x2 - x1));
        int enlargeHeight = (int) (ratio * (y2 - y1));
        x1 = x1 - enlargeWidth / 2;
        x2 = x2 + enlargeWidth / 2;
        y1 = y1 - enlargeHeight / 2;
        y2 = y2 + enlargeHeight / 2;
        if (x1 < 0) {
            x1 = 0;
        }
        if (y1 < 0) {
            y1 = 0;
        }
        if (x2 > mImageWidth) {
            x2 = mImageWidth;
        }
        if (y2 > mImageHeight) {
            y2 = mImageHeight;
        }
        int width = (x2 - x1);
        int height = (y2 - y1);
        if (width > mImageWidth) {
            width = mImageWidth;
        }
        if (height > mImageHeight) {
            height = mImageHeight;
        }
        Bitmap bitmap = null;
        if (frameBitmap != null) {
            bitmap = frameBitmap;
        } else {
            bitmap = BitmapFactory.decodeFile(frameJpgPath);
        }

        byte[] faceData = null;
        if (bitmap != null) {
            bitmapHolder.storeBitmap(bitmap);
            bitmapHolder.cropBitmap(x1, y1, x2, y2);
            Bitmap faceBitmap = bitmapHolder.getBitmapAndFree();
            if (faceBitmap != null) {
                ByteArrayOutputStream stream = new ByteArrayOutputStream();
                faceBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
                faceData = stream.toByteArray();
                if (!faceBitmap.isRecycled()) {
                    faceBitmap.recycle();
                }
            }
        }

        return faceData;
    }
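
    /**
     * Fills a {@link FacePosition} with the detected rectangle enlarged by 50% and clamped
     * to the image bounds, plus the face-to-frame area ratio.
     */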
    private void setPosition(FacePosition facePosition, int x1, int y1, int x2, int y2) {
        float ratio = 0.5f;
        int enlargeWidth = (int) (ratio * (x2 - x1));
        int enlargeHeight = (int) (ratio * (y2 - y1));
        x1 = x1 - enlargeWidth / 2;
        x2 = x2 + enlargeWidth / 2;
        y1 = y1 - enlargeHeight / 2;
        y2 = y2 + enlargeHeight / 2;
        if (x1 < 0) {
            x1 = 0;
        }
        if (y1 < 0) {
            y1 = 0;
        }
        if (x2 > mImageWidth) {
            x2 = mImageWidth;
        }
        if (y2 > mImageHeight) {
            y2 = mImageHeight;
        }
        int width = (x2 - x1);
        int height = (y2 - y1);
        if (width > mImageWidth) {
            width = mImageWidth;
        }
        if (height > mImageHeight) {
            height = mImageHeight;
        }
        facePosition.x1 = x1;
        facePosition.y1 = y1;
        facePosition.x2 = x2;
        facePosition.y2 = y2;
        // the ratio is computed against a fixed 640x480 frame, not mImageWidth x mImageHeight
        facePosition.faceRatio = (y2 - y1) * (x2 - x1) * 1.0 / (640 * 480);
    }
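
    /**
     * Builds a {@link FacePosition} from one parsed detection (rectangle, score, tracker id,
     * liveness flag, pose angles and, when a feature was extracted, the feature name) and
     * appends it to {@code detectedResult.facePositions}.
     */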
    public void addDetectedResult(DetectedResult detectedResult, int x1, int y1, int x2, int y2, float score,
                                  String frameJpgPath, String faceJpgPath, long trackerId, boolean liveness,
                                  String resultText, int featureId, String featureName,
                                  int yaw, int pitch, int roll) {
        try {
            FacePosition facePosition = new FacePosition();
            facePosition.detectedResult = detectedResult;
            setPosition(facePosition, x1, y1, x2, y2);
            facePosition.trackerId = trackerId;
            facePosition.liveness = liveness;
            facePosition.score = score;
            facePosition.yaw = yaw;
            facePosition.pitch = pitch;
            facePosition.roll = roll;
            facePosition.createTime = System.currentTimeMillis();
            if (featureId != -1) {
                facePosition.featureName = featureName;
            }
            detectedResult.facePositions.add(facePosition);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
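
    /**
     * Runs detection on one frame. For the RGB camera with {@code Constants.useBgrArray} enabled
     * it calls the native {@code rgbDetectFace2} (forcing feature extraction on the first detection
     * after a 2 s gap) and parses the result; when the previous extraction pass has finished and a
     * fresh feature is needed (the initial wait-face extractions, a last successful extraction more
     * than a second old, or a missing face title), it copies the frame buffers and runs
     * {@link #extractFeature} either inline or on the shared executor. Without {@code useBgrArray}
     * it falls back to {@code rgbDetectFace}; the gray camera uses {@code grayDetectFace}.
     */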
    private void detectFace(int rgbGrayCamera, Bitmap frameBitmap, byte[] bgrArray, byte[] nv21Array) throws Exception {
        String outputFileName = "";
        if (rgbGrayCamera == Constants.RGB_CAMERA) {
            outputFileName = "rgb.jpg";
        } else {
            outputFileName = "gray.jpg";
        }
        String filesPath = new ContextWrapper(FaceId.activity).getFilesDir().getAbsolutePath();
        File jpgFile = new File(filesPath, "image_" + outputFileName);
        File featureFile = new File(filesPath, "feature");
        File targetFile = new File(filesPath, outputFileName);
        String resultsStr = "";
        String resultText = "";
        String[] results = null;
        if (rgbGrayCamera == Constants.RGB_CAMERA) {
            shoudExractFeatureCount++;
            shouldExtractFeature = 0;
            if (shoudExractFeatureCount % 4 == 0) {
                shoudExractFeatureCount = 0;
            }
            if (Constants.useBgrArray) {
                // extract a feature on the first detection after no face was seen for 2 seconds
                int shouldExtractFeatureInFirstDetect = 0;
                if (System.currentTimeMillis() - lastTimeDetectedFace > 2 * 1000) {
                    shouldExtractFeatureInFirstDetect = 1;
                }
                //shouldExtractFeatureInFirstDetect = 1;
                resultsStr = FaceId.instance.rgbDetectFace2(filesPath, targetFile.getAbsolutePath(),
                        mImageWidth, mImageHeight, featureFile.getAbsolutePath(),
                        shouldExtractFeatureInFirstDetect, Constants.USE_GRAY_CAMERA,
                        BaseSettingManager.allowMultipleFace() ? 20 : 1, bgrArray
                );

                long begin = System.currentTimeMillis();
                final DetectedResult detectedResult1 = parseResult(resultsStr, rgbGrayCamera, nv21Array,
                        shouldExtractFeatureInFirstDetect > 0, shouldExtractFeatureInFirstDetect);
                boolean needFaceTitle = false;
                if (detectedResult1.facePositions.size() > 0) {
                    MainActivity mainActivity = BaseApplication.getApplication().activity;
                    if (mainActivity.currentFragment == mainActivity.fragment_su_auto_wait_face
                            || mainActivity.currentFragment == mainActivity.fragment_door_access_result) {
                        try {
                            FaceTitleAndTips faceTitleAndTips = FaceTitleAndTipsManager.getFaceTitleMap(detectedResult1.facePositions.get(0), true);
                            if ("".equals(faceTitleAndTips.faceTitle())) {
                                needFaceTitle = true;
                            }
                        } catch (Exception e) {
                            System.out.println("ProcessImageAndDrawResults.detectFace " + e.getMessage());
                        }
                    }
                    lastTimeDetectedFace = System.currentTimeMillis();
                    boolean extractedFeature = false;
                    for (FacePosition facePosition : detectedResult1.facePositions) {
                        if (!TextUtils.isEmpty(facePosition.featureName)) {
                            extractedFeature = true;
                            lastTimeSuccessExtractedFeature = System.currentTimeMillis();
                            break;
                        }
                    }
                    if (shouldExtractFeatureInFirstDetect > 0 && !extractedFeature) {
                        // reset so the next frame counts as a first detection and extraction is forced again
                        lastTimeDetectedFace = 0;
                    }
                } else {
                    BaseApplication.getApplication().activity.fragment_camera.hideAllFaceRects();
                    lastTimeDetectedFace = 0;
                    // lastTimeDetectedFace was just reset, so this condition is always true
                    // and the face title is cleared as soon as no face is detected
                    if (System.currentTimeMillis() - lastTimeDetectedFace > 2 * 1000) {
                        FaceTitleAndTipsManager.clearFaceTitle();
                    }
                }

                if (shouldExtractFeatureThreadFinished &&
                        shouldExtractFeatureInFirstDetect == 0 &&
                        resultsStr.length() > 0 &&
                        ((shouldExtractFeatureFirstTimeInWaitFace > 0 || (shouldExtractFeature > 0)) ||
                                ((System.currentTimeMillis() - lastTimeSuccessExtractedFeature) > 1000) ||
                                needFaceTitle)
                ) {
                    shouldExtractFeatureFirstTimeInWaitFace--;
                    if (bgrArrayCopyForFeature == null || bgrArrayCopyForFeature.length != bgrArray.length) {
                        bgrArrayCopyForFeature = new byte[bgrArray.length];
                    }
                    if (nv21ArrayCopyForFeature == null || nv21ArrayCopyForFeature.length != nv21Array.length) {
                        nv21ArrayCopyForFeature = new byte[nv21Array.length];
                    }
                    if (BaseApplication.getApplication().activity.needCopyCameraData()) {
                        synchronized (copyForFeatureLock) {
                            System.arraycopy(bgrArray, 0, bgrArrayCopyForFeature, 0, bgrArrayCopyForFeature.length);
                            System.arraycopy(nv21Array, 0, nv21ArrayCopyForFeature, 0, nv21ArrayCopyForFeature.length);
                        }
                    } else {
                        bgrArrayCopyForFeature = bgrArray;
                        nv21ArrayCopyForFeature = nv21Array;
                    }
                    if (BaseApplication.getApplication().activity.needCopyCameraData()) {
                        extractFeature(filesPath, targetFile, featureFile, rgbGrayCamera, detectedResult1);
                    } else {
                        BaseApplication.getApplication().executorService.execute(new Runnable() {
                            @Override
                            public void run() {
                                extractFeature(filesPath, targetFile, featureFile, rgbGrayCamera, detectedResult1);
                            }
                        });
                    }
                }
                shouldExtractFeature = 0;
            } else {
                resultsStr = FaceId.instance.rgbDetectFace(filesPath, targetFile.getAbsolutePath(),
                        mImageWidth, mImageHeight, featureFile.getAbsolutePath(),
                        shouldExtractFeature, Constants.USE_GRAY_CAMERA,
                        BaseSettingManager.allowMultipleFace() ? 20 : 1
                );
            }
        } else {
            resultsStr = FaceId.instance.grayDetectFace(filesPath, targetFile.getAbsolutePath(), mImageWidth, mImageHeight,
                    BaseSettingManager.allowMultipleFace() ? 20 : 1);
        }
    }
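
    /**
     * Second native pass ({@code rgbDetectFace4}) over the copied BGR buffer with feature
     * extraction forced on. Runs inside {@code copyForFeatureLock}, the same lock that guards
     * the buffer copies in {@link #detectFace}, and records the time of the last successful
     * extraction.
     */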
    private void extractFeature(String filesPath, File targetFile, File featureFile, int rgbGrayCamera, DetectedResult detectedResult1) {
        shouldExtractFeatureThreadFinished = false;
        try {
            synchronized (copyForFeatureLock) {
                String resultsStr = FaceId.instance.rgbDetectFace4(1, filesPath, targetFile.getAbsolutePath(),
                        mImageWidth, mImageHeight, featureFile.getAbsolutePath(),
                        1, Constants.USE_GRAY_CAMERA,
                        BaseSettingManager.allowMultipleFace() ? 20 : 1, bgrArrayCopyForFeature);
                if (resultsStr.length() > 0) {
                    DetectedResult detectedResult2 = parseResult(resultsStr, rgbGrayCamera, nv21ArrayCopyForFeature, true, 1, detectedResult1);
                    for (FacePosition facePosition : detectedResult2.facePositions) {
                        if (!TextUtils.isEmpty(facePosition.featureName)) {
                            lastTimeSuccessExtractedFeature = System.currentTimeMillis();
                            break;
                        }
                    }
                }
            }
        } finally {
            // always re-arm the flag so a failure here cannot block future extraction passes
            shouldExtractFeatureThreadFinished = true;
        }
    }
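
    /**
     * NV21-only detection path: calls the native {@code rgbDetectFace3} (or {@code grayDetectFace}
     * for the gray camera) and parses the result. Feature extraction is requested on every RGB frame.
     */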
    private void detectFace1(int rgbGrayCamera, byte[] nv21Array) throws Exception {
        String outputFileName = "";
        if (rgbGrayCamera == Constants.RGB_CAMERA) {
            outputFileName = "rgb.jpg";
        } else {
            outputFileName = "gray.jpg";
        }
        String filesPath = new ContextWrapper(FaceId.activity).getFilesDir().getAbsolutePath();
        File jpgFile = new File(filesPath, "image_" + outputFileName);
        File featureFile = new File(filesPath, "feature");
        File targetFile = new File(filesPath, outputFileName);
        String resultsStr = "";
        if (rgbGrayCamera == Constants.RGB_CAMERA) {
            shoudExractFeatureCount++;
            shouldExtractFeature = 0;
            if (shoudExractFeatureCount % 4 == 0) {
                shouldExtractFeature = 1;
                shoudExractFeatureCount = 0;
            }
            // the counter-based gating above is overridden: a feature is extracted on every frame
            shouldExtractFeature = 1;
            resultsStr = FaceId.instance.rgbDetectFace3(0, filesPath,
                    mImageWidth, mImageHeight, featureFile.getAbsolutePath(),
                    shouldExtractFeature, Constants.USE_GRAY_CAMERA,
                    BaseSettingManager.allowMultipleFace() ? 20 : 1, nv21Array
            );
            parseResult(resultsStr, rgbGrayCamera, nv21Array, false, shouldExtractFeature);
        } else {
            resultsStr = FaceId.instance.grayDetectFace(filesPath, targetFile.getAbsolutePath(), mImageWidth, mImageHeight,
                    BaseSettingManager.allowMultipleFace() ? 20 : 1);
        }
    }

    public DetectedResult parseResult(String resultsStr, int rgbGrayCamera, byte[] nv21Array, boolean hasFeature, int shouldExtractFeature) {
        return parseResult(resultsStr, rgbGrayCamera, nv21Array, hasFeature, shouldExtractFeature, null);
    }
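
    /**
     * Parses the native result string: faces are separated by {@code '|'} and each face is a
     * 13-field comma-separated record (rectangle, score, tracker id, liveness text, feature
     * id/path, yaw/pitch/roll). Builds a {@link DetectedResult}, switches the UI out of the home
     * fragment when faces are present, forwards the result to {@link CameraYuvSocketServer}, and
     * notifies all registered {@link DetectListener}s. When {@code previewDetectResult1} is given,
     * its tracker ids are carried over to the corresponding positions of the new result.
     */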
    public DetectedResult parseResult(String resultsStr, int rgbGrayCamera, byte[] nv21Array, boolean hasFeature, int shouldExtractFeature, DetectedResult previewDetectResult1) {
        String resultText = "";
        String[] results = resultsStr.split("\\|");
        DisplayMetrics displayMetrics = new DisplayMetrics();
        FaceId.activity.getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
        DetectedResult detectedResult = null;
        if (BaseApplication.getApplication().activity.needCopyCameraData()) {
            detectedResult = DetectResultManager.getDetectResultFromCache(nv21Array); //new DetectedResult();
        } else {
            detectedResult = new DetectedResult();
            detectedResult.nv21Array = nv21Array;
        }
        // detectedResult.nv21Array = nv21Array;
        detectedResult.width = mImageWidth;
        detectedResult.height = mImageHeight;
        detectedResult.shouldExtractFeature = shouldExtractFeature > 0;
        // resultText is still empty here; the per-face text is only built in the loop below
        detectedResult.resultText = resultText;

        for (String result : results) {
            if (result.contains(",")) {
                String[] elem = result.split(",", -1);
                if (elem.length == 13) {
                    int left = Integer.parseInt(elem[0]);
                    int top = Integer.parseInt(elem[1]);
                    int right = Integer.parseInt(elem[2]);
                    int bottom = Integer.parseInt(elem[3]);
                    float score = Float.parseFloat(elem[4]);
                    long trackerId = Long.parseLong(elem[5]);
                    resultText += "" + elem[7] + "\r\n" + elem[5] + "\r\n" + elem[0] + "," + elem[1] + "," + elem[2] + "," + elem[3] + "\r\n" + elem[4] + "\r\n";
                    if (rgbGrayCamera == Constants.RGB_CAMERA) {
                        String faceJpgPath = new File(new ContextWrapper(FaceId.activity).getFilesDir().getAbsolutePath(), "image_rgb_face.jpg").getAbsolutePath();
                        int featureId = Integer.parseInt(elem[8]);
                        String featurePath = elem[9];
                        if (featurePath != null && featurePath.length() > 5 && featureId != -1) {
                            detectedResult.featureCount++;
                        }
                        int yaw = Integer.parseInt(elem[10]);
                        int pitch = Integer.parseInt(elem[11]);
                        int roll = Integer.parseInt(elem[12]);
                        addDetectedResult(detectedResult, left, top, right, bottom, score,
                                null, faceJpgPath, trackerId,
                                elem[7].equals("活体"), // "活体" = live face, i.e. the liveness check passed
                                resultText, featureId, featurePath,
                                yaw, pitch, roll);
                    }
                } else {
                    resultText += "[" + result.substring(0, result.lastIndexOf(',')) + "]="
                            + result.substring(result.lastIndexOf(',') + 1) + "\r\n";
                }
            }
        }
        if (detectedResult.facePositions.size() > 0) {
            MainActivity mainActivity = BaseApplication.getApplication().activity;
            if (mainActivity.currentFragment == mainActivity.fragment_home) {
                mainActivity.showFragment(mainActivity.fragment_su_auto_wait_face);
            }
            MainActivity.lastTimeDetectedFace = System.currentTimeMillis();
            mainActivity.serialDeviceOpenLamp(20);
            // the result counts as new when the face count or any tracker id differs from the previous frame
            boolean isNewDetectResult = false;
            if (detectedResult.facePositions.size() != previewDetectResult.facePositions.size()) {
                isNewDetectResult = true;
            } else {
                for (int i = 0; i < detectedResult.facePositions.size(); i++) {
                    FacePosition facePosition = detectedResult.facePositions.get(i);
                    FacePosition previewFacePosition = previewDetectResult.facePositions.get(i);
                    if (facePosition.trackerId != previewFacePosition.trackerId) {
                        isNewDetectResult = true;
                        break;
                    }
                }
            }
            detectedResult.originalCameraData = originalCameraData;
            // detectedResult.originalCameraData = nv21Array;
            detectedResult.originalCameraDataChanged = isNewDetectResult;
            CameraYuvSocketServer.setDetectedResult(detectedResult);
            if (isNewDetectResult) {
                previewDetectResult = detectedResult;
            }
        }
        if (previewDetectResult1 != null) {
            for (int i = 0; i < previewDetectResult1.facePositions.size(); i++) {
                if (i < detectedResult.facePositions.size()) {
                    detectedResult.facePositions.get(i).trackerId = previewDetectResult1.facePositions.get(i).trackerId;
                }
            }
        }
        for (DetectListener detectListener : detectListeners) {
            detectListener.faceDetected(detectedResult);
        }
        return detectedResult;
    }
}