package com.basic.security.utils;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Base64;

import com.basic.security.base.BaseApplication;
import com.basic.security.manager.BaseManager;
import com.basic.security.manager.DeviceManager;
import com.basic.security.manager.IdCardManager;
import com.basic.security.model.ModelAdapter;
import com.basic.security.utils.socket.outdoor.OutdoorGuestSocketServer;
import com.basic.security.utils.socket.server.TableRowReceiveSocketServer;
import com.basic.security.widget.Preview;

/**
 * JNI wrapper around the face-recognition SDK (opencv_java3, THFaceImage, THFeature, native-lib).
 * The public *J methods guard their native counterparts behind the initSdkSuccess flag,
 * so they can be called safely before the SDK has finished initializing.
 */
public class FaceId {

    public static FaceId instance = new FaceId();
    public static Activity activity = null;

    static {
        System.loadLibrary("opencv_java3");
        System.loadLibrary("THFaceImage");
        System.loadLibrary("native-lib");
        System.loadLibrary("THFeature");
    }

    public boolean sdkInitSuccess = false;
    private boolean initSdkSuccess = false;

    public static void initSdk(Activity activity, Preview rgbPreview, Preview grayPreview,
                               DetectListener detectListener) {
        String ip = NetUtil.getIPAddress(true);
        Constants.deviceId = DeviceManager.getDeviceId();
        FaceId.activity = activity;
        if (Constants.isOutdoor) {
            OutdoorGuestSocketServer.startServer();
        }
        TableRowReceiveSocketServer.startServer();
        if (!Constants.stopCameraAndVideo) {
            if (rgbPreview != null) {
                rgbPreview.init(activity, Constants.RGB_CAMERA, detectListener);
            }
            if (grayPreview != null && Constants.USE_GRAY_CAMERA) {
                grayPreview.init(activity, Constants.GRAY_CAMERA, detectListener);
            }
        }
        ProcessImageAndDrawResults.addDetectedListener(detectListener);
    }

    /**
     * Sanity test: extracts a feature from each JPEG, round-trips it through the
     * person table as Base64, then compares the two features and prints the score.
     */
    public static void testSdk(String jpgFile1, String jpgFile2) {
        BaseApplication.getApplication().detectLock.lock();

        Bitmap bitmap1 = BitmapFactory.decodeFile(jpgFile1);
        if (bitmap1 == null) {
            BaseApplication.getApplication().detectLock.unlock();
            return;
        }
        byte[] byte1 = IdCardManager.getPixelsBGR(bitmap1);
        byte[] featureData1 = FaceId.instance.extractFeature1J(byte1, bitmap1.getWidth(), bitmap1.getHeight());
        if (featureData1 == null) {
            BaseApplication.getApplication().detectLock.unlock();
            return;
        }
        String base64 = Base64.encodeToString(featureData1, Base64.NO_WRAP);
        ModelAdapter person1 = new ModelAdapter("p1");
        person1.setString("table", "person");
        person1.setString("camera_image_feature", base64);
        BaseManager.save(person1);
        ModelAdapter dbPerson1 = BaseManager.findById("person", "p1");
        featureData1 = Base64.decode(dbPerson1.getString("camera_image_feature"), Base64.NO_WRAP);

        Bitmap bitmap2 = BitmapFactory.decodeFile(jpgFile2);
        if (bitmap2 == null) {
            BaseApplication.getApplication().detectLock.unlock();
            return;
        }
        byte[] byte2 = IdCardManager.getPixelsBGR(bitmap2);
        byte[] featureData2 = FaceId.instance.extractFeature1J(byte2, bitmap2.getWidth(), bitmap2.getHeight());
        if (featureData2 == null) {
            BaseApplication.getApplication().detectLock.unlock();
            return;
        }
        String base642 = Base64.encodeToString(featureData2, Base64.NO_WRAP);
        ModelAdapter person2 = new ModelAdapter("p2");
        person2.setString("table", "person");
        person2.setString("camera_image_feature", base642);
        BaseManager.save(person2);
        ModelAdapter dbPerson2 = BaseManager.findById("person", "p2");
        featureData2 = Base64.decode(dbPerson2.getString("camera_image_feature"), Base64.NO_WRAP);

        float score = FaceId.instance.compareFeatureJ(featureData1, featureData2);
        System.out.println("testSdk=" + score);
        BaseApplication.getApplication().detectLock.unlock();
    }

    // Java-side wrappers: each *J method checks that the native SDK is initialized
    // before delegating to the corresponding native method.

    public boolean initSdkJ(String modelPath) {
        initSdkSuccess = initSdk(modelPath);
        System.out.println("FaceId.initSdkJ initSdkSuccess=" + initSdkSuccess);
        return initSdkSuccess;
    }

    public void detectFaceJ(DetectedResult detectedResult) {
        detectFace(detectedResult);
    }

    public String rgbDetectFaceJ(String modelPath, String rgbFileName, int width, int height,
                                 String baseFeatureName, int shouldExtractFeature,
                                 boolean useGrayCamera, int detectFaceCount) {
        if (!initSdkSuccess) {
            return "";
        }
        return rgbDetectFace(modelPath, rgbFileName, width, height, baseFeatureName,
                shouldExtractFeature, useGrayCamera, detectFaceCount);
    }

    public String rgbDetectFace2J(String modelPath, String rgbFileName, int width, int height,
                                  String baseFeatureName, int shouldExtractFeature,
                                  boolean useGrayCamera, int detectFaceCount, byte[] bgrArray) {
        if (!initSdkSuccess) {
            return "";
        }
        return rgbDetectFace2(modelPath, rgbFileName, width, height, baseFeatureName,
                shouldExtractFeature, useGrayCamera, detectFaceCount, bgrArray);
    }

    public String rgbDetectFace4J(int channel, String modelPath, String rgbFileName, int width, int height,
                                  String baseFeatureName, int shouldExtractFeature,
                                  boolean useGrayCamera, int detectFaceCount, byte[] bgrArray) {
        if (!initSdkSuccess) {
            return "";
        }
        return rgbDetectFace4(channel, modelPath, rgbFileName, width, height, baseFeatureName,
                shouldExtractFeature, useGrayCamera, detectFaceCount, bgrArray);
    }

    public String rgbDetectFace3J(int channel, String modelPath, int width, int height,
                                  String baseFeatureName, int shouldExtractFeature,
                                  boolean useGrayCamera, int detectFaceCount, byte[] nv21Array) {
        if (!initSdkSuccess) {
            return "";
        }
        return rgbDetectFace3(channel, modelPath, width, height, baseFeatureName,
                shouldExtractFeature, useGrayCamera, detectFaceCount, nv21Array);
    }

    public String grayDetectFaceJ(String modelPath, String rgbFileName, int width, int height, int detectFaceCount) {
        if (!initSdkSuccess) {
            return "";
        }
        return grayDetectFace(modelPath, rgbFileName, width, height, detectFaceCount);
    }

    public void cropFaceJ(String frameJpgPath, String faceJpgPath, int width, int height,
                          int x1, int y1, int x2, int y2) {
        cropFace(frameJpgPath, faceJpgPath, width, height, x1, y1, x2, y2);
    }

    public byte[] extractFeatureJ(String jpgFileName) {
        if (!initSdkSuccess) {
            return null;
        }
        return extractFeature(jpgFileName);
    }

    public String facePositionJ(String jpgFileName) {
        if (!initSdkSuccess) {
            return "";
        }
        return facePosition(jpgFileName);
    }

    public byte[] extractFeature1J(byte[] bgrData, int width, int height) {
        if (!initSdkSuccess) {
            return null;
        }
        return extractFeature1(bgrData, width, height);
    }

    public String compareFeatureJ(String featureFileName1, String featureFileName2) {
        if (!initSdkSuccess) {
            return "";
        }
        return compareFeature(featureFileName1, featureFileName2);
    }

    public float compareFeatureJ(byte[] featureBuffer1, byte[] featureBuffer2) {
        if (!initSdkSuccess) {
            return 0;
        }
        return compareFeature(featureBuffer1, featureBuffer2);
    }

    public String compareFeatureInDbJ(String featureFileName) {
        if (!initSdkSuccess) {
            return "";
        }
        return compareFeatureInDb(featureFileName);
    }

    public String compareFeatureInDbJ(byte[] featureBuffer, int minScore) {
        if (!initSdkSuccess) {
            return "";
        }
        return compareFeatureInDb(featureBuffer, minScore);
    }

    public String compareFeatureInAllDbJ(byte[] featureBuffer, int minScore) {
        if (!initSdkSuccess) {
            return "";
        }
        return compareFeatureInAllDb(featureBuffer, minScore);
    }

    public void addFeatureToDbJ(String id, byte[] featureBuffer) {
        addFeatureToDb(id, featureBuffer);
    }

    public void addFeatureToAllDbJ(String id, byte[] featureBuffer) {
        addFeatureToAllDb(id, featureBuffer);
    }

    public void removeFeatureFromDbJ(String id) {
        removeFeatureFromDb(id);
    }

    public void addFeatureToTempDbJ(String id, byte[] featureBuffer) {
        addFeatureToTempDb(id, featureBuffer);
    }

    public String compareFeatureInTempDbJ(byte[] featureBuffer, int minScore) {
        if (!initSdkSuccess) {
            return "";
        }
        return compareFeatureInTempDb(featureBuffer, minScore);
    }

    public void removeFeatureFromTempDbJ(String id) {
        removeFeatureFromTempDb(id);
    }

    public String receiveBroadcastJ(char broadcastPort) {
        return receiveBroadcast(broadcastPort);
    }

    public byte[] yuv420p2rgb24J(byte[] yuvbuffer, byte[] rgbbuffer, int width, int height) {
        return yuv420p2rgb24(yuvbuffer, rgbbuffer, width, height);
    }

    // Native methods implemented in the bundled JNI library (native-lib).

    public native boolean initSdk(String modelPath);

    public native void detectFace(DetectedResult detectedResult);

    public native String rgbDetectFace(String modelPath, String rgbFileName, int width, int height,
                                       String baseFeatureName, int shouldExtractFeature,
                                       boolean useGrayCamera, int detectFaceCount);

    public native String rgbDetectFace2(String modelPath, String rgbFileName, int width, int height,
                                        String baseFeatureName, int shouldExtractFeature,
                                        boolean useGrayCamera, int detectFaceCount, byte[] bgrArray);

    public native String rgbDetectFace4(int channel, String modelPath, String rgbFileName, int width, int height,
                                        String baseFeatureName, int shouldExtractFeature,
                                        boolean useGrayCamera, int detectFaceCount, byte[] bgrArray);

    public native String rgbDetectFace3(int channel, String modelPath, int width, int height,
                                        String baseFeatureName, int shouldExtractFeature,
                                        boolean useGrayCamera, int detectFaceCount, byte[] nv21Array);

    public native String grayDetectFace(String modelPath, String rgbFileName, int width, int height, int detectFaceCount);

    public native void cropFace(String frameJpgPath, String faceJpgPath, int width, int height,
                                int x1, int y1, int x2, int y2);

    public native byte[] extractFeature(String jpgFileName);

    public native String facePosition(String jpgFileName);

    public native byte[] extractFeature1(byte[] bgrData, int width, int height);

    public native String compareFeature(String featureFileName1, String featureFileName2);

    public native float compareFeature(byte[] featureBuffer1, byte[] featureBuffer2);

    public native String compareFeatureInDb(String featureFileName);

    public native String compareFeatureInDb(byte[] featureBuffer, int minScore);

    public native String compareFeatureInAllDb(byte[] featureBuffer, int minScore);

    public native void addFeatureToDb(String id, byte[] featureBuffer);

    public native void addFeatureToAllDb(String id, byte[] featureBuffer);

    public native void removeFeatureFromDb(String id);

    public native void addFeatureToTempDb(String id, byte[] featureBuffer);

    public native String compareFeatureInTempDb(byte[] featureBuffer, int minScore);

    public native void removeFeatureFromTempDb(String id);

    public native String receiveBroadcast(char broadcastPort);

    public native byte[] yuv420p2rgb24(byte[] yuvbuffer, byte[] rgbbuffer, int width, int height);
}