package com.basic.security.utils;

import android.app.AlertDialog;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.hardware.SensorManager;
import android.util.AttributeSet;
import android.view.OrientationEventListener;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;

import com.basic.security.base.BaseApplication;
import com.basic.security.utils.socket.CameraYuvSocketServer;
import com.basic.security.utils.socket.OfficeSocketServer;
import com.basic.security.utils.socket.RelaySocketServer;
import com.basic.security.utils.socket.SnapshotYuvSocketServer;
import com.basic.security.utils.socket.YuvSocketServer;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class Preview extends SurfaceView implements SurfaceHolder.Callback {

    public static int rotation = 0;
    public static Preview rgbPreview, grayPreview;
    public static int orientation1 = 0;
    public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(10);
    public static long lastTimeOnPreviewFrame = System.currentTimeMillis();
    static ExecutorService rgbExecutorService = Executors.newSingleThreadExecutor();
    static ExecutorService grayExecutorService = Executors.newSingleThreadExecutor();
    static long grayCameraDataLastTimeUpdate = 0;
    static byte[] grayCameraData;
    private static AvcEncoder avcCodec = new AvcEncoder();
    public int rotationToSend;
    Context mContext;
    SurfaceHolder mHolder;
    Camera mCamera;
    ProcessImageAndDrawResults mDraw;
    boolean mFinished;
    BitmapHolder bitmapHolder = new BitmapHolder();
    // Workaround for java.lang.IllegalThreadStateException: submit these as Runnables to the
    // executors instead of calling Thread.start() more than once.
    MyThread rgbThread = new MyThread();
    MyThread grayThread = new MyThread();
    DetectListener detectListener;
    ByteBuffer buf1;
    private int rgb_gray_camera;

    public Preview(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public Preview(Context context, ProcessImageAndDrawResults draw) {
        super(context);
        mContext = context;
        mDraw = draw;
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    /**
     * Converts a YUV420 NV21 frame to RGB8888.
     *
     * @param data   byte array in YUV420 NV21 format
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return an int array of packed pixels, one int per pixel (see convertYUVtoRGB for the channel layout)
     */
    public static int[] convertYUV420_NV21toRGB8888(byte[] data, int width, int height) {
        int size = width * height;
        int offset = size;
        int[] pixels = new int[size];
        int u, v, y1, y2, y3, y4;

        // i iterates over the Y samples and the output pixels
        // k iterates over the interleaved U/V samples
        for (int i = 0, k = 0; i < size; i += 2, k += 2) {
            y1 = data[i] & 0xff;
            y2 = data[i + 1] & 0xff;
            y3 = data[width + i] & 0xff;
            y4 = data[width + i + 1] & 0xff;

            u = data[offset + k] & 0xff;
            v = data[offset + k + 1] & 0xff;
            u = u - 128;
            v = v - 128;

            pixels[i] = convertYUVtoRGB(y1, u, v);
            pixels[i + 1] = convertYUVtoRGB(y2, u, v);
            pixels[width + i] = convertYUVtoRGB(y3, u, v);
            pixels[width + i + 1] = convertYUVtoRGB(y4, u, v);

            if (i != 0 && (i + 2) % width == 0)
                i += width;
        }

        return pixels;
    }
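
    // Usage sketch (not called in this class; frameData, previewWidth and previewHeight are placeholders):
    //   int[] argb = convertYUV420_NV21toRGB8888(frameData, previewWidth, previewHeight);
    //   Bitmap bmp = Bitmap.createBitmap(argb, previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
    // Note the channel-packing caveat on convertYUVtoRGB below before treating the array as ARGB_8888 pixels.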
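
    /**
     * Integer approximation of the usual full-range (BT.601-style) YUV-to-RGB conversion.
     * Note the packing below: blue ends up in bits 16-23 and red in bits 0-7 (0xAABBGGRR),
     * which is not Android's standard ARGB color-int layout; callers that need ARGB_8888
     * ordering would have to swap the r and b shifts.
     */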
    private static int convertYUVtoRGB(int y, int u, int v) {
        int r, g, b;

        r = y + (int) (1.402f * v);
        g = y - (int) (0.344f * u + 0.714f * v);
        b = y + (int) (1.772f * u);
        r = r > 255 ? 255 : r < 0 ? 0 : r;
        g = g > 255 ? 255 : g < 0 ? 0 : g;
        b = b > 255 ? 255 : b < 0 ? 0 : b;
        return 0xff000000 | (b << 16) | (g << 8) | r;
    }

    public void reset() {
        surfaceCreated(mHolder);
        surfaceChanged(mHolder, 0, 0, 0);
    }

    public void putYUVData(byte[] buffer, int length) {
        // Keep only the most recent 10 frames; drop the oldest when the queue is full.
        if (YUVQueue.size() >= 10) {
            YUVQueue.poll();
        }
        YUVQueue.add(buffer);
    }
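
    // Frame flow: for the RGB camera, each preview frame is forwarded to the YUV socket servers and,
    // when the current activity wants face detection, handed to rgbThread (together with the most
    // recent gray frame when Constants.USE_GRAY_CAMERA is set). For the gray camera, frames only
    // refresh grayCameraData via updateGrayBgrData().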
    @Override
    public void surfaceCreated(final SurfaceHolder holder) {
        mFinished = false;
        try {
            mCamera = Camera.open(rgb_gray_camera);
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setPreviewSize(640, 480);
            mCamera.setParameters(parameters);
            mCamera.setPreviewDisplay(holder);
            mCamera.setPreviewCallback(new Camera.PreviewCallback() {
                public void onPreviewFrame(byte[] data, Camera camera) {
                    try {
                        lastTimeOnPreviewFrame = System.currentTimeMillis();
                        if (rgb_gray_camera == Constants.RGB_CAMERA) {
                            YuvSocketServer.AcceptedClient.addCameraData(data);
                            CameraYuvSocketServer.setCameraData(data);
                            SnapshotYuvSocketServer.SnapshotYuvAcceptedClient.addCameraData(data);
                            if (BaseApplication.getApplication().activity != null
                                    && BaseApplication.getApplication().activity.shouldDetectFace()) {
                                if (!rgbThread.isRunning) {
                                    if (Constants.USE_GRAY_CAMERA) {
                                        byte[] grayBgrData = getMostMatchGrayBgrData();
                                        if (grayBgrData != null) {
                                            try {
                                                if (Constants.isHuaWeiPad) {
                                                    grayThread.onPreviewFrame(grayBgrData, camera, Constants.GRAY_CAMERA);
                                                    grayExecutorService.execute(grayThread); // Huawei tablet
                                                } else {
                                                    grayThread.start();
                                                }
                                            } catch (Exception e) {
                                                grayThread.isRunning = false;
                                                e.printStackTrace();
                                            }
                                        }
                                    }
                                    rgbThread.isRunning = true;
                                    rgbThread.onPreviewFrame(data, camera, Constants.RGB_CAMERA);
                                    rgbExecutorService.execute(rgbThread); // same executor path for Huawei tablets and other devices
                                }
                            }
                        } else {
                            updateGrayBgrData(data);
                        }
                    } catch (Exception e) {
                        rgbThread.isRunning = false;
                        e.printStackTrace();
                    }
                }
            });
            mCamera.startPreview();
            OrientationEventListener mOrientationListener = new OrientationEventListener(FaceId.activity,
                    SensorManager.SENSOR_DELAY_NORMAL) {
                public void onOrientationChanged(int orientation) {
                    // Log.d("OrientationDetector", "current sensor orientation = " + orientation);
                    if (orientation == ORIENTATION_UNKNOWN) return;
                    Camera.CameraInfo info = new Camera.CameraInfo();
                    Camera.getCameraInfo(0, info);
                    // Snap to the nearest multiple of 90 degrees.
                    orientation = (orientation + 45) / 90 * 90;
                    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                        Preview.rotation = (info.orientation - orientation + 360) % 360;
                    } else { // back-facing camera
                        Preview.rotation = (info.orientation + orientation) % 360;
                    }
                    orientation1 = orientation;
                }
            };
            if (mOrientationListener.canDetectOrientation()) {
                mOrientationListener.enable();
            } else {
                mOrientationListener.disable();
            }
        } catch (Exception exception) {
            AlertDialog.Builder builder = new AlertDialog.Builder(mContext);
            builder.setMessage("Cannot open camera")
                    .setPositiveButton("Ok", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            android.os.Process.killProcess(android.os.Process.myPid());
                        }
                    })
                    .show();
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
        }
    }

    public void updateGrayBgrData(byte[] data) {
        grayCameraData = data;
        grayCameraDataLastTimeUpdate = System.currentTimeMillis();
    }

    public byte[] getMostMatchGrayBgrData() {
        // Only return the gray frame if it is less than one second old.
        long interval = System.currentTimeMillis() - grayCameraDataLastTimeUpdate;
        // System.out.println("interval=" + interval);
        if (interval < 1000 && grayCameraData != null) {
            return grayCameraData;
        }
        return null;
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        mFinished = true;
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        if (mCamera == null) return;
        Camera.Parameters parameters = mCamera.getParameters();
        if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
            parameters.set("orientation", "portrait");
            mCamera.setDisplayOrientation(90);
            mDraw.rotated = true;
        } else {
            parameters.set("orientation", "landscape");
            mCamera.setDisplayOrientation(0);
        }
        // Pick the supported preview size closest to 640x480 (by squared distance);
        // if nothing qualifies, the size already set in surfaceCreated() is kept.
        List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes();
        int width = 0;
        int height = 0;
        for (Camera.Size s : supportedSizes) {
            if ((width - 640) * (width - 640) + (height - 480) * (height - 480) >
                    (s.width - 640) * (s.width - 640) + (s.height - 480) * (s.height - 480)) {
                width = s.width;
                height = s.height;
            }
        }
        try {
            if (width * height > 0) {
                parameters.setPreviewSize(width, height);
            }
            parameters.setSceneMode(Camera.Parameters.SCENE_MODE_PORTRAIT);
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            mCamera.setParameters(parameters);
        } catch (Exception ex) {
            // Ignore parameter failures and keep whatever the camera accepted.
            // ex.printStackTrace();
        }
        parameters = mCamera.getParameters();
        Camera.Size previewSize = parameters.getPreviewSize();
        makeResizeForCameraAspect(1.0f / ((1.0f * previewSize.width) / previewSize.height));
    }

    private void makeResizeForCameraAspect(float cameraAspectRatio) {
        ViewGroup.LayoutParams layoutParams = this.getLayoutParams();
        int matchParentWidth = this.getWidth();
        int newHeight = (int) (matchParentWidth / cameraAspectRatio);
        if (newHeight != layoutParams.height) {
            layoutParams.height = newHeight;
            layoutParams.width = matchParentWidth;
            this.setLayoutParams(layoutParams);
            this.invalidate();
        }
    }

    public void init(Context context, int rgb_gray_camera, DetectListener detectListener) {
        if (rgb_gray_camera == Constants.RGB_CAMERA) {
            Preview.rgbPreview = this;
            // YuvSocketServer.startYuvSocketServer();
            CameraYuvSocketServer.startServer();
            SnapshotYuvSocketServer.startYuvSocketServer();
            RelaySocketServer.startSocketServer();
            OfficeSocketServer.startSocketServer();
        }
        if (rgb_gray_camera == Constants.GRAY_CAMERA) {
            Preview.grayPreview = this;
        }
        this.detectListener = detectListener;
        mContext = context;
        mDraw = new ProcessImageAndDrawResults(rgb_gray_camera);
        this.rgb_gray_camera = rgb_gray_camera;
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }
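
    // Typical wiring (sketch, not taken from a caller in this file): the hosting activity creates one
    // Preview per camera and calls preview.init(activity, Constants.RGB_CAMERA, detectListener) or
    // preview.init(activity, Constants.GRAY_CAMERA, detectListener); the socket servers are only
    // started for the RGB instance.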

    /**
     * Processes one camera frame per run(): rotates the YUV data, converts it, writes the
     * intermediate image files and hands the result to ProcessImageAndDrawResults.
     */
    class MyThread extends Thread {

        public boolean isRunning = false;
        RotateUtil rotateUtil = new RotateUtil();
        BgrUtils bgrUtils = new BgrUtils();
        ByteArrayOutputStream jpgBaos = new ByteArrayOutputStream();

        byte[] data;
        byte[] originalCameraData;
        Camera camera;
        int rgb_gray_camera;
        RenderScriptHelper renderScriptHelper = new RenderScriptHelper();
        Camera.Parameters params;
        private byte[] mTempI420 = null;
        private byte[] mTempNv21 = null;
        private byte[] mMaxJpeg = null;

        public void onPreviewFrame(byte[] data, Camera camera, int rgb_gray_camera) {
            originalCameraData = data;
            this.rgb_gray_camera = rgb_gray_camera;
            if (this.data == null || this.data.length != data.length) {
                this.data = new byte[data.length];
            }
            // Copy the frame only when the current activity needs its own copy; otherwise keep a reference.
            // long begin = System.currentTimeMillis();
            if (BaseApplication.getApplication().activity.needCopyCameraData()) {
                System.arraycopy(data, 0, this.data, 0, data.length);
            } else {
                this.data = data;
            }
            // System.out.println("onPreviewFrame = " + (System.currentTimeMillis() - begin));
            this.camera = camera;
        }

        @Override
        public void run() {
            try {
                isRunning = true;
                ProcessImageAndDrawResults mDraw;
                if (rgb_gray_camera == Constants.GRAY_CAMERA) {
                    ThreadUtil.printThreadInfo("GRAY_CAMERA");
                    mDraw = grayPreview.mDraw;
                } else {
                    ThreadUtil.printThreadInfo("RGB_CAMERA");
                    mDraw = rgbPreview.mDraw;
                }
                mDraw.originalCameraData = originalCameraData;
                mDraw.setRgb_gray_camera(rgb_gray_camera);
                if (params == null) {
                    params = camera.getParameters();
                }
                int orientation = orientation1;
                // Note: the sensor-derived rotation and the per-device defaults below are all
                // overridden by Constants.rotation (or by 0 when simulate_camera is set).
                int rotation = Preview.rotation;
                rotation = 360 - rotation;
                if (Constants.isHuaWeiPad) {
                    rotation = 270; // 270 on the Huawei tablet
                } else {
                    rotation = 90;
                }
                rotation = Constants.rotation;
                if (Constants.simulate_camera) {
                    rotation = 0;
                }
                if (rotation == 360 || rotation == 0) {
                    mDraw.mImageWidth = params.getPreviewSize().width;
                    mDraw.mImageHeight = params.getPreviewSize().height;
                }
                if (rotation == 270) {
                    data = rotateUtil.rotateYUV420Degree270(data, params.getPreviewSize().width, params.getPreviewSize().height);
                    mDraw.mImageWidth = params.getPreviewSize().height;
                    mDraw.mImageHeight = params.getPreviewSize().width;
                }
                if (rotation == 180) {
                    data = rotateUtil.rotateYUV420Degree180(data, params.getPreviewSize().width, params.getPreviewSize().height);
                    mDraw.mImageWidth = params.getPreviewSize().width;
                    mDraw.mImageHeight = params.getPreviewSize().height;
                }
                if (rotation == 90) {
                    data = rotateUtil.rotateYUV420Degree90(data, params.getPreviewSize().width, params.getPreviewSize().height);
                    mDraw.mImageWidth = params.getPreviewSize().height;
                    mDraw.mImageHeight = params.getPreviewSize().width;
                }
                Preview.this.rotationToSend = rotation;
                if (!Constants.useNv21ToBgr) {
                    long begin = System.currentTimeMillis();
                    Bitmap frameBitmap = renderScriptHelper.getBitmapFromFrameData(data, mDraw.mImageWidth, mDraw.mImageHeight);
                    String outputFileName = "";
                    byte[] bgrArray = null;

                    try {
                        String name = "";
                        String filesPath = new ContextWrapper(FaceId.activity).getFilesDir().getAbsolutePath();
                        byte[] jpgByteArray = null;
                        if (rgb_gray_camera == Constants.RGB_CAMERA) {
                            name = "image_rgb.jpg";
                            outputFileName = "rgb.jpg";
                            File targetFile = new File(filesPath, outputFileName);
                            if (Constants.showCapturedImages) {
                                jpgBaos.reset();
                                frameBitmap.compress(Bitmap.CompressFormat.JPEG, 100, jpgBaos);
                                jpgByteArray = jpgBaos.toByteArray();
                            }
                            bgrArray = bgrUtils.getPixelsBGR(frameBitmap);
                            if (!Constants.useBgrArray) {
                                FileOutputStream outputStream = new FileOutputStream(targetFile);
                                outputStream.write(bgrArray);
                                outputStream.close();
                            }
                        } else {
                            outputFileName = "gray.jpg";
                            name = "image_gray.jpg";
                            Bitmap bitmap = BitmapFactory.decodeByteArray(jpgBaos.toByteArray(), 0, jpgBaos.size());
                            bitmapHolder.storeBitmap(bitmap);
                            bitmapHolder.flipBitmapHorizontal();
                            bitmap = bitmapHolder.getBitmapAndFree();
                            jpgBaos.reset();
                            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, jpgBaos);

                            jpgByteArray = jpgBaos.toByteArray();
                            File targetFile = new File(filesPath, outputFileName);
                            FileOutputStream outputStream = new FileOutputStream(targetFile);
                            bgrArray = bgrUtils.getPixelsBGR(BitmapFactory.decodeByteArray(jpgByteArray, 0, jpgBaos.size()));
                            outputStream.write(bgrArray);
                            outputStream.close();
                        }
                        if (Constants.showCapturedImages) {
                            if (jpgByteArray != null) {
                                FileOutputStream output = new FileOutputStream(new File(new ContextWrapper(getContext()).getFilesDir().getAbsolutePath(), name));
                                output.write(jpgByteArray);
                                output.flush();
                                output.close();
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    if ((mDraw == null) || mFinished)
                        return;
                    if (mDraw.mYUVData == null) {
                        mDraw.mRGBData = new byte[3 * mDraw.mImageWidth * mDraw.mImageHeight];
                        mDraw.mYUVData = new byte[data.length];
                    }
                    mDraw.start(frameBitmap, bgrArray, data);
                } else {
                    mDraw.start1(data);
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                isRunning = false;
            }
        }
    }
}
|