---
title: "Android Native Face Detection: Fast Face Tracking with Camera2 + FaceDetector"
categories:
- Android
tags:
- Face Detection
- FaceDetector
- Camera2
date: 2020-05-27 14:02:13
---

## Preface
This post walks through the Android built-in face detection API, FaceDetector. It can detect whether there is a face on screen, how many faces there are, the 2D coordinates of the midpoint between the eyes, and the distance between the eyes. One pitfall I ran into: it works well for detecting whether a face is present, but if you need a precise face position or distance, expect deviations; the output is only 2D coordinates, so it doesn't localize well against the real world, at least that's how I make sense of it. If these capabilities aren't enough for what you're building, don't bother with this approach.
Enough talk, let's get started.
## Implementation
1. First, implement a custom view that draws rectangles around the detected faces on screen:
```kotlin
// Overlay view that draws a rectangle around each detected face
class FaceView : View {
lateinit var mPaint: Paint
private var mColor = "#42ed45"
private var mFaces: ArrayList<RectF>? = null
constructor(context: Context) : super(context) {
init()
}
constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
init()
}
constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
init()
}
private fun init() {
mPaint = Paint()
mPaint.color = Color.parseColor(mColor)
mPaint.style = Paint.Style.STROKE
mPaint.strokeWidth = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 3f, context.resources.displayMetrics)
mPaint.isAntiAlias = true
}
override fun onDraw(canvas: Canvas) {
super.onDraw(canvas)
mFaces?.let {
for (face in it) {
canvas.drawRect(face, mPaint)
}
}
}
fun setFaces(faces: ArrayList<RectF>) {
this.mFaces = faces
invalidate()
}
}
```
ImageUtil handles the raw image data returned by the camera:
```java
/**
* Author: Sar_Wang
* Date: 2020/5/11 3:40 PM
* Description:
*/
public class ImageUtil {
/**
* Converts Y:U:V == 4:2:2 plane data (U and V each half the size of Y) to NV21.
*
* @param y Y data
* @param u U data
* @param v V data
* @param nv21 output NV21 buffer; must be pre-allocated by the caller
* @param stride row stride
* @param height image height
*/
public static void yuv422ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
System.arraycopy(y, 0, nv21, 0, y.length);
// Note: using y.length * 3 / 2 as the length risks an out-of-bounds access; compute it from the actual buffer lengths
int length = y.length + u.length / 2 + v.length / 2;
int uIndex = 0, vIndex = 0;
for (int i = stride * height; i < length; i += 2) {
nv21[i] = v[vIndex];
nv21[i + 1] = u[uIndex];
vIndex += 2;
uIndex += 2;
}
}
/**
* Converts Y:U:V == 4:1:1 plane data (U and V each a quarter the size of Y) to NV21.
*
* @param y Y data
* @param u U data
* @param v V data
* @param nv21 output NV21 buffer; must be pre-allocated by the caller
* @param stride row stride
* @param height image height
*/
public static void yuv420ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
System.arraycopy(y, 0, nv21, 0, y.length);
// Note: using y.length * 3 / 2 as the length risks an out-of-bounds access; compute it from the actual buffer lengths
int length = y.length + u.length + v.length;
int uIndex = 0, vIndex = 0;
for (int i = stride * height; i < length; i += 2) { // was i++ in the original post, which overwrote every other byte
nv21[i] = v[vIndex++];
nv21[i + 1] = u[uIndex++];
}
}
}
```
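Both converters write into an NV21 buffer that the caller allocates up front. For reference (a quick sketch, not part of the original code), NV21 stores the full Y plane first, followed by interleaved V/U bytes at half the resolution, which is exactly why the caller later sizes the buffer as stride * height * 3 / 2:

```kotlin
// NV21 layout: [ Y plane: stride * height bytes ][ interleaved V/U: stride * height / 2 bytes ]
fun nv21BufferSize(stride: Int, height: Int): Int = stride * height * 3 / 2
```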
Next, the layout for the activity that drives the camera:
```xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<TextView
android:id="@+id/switch_Camera"
android:layout_gravity="end|bottom"
android:layout_marginBottom="90dp"
android:layout_marginEnd="40dp"
android:text="Switch Camera"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
<com.sto.itgspeaker.FaceView
android:id="@+id/faceView"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</FrameLayout>
```
To save some trouble here, I used a camera helper class written by another developer; if you're interested, read through the source:
```java
public class Camera2Helper {
private static final String TAG = "Camera2Helper";
private Point maxPreviewSize;
private Point minPreviewSize;
public static final String CAMERA_ID_FRONT = "1";
public static final String CAMERA_ID_BACK = "0";
private String mCameraId;
private String specificCameraId;
private Camera2Listener camera2Listener;
private TextureView mTextureView;
private int rotation;
private Point previewViewSize;
private Point specificPreviewSize;
private boolean isMirror;
private Context context;
private boolean mCalibrated;
private boolean mIsVertical = true;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private Camera2Helper(Camera2Helper.Builder builder) {
mTextureView = builder.previewDisplayView;
specificCameraId = builder.specificCameraId;
camera2Listener = builder.camera2Listener;
rotation = builder.rotation;
previewViewSize = builder.previewViewSize;
specificPreviewSize = builder.previewSize;
maxPreviewSize = builder.maxPreviewSize;
minPreviewSize = builder.minPreviewSize;
isMirror = builder.isMirror;
context = builder.context;
if (isMirror) {
mTextureView.setScaleX(-1);
}
}
public void setConfiguration(boolean val) {
mIsVertical = val;
}
public void switchCamera() {
if (CAMERA_ID_BACK.equals(mCameraId)) {
specificCameraId = CAMERA_ID_FRONT;
} else if (CAMERA_ID_FRONT.equals(mCameraId)) {
specificCameraId = CAMERA_ID_BACK;
}
stop();
start();
}
private int getCameraOri(int rotation, String cameraId) {
int degrees = rotation * 90;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
default:
break;
}
int result;
if (CAMERA_ID_FRONT.equals(cameraId)) {
result = (mSensorOrientation + degrees) % 360;
result = (360 - result) % 360;
} else {
result = (mSensorOrientation - degrees + 360) % 360;
}
Log.i(TAG, "getCameraOri: " + rotation + " " + result + " " + mSensorOrientation);
return result;
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureAvailable: ");
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureSizeChanged: ");
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
Log.i(TAG, "onSurfaceTextureDestroyed: ");
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onOpened: ");
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
if (camera2Listener != null) {
camera2Listener.onCameraOpened(cameraDevice, mCameraId, mPreviewSize, getCameraOri(rotation, mCameraId), isMirror);
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onDisconnected: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
Log.i(TAG, "onError: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("error occurred, code is " + error));
}
}
};
private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigured: ");
// The camera is already closed
if (null == mCameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
mCaptureCallBack, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed: ");
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("configureFailed"));
}
}
};
private CameraCaptureSession.CaptureCallback mCaptureCallBack = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if (camera2Listener != null) {
camera2Listener.onHandleFaces(result);
}
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
}
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
private ImageReader mImageReader;
/**
* {@link CaptureRequest.Builder} for the camera preview
*/
private CaptureRequest.Builder mPreviewRequestBuilder;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Orientation of the camera sensor
*/
private int mSensorOrientation;
private Size getBestSupportedSize(List<Size> sizes) {
Size defaultSize = sizes.get(0);
Size[] tempSizes = sizes.toArray(new Size[0]);
Arrays.sort(tempSizes, new Comparator<Size>() {
@Override
public int compare(Size o1, Size o2) {
if (o1.getWidth() > o2.getWidth()) {
return -1;
} else if (o1.getWidth() == o2.getWidth()) {
return o1.getHeight() > o2.getHeight() ? -1 : 1;
} else {
return 1;
}
}
});
sizes = new ArrayList<>(Arrays.asList(tempSizes));
for (int i = sizes.size() - 1; i >= 0; i--) {
if (maxPreviewSize != null) {
if (sizes.get(i).getWidth() > maxPreviewSize.x || sizes.get(i).getHeight() > maxPreviewSize.y) {
sizes.remove(i);
continue;
}
}
if (minPreviewSize != null) {
if (sizes.get(i).getWidth() < minPreviewSize.x || sizes.get(i).getHeight() < minPreviewSize.y) {
sizes.remove(i);
}
}
}
if (sizes.size() == 0) {
String msg = "can not find suitable previewSize, now using default";
if (camera2Listener != null) {
Log.e(TAG, msg);
camera2Listener.onCameraError(new Exception(msg));
}
return defaultSize;
}
Size bestSize = sizes.get(0);
float previewViewRatio;
if (previewViewSize != null) {
previewViewRatio = (float) previewViewSize.x / (float) previewViewSize.y;
} else {
previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
}
if (previewViewRatio > 1) {
previewViewRatio = 1 / previewViewRatio;
}
for (Size s : sizes) {
if (specificPreviewSize != null && specificPreviewSize.x == s.getWidth() && specificPreviewSize.y == s.getHeight()) {
return s;
}
if (Math.abs((s.getHeight() / (float) s.getWidth()) - previewViewRatio) < Math.abs(bestSize.getHeight() / (float) bestSize.getWidth() - previewViewRatio)) {
bestSize = s;
}
}
return bestSize;
}
public synchronized void start() {
if (mCameraDevice != null) {
return;
}
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
public synchronized void stop() {
if (mCameraDevice == null) {
return;
}
closeCamera();
stopBackgroundThread();
}
public void release() {
stop();
mTextureView = null;
camera2Listener = null;
context = null;
}
private void setUpCameraOutputs(CameraManager cameraManager) {
try {
if (configCameraParams(cameraManager, specificCameraId)) {
return;
}
for (String cameraId : cameraManager.getCameraIdList()) {
if (configCameraParams(cameraManager, cameraId)) {
return;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
private boolean configCameraParams(CameraManager manager, String cameraId) throws CameraAccessException {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return false;
}
mPreviewSize = getBestSupportedSize(new ArrayList<Size>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(
new OnImageAvailableListenerImpl(), mBackgroundHandler);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mCameraId = cameraId;
return true;
}
private void openCamera() {
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
setUpCameraOutputs(cameraManager);
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
cameraManager.openCamera(mCameraId, mDeviceStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Creates a new {@link CameraCaptureSession} for camera preview.
*/
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.addTarget(surface);
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
mCaptureStateCallback, mBackgroundHandler
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Configures the necessary {@link Matrix} transformation to `mTextureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate((90 * (rotation - 2)) % 360, centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
Log.i(TAG, "configureTransform: " + getCameraOri(rotation, mCameraId) + " " + rotation * 90);
mTextureView.setTransform(matrix);
}
public static final class Builder {
/**
* The view the preview is displayed on; currently only TextureView is supported
*/
private TextureView previewDisplayView;
/**
* Whether to mirror the preview; only supported with TextureView
*/
private boolean isMirror;
/**
* The specific camera ID to open
*/
private String specificCameraId;
/**
* Event callback
*/
private Camera2Listener camera2Listener;
/**
* Size of the preview view, used when picking the best preview ratio
*/
private Point previewViewSize;
/**
* Pass in the value of getWindowManager().getDefaultDisplay().getRotation()
*/
private int rotation;
/**
* Preferred preview size; used if the system supports it
*/
private Point previewSize;
/**
* Maximum preview resolution
*/
private Point maxPreviewSize;
/**
* Minimum preview resolution
*/
private Point minPreviewSize;
/**
* Context, used to obtain the CameraManager
*/
private Context context;
public Builder() {
}
public Builder previewOn(TextureView val) {
previewDisplayView = val;
return this;
}
public Builder isMirror(boolean val) {
isMirror = val;
return this;
}
public Builder previewSize(Point val) {
previewSize = val;
return this;
}
public Builder maxPreviewSize(Point val) {
maxPreviewSize = val;
return this;
}
public Builder minPreviewSize(Point val) {
minPreviewSize = val;
return this;
}
public Builder previewViewSize(Point val) {
previewViewSize = val;
return this;
}
public Builder rotation(int val) {
rotation = val;
return this;
}
public Builder specificCameraId(String val) {
specificCameraId = val;
return this;
}
public Builder cameraListener(Camera2Listener val) {
camera2Listener = val;
return this;
}
public Builder context(Context val) {
context = val;
return this;
}
public Camera2Helper build() {
if (previewViewSize == null) {
Log.e(TAG, "previewViewSize is null, now use default previewSize");
}
if (camera2Listener == null) {
Log.e(TAG, "camera2Listener is null, callback will not be called");
}
if (previewDisplayView == null) {
throw new NullPointerException("you must preview on a textureView or a surfaceView");
}
if (maxPreviewSize != null && minPreviewSize != null) {
if (maxPreviewSize.x < minPreviewSize.x || maxPreviewSize.y < minPreviewSize.y) {
throw new IllegalArgumentException("maxPreviewSize must be greater than minPreviewSize");
}
}
return new Camera2Helper(this);
}
}
private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
private byte[] y;
private byte[] u;
private byte[] v;
private ReentrantLock lock = new ReentrantLock();
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage();
// With YUV_420_888 the U and V planes often have a pixel stride of 2, so the buffers can look like Y:U:V == 4:2:2
if (camera2Listener != null && image.getFormat() == ImageFormat.YUV_420_888) {
Image.Plane[] planes = image.getPlanes();
// Lock to make sure y, u and v come from the same Image
lock.lock();
// Reuse the same byte arrays to reduce GC churn
if (y == null) {
y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
}
if (image.getPlanes()[0].getBuffer().remaining() == y.length) {
planes[0].getBuffer().get(y);
planes[1].getBuffer().get(u);
planes[2].getBuffer().get(v);
camera2Listener.onPreview(y, u, v, mPreviewSize, planes[0].getRowStride());
}
lock.unlock();
}
image.close();
}
}
}
```
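One thing the post never shows is the Camera2Listener interface the helper calls into. A minimal Kotlin version inferred from the calls above would look roughly like this (method names come from the usage; the nullability is an assumption based on the Kotlin overrides later in the post):

```kotlin
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.TotalCaptureResult
import android.util.Size

interface Camera2Listener {
    fun onCameraOpened(cameraDevice: CameraDevice?, cameraId: String?, previewSize: Size?,
                       displayOrientation: Int, isMirror: Boolean)

    // y/u/v are the raw image planes; stride is the row stride of the Y plane
    fun onPreview(y: ByteArray?, u: ByteArray?, v: ByteArray?, previewSize: Size?, stride: Int)

    // called from onCaptureCompleted with the capture result of each preview frame
    fun onHandleFaces(result: TotalCaptureResult?)

    fun onCameraClosed()
    fun onCameraError(e: Exception?)
}
```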
Then, once the layout has been measured, initialize and bind:
```kotlin
texture_preview.viewTreeObserver.addOnGlobalLayoutListener(this)
override fun onGlobalLayout() {
texture_preview.viewTreeObserver.removeOnGlobalLayoutListener(this)
if (!checkPermissions(NEEDED_PERMISSIONS)) {
ActivityCompat.requestPermissions(this, NEEDED_PERMISSIONS, ACTION_REQUEST_PERMISSIONS)
} else {
initCamera()
}
}
```
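checkPermissions, NEEDED_PERMISSIONS, ACTION_REQUEST_PERMISSIONS and the CAMERA_ID used below are not defined anywhere in the post; a minimal sketch of what they might look like inside the Activity (the camera permission is the only one this demo strictly needs):

```kotlin
import android.Manifest
import android.content.pm.PackageManager
import androidx.core.content.ContextCompat

// Activity members; the names match the snippets, the values are assumptions
private val NEEDED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
private val ACTION_REQUEST_PERMISSIONS = 1
// "0" is the back camera in this helper (see Camera2Helper.CAMERA_ID_BACK)
private val CAMERA_ID = Camera2Helper.CAMERA_ID_BACK

private fun checkPermissions(permissions: Array<String>): Boolean =
    permissions.all {
        ContextCompat.checkSelfPermission(this, it) == PackageManager.PERMISSION_GRANTED
    }
```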
Initialize the camera:
```kotlin
camera2Helper = Camera2Helper.Builder()
.cameraListener(this)
.maxPreviewSize(Point(1920, 1080))
.minPreviewSize(Point(1280, 720))
.specificCameraId(CAMERA_ID)
.context(applicationContext)
.previewOn(texture_preview)
.previewViewSize(Point(texture_preview.width,
texture_preview.height))
.rotation(windowManager.defaultDisplay.rotation)
.build()
camera2Helper.start()
```
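The post doesn't show the lifecycle wiring, but since Camera2Helper exposes start(), stop() and release(), a typical Activity would hook them up along these lines (a sketch, assuming camera2Helper is declared as a nullable var):

```kotlin
override fun onResume() {
    super.onResume()
    camera2Helper?.start()   // reopen the camera when coming back to the foreground
}

override fun onPause() {
    camera2Helper?.stop()    // release the camera so other apps can use it
    super.onPause()
}

override fun onDestroy() {
    camera2Helper?.release() // also drops the view, listener and context references
    super.onDestroy()
}
```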
Now let's look at what happens in the camera callbacks. First, when the camera opens:
```kotlin
override fun onCameraOpened(
cameraDevice: CameraDevice?,
cameraId: String?,
previewSize: Size?,
displayOrientation: Int,
isMirror: Boolean
) {
Log.i("Wzz", "onCameraOpened: previewSize = ${previewSize?.width} x ${previewSize?.height}")
mDisplayOrientation = displayOrientation
isMirrorPreview = isMirror
openedCameraId = cameraId
}
```
The important part is the raw YUV data delivered to onPreview:
```kotlin
if (!this::nv21.isInitialized) {
nv21 = ByteArray(stride * previewSize!!.height * 3 / 2)
}
// y.size / u.size == 2 means the frame came back as YUV422
if (y!!.size / u!!.size == 2) {
ImageUtil.yuv422ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
} else if (y.size / u.size == 4) {
ImageUtil.yuv420ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
}
val yuvImage = YuvImage(nv21, ImageFormat.NV21, stride, previewSize!!.height, null)
```
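For context, this snippet lives inside the Camera2Listener.onPreview callback, which the helper invokes on its background thread; the enclosing override (parameter names assumed to match the interface sketch earlier) is simply:

```kotlin
override fun onPreview(y: ByteArray?, u: ByteArray?, v: ByteArray?, previewSize: Size?, stride: Int) {
    // ... the NV21 assembly shown above runs here, off the UI thread ...
}
```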
The same NV21 wrapping step again, this time on the Java side:
```java
YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
_previewSize.getWidth(), _previewSize.getHeight(), null);
```
Then convert it further to RGB_565 (the only format FaceDetector accepts):
```java
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// Compress the frame to JPEG first; without this step baos stays empty and decodeStream returns null
yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.getWidth(), _previewSize.getHeight()), 100, baos);
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap _currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(baos.toByteArray()), null, bfo);
```
If the orientation needs correcting:
```java
Matrix matrix = new Matrix();
if (mIsVertical) {
matrix.postRotate(90);
// Android's built-in face detection needs the head upright in the image,
// so rotate (and mirror) the frame before detection
matrix.preScale(-1, 1);
// We rotate the same Bitmap
_currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
_previewSize.getWidth(), _previewSize.getHeight(), matrix, false);
}
```
Now FaceDetector can run detection on the frame:
```java
FaceDetector d = new FaceDetector(
_currentFrame.getWidth(),
_currentFrame.getHeight(),
1); // maximum number of faces to look for
Face[] faces = new Face[1];
int found = d.findFaces(_currentFrame, faces); // returns how many faces were actually found
```
From there you can inspect the returned faces and run whatever logic you need.
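For example, to look for several faces at once and drop weak detections, something along these lines works (detectFaces is a hypothetical helper, not from the original post; FaceDetector caps the result count at the maxFaces passed to its constructor):

```kotlin
import android.graphics.Bitmap
import android.media.FaceDetector

fun detectFaces(frame: Bitmap, maxFaces: Int = 5): List<FaceDetector.Face> {
    // FaceDetector requires an RGB_565 bitmap whose width is even
    val detector = FaceDetector(frame.width, frame.height, maxFaces)
    val faces = arrayOfNulls<FaceDetector.Face>(maxFaces)
    val found = detector.findFaces(frame, faces)
    return (0 until found).mapNotNull { faces[it] }
        // CONFIDENCE_THRESHOLD is 0.4f in the framework; below that a hit is dubious
        .filter { it.confidence() >= FaceDetector.Face.CONFIDENCE_THRESHOLD }
}
```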
Finally, here is how the face bounding box gets drawn:
```kotlin
private fun handleFaces(face: FaceDetector.Face) {
    // Midpoint between the eyes, in the coordinate space of the detected bitmap
    val pointF = PointF()
    face.getMidPoint(pointF)
    mFacesRect.clear()
    // width/height here are the dimensions of the bitmap fed to FaceDetector; the axes are
    // swapped relative to the view because the frame was rotated 90 degrees before detection.
    // Use float division so the ratios aren't truncated to integers.
    val widthP = texture_preview.width / height.toFloat()
    val heightP = texture_preview.height / width.toFloat()
    // Heuristic half-size of the face box, derived from the distance between the eyes
    val spec = face.eyesDistance() / heightP
    val x = pointF.x * widthP
    val y = pointF.y * heightP
    val rawFaceRect = RectF(x - spec, y - spec, x + spec, y + spec)
    // Debug rectangle covering the whole preview, handy for checking overlay alignment
    val fullViewRect = RectF(0f, 0f, texture_preview.width.toFloat(), texture_preview.height.toFloat())
    mFaceDetectMatrix.mapRect(rawFaceRect)
    Log.d("wzz", "preview: $width * $height")
    Log.d("wzz", "texture_preview: ${texture_preview.width} * ${texture_preview.height}")
    Log.d("wzz", "texture_preview: ${texture_preview.top} * ${texture_preview.left} --- ${texture_preview.right} --- ${texture_preview.bottom}")
    val resultFaceRect = rawFaceRect
    mFacesRect.add(resultFaceRect)
    mFacesRect.add(fullViewRect)
    Log.d("wzz", "raw face position: ${pointF.x} * ${pointF.y} ---- ${face.eyesDistance()}")
    Log.d("wzz", "mapped face rect: ${resultFaceRect.left} ${resultFaceRect.top} ${resultFaceRect.right} ${resultFaceRect.bottom}")
    // Views must be touched on the UI thread
    runOnUiThread {
        faceView.setFaces(mFacesRect)
    }
}
```
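Note that mFaceDetectMatrix is never constructed in the post. A minimal plausible version, assuming all it needs to do is scale detector (bitmap) coordinates up to the overlay view, could be built like this; mirroring the front camera would take an extra postScale(-1f, 1f, centerX, centerY):

```kotlin
import android.graphics.Matrix

// detectedWidth/detectedHeight: dimensions of the bitmap passed to findFaces (hypothetical names)
fun buildFaceDetectMatrix(detectedWidth: Int, detectedHeight: Int,
                          viewWidth: Int, viewHeight: Int): Matrix =
    Matrix().apply {
        setScale(viewWidth / detectedWidth.toFloat(), viewHeight / detectedHeight.toFloat())
    }
```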
From here you can tweak the specific parameters and experiment.
Follow-ups on face recognition with OpenCV are planned:
1. Converting OpenCV 2D face models to 3D, which I've been meaning to dig into
2. ARCore's Augmented Faces features
If you have any questions, feel free to discuss in the comments or contact me directly.