Switch back to camera2

Replace the CameraX preview/analysis pipeline with a plain camera2
implementation: an AutoFitTextureView renders the preview, an ImageReader
delivers YUV_420_888 frames on a background thread, and a new ImageUtils
converter turns them into ARGB_8888 for the recognizer. The androidx.camera
dependencies are dropped.
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java
new file mode 100644
index 0000000..39ae130
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
+public class AutoFitTextureView extends TextureView {
+  private int ratioWidth = 0;
+  private int ratioHeight = 0;
+
+  public AutoFitTextureView(final Context context) {
+    this(context, null);
+  }
+
+  public AutoFitTextureView(final Context context, final AttributeSet attrs) {
+    this(context, attrs, 0);
+  }
+
+  public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
+    super(context, attrs, defStyle);
+  }
+
+  /**
+   * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
+   * calculated from the parameters. Note that the actual sizes of the parameters don't matter,
+   * that is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) produce the same result.
+   *
+   * @param width Relative horizontal size
+   * @param height Relative vertical size
+   */
+  public void setAspectRatio(final int width, final int height) {
+    if (width < 0 || height < 0) {
+      throw new IllegalArgumentException("Size cannot be negative.");
+    }
+    ratioWidth = width;
+    ratioHeight = height;
+    requestLayout();
+  }
+
+  @Override
+  protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
+    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+    final int width = MeasureSpec.getSize(widthMeasureSpec);
+    final int height = MeasureSpec.getSize(heightMeasureSpec);
+    if (0 == ratioWidth || 0 == ratioHeight) {
+      setMeasuredDimension(width, height);
+    } else {
+      if (width < height * ratioWidth / ratioHeight) {
+        setMeasuredDimension(width, width * ratioHeight / ratioWidth);
+      } else {
+        setMeasuredDimension(height * ratioWidth / ratioHeight, height);
+      }
+    }
+  }
+}
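For reference, a minimal sketch of how the new view is driven (the activity, view id, and
previewSize variable are assumptions for illustration; it mirrors what CameraActivity#openCamera
does further down): once the camera preview size is known, pass it to setAspectRatio, swapping
the dimensions in portrait because the camera delivers landscape buffers.

    // Hypothetical caller, shown for illustration only.
    AutoFitTextureView preview = findViewById(R.id.viewFinder);
    if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
        preview.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
    } else {
        // Portrait: swap width and height so the view keeps the sensor's aspect ratio.
        preview.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
    }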
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java
index c8ca747..eb7e561 100644
--- a/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java
@@ -19,6 +19,10 @@
 import android.graphics.Matrix;
 
 public class ImageUtils {
+  // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
+  // are normalized to eight bits.
+  static final int kMaxChannelValue = 262143;
+
   private ImageUtils() {}
 
   /**
@@ -82,4 +86,51 @@
 
     return matrix;
   }
+
+  private static int YUV2RGB(int y, int u, int v) {
+    // Adjust and check YUV values
+    y = (y - 16) < 0 ? 0 : (y - 16);
+    u -= 128;
+    v -= 128;
+
+    // This is the floating point equivalent. We do the conversion in integer
+    // because some Android devices do not have floating point in hardware.
+    // nR = (int)(1.164 * nY + 1.596 * nV);
+    // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+    // nB = (int)(1.164 * nY + 2.018 * nU);
+    int y1192 = 1192 * y;
+    int r = (y1192 + 1634 * v);
+    int g = (y1192 - 833 * v - 400 * u);
+    int b = (y1192 + 2066 * u);
+
+    // Clipping RGB values to be inside boundaries [0, kMaxChannelValue]
+    r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
+    g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
+    b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
+
+    return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
+  }
+
+  public static void convertYUV420ToARGB8888(
+          byte[] yData,
+          byte[] uData,
+          byte[] vData,
+          int width,
+          int height,
+          int yRowStride,
+          int uvRowStride,
+          int uvPixelStride,
+          int[] out) {
+    int yp = 0;
+    for (int j = 0; j < height; j++) {
+      int pY = yRowStride * j;
+      int pUV = uvRowStride * (j >> 1);
+
+      for (int i = 0; i < width; i++) {
+        int uv_offset = pUV + (i >> 1) * uvPixelStride;
+
+        out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
+      }
+    }
+  }
 }
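A hedged sketch of how the new converter is fed from an android.media.Image in YUV_420_888
layout (this is what CameraActivity#onImageAvailable below does; the variable names here are
illustrative): plane 0 is Y, planes 1 and 2 are U and V, and the U plane supplies the chroma
strides.

    Image.Plane[] planes = image.getPlanes();
    byte[][] yuv = new byte[3][];
    for (int i = 0; i < planes.length; i++) {
        ByteBuffer buffer = planes[i].getBuffer();
        yuv[i] = new byte[buffer.capacity()];
        buffer.get(yuv[i]);
    }
    int[] argb = new int[image.getWidth() * image.getHeight()];
    ImageUtils.convertYUV420ToARGB8888(
            yuv[0], yuv[1], yuv[2],
            image.getWidth(), image.getHeight(),
            planes[0].getRowStride(),   // yRowStride
            planes[1].getRowStride(),   // uvRowStride
            planes[1].getPixelStride(), // uvPixelStride
            argb);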
diff --git a/app/build.gradle b/app/build.gradle
index 0bbdf75..8a412ca 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -43,14 +43,6 @@
 	compileOnly files('system_libs/framework.jar')
 
 	implementation(project(':FaceShared'))
-	def camerax_version = "1.3.0-alpha02"
-	implementation "androidx.camera:camera-core:${camerax_version}"
-	implementation "androidx.camera:camera-camera2:${camerax_version}"
-	implementation "androidx.camera:camera-lifecycle:${camerax_version}"
-	implementation "androidx.camera:camera-video:${camerax_version}"
-	implementation "androidx.camera:camera-view:${camerax_version}"
-	implementation "androidx.camera:camera-extensions:${camerax_version}"
-	implementation "androidx.exifinterface:exifinterface:1.3.5"
 
 	implementation "androidx.annotation:annotation:1.5.0"
 	implementation "androidx.appcompat:appcompat:1.6.0"
diff --git a/app/src/main/java/com/libremobileos/facedetect/BitmapUtils.java b/app/src/main/java/com/libremobileos/facedetect/BitmapUtils.java
index 88dd055..707ca20 100644
--- a/app/src/main/java/com/libremobileos/facedetect/BitmapUtils.java
+++ b/app/src/main/java/com/libremobileos/facedetect/BitmapUtils.java
@@ -29,8 +29,6 @@
 import android.media.Image.Plane;
 import android.util.Log;
 import androidx.annotation.Nullable;
-import androidx.camera.core.ExperimentalGetImage;
-import androidx.camera.core.ImageProxy;
 
 import java.io.ByteArrayOutputStream;
 import java.nio.ByteBuffer;
@@ -114,22 +112,6 @@
 		return null;
 	}
 
-	/** Converts a YUV_420_888 image from CameraX API to a bitmap. */
-	@Nullable
-	@ExperimentalGetImage
-	public static Bitmap getBitmap(ImageProxy image) {
-		FrameMetadata frameMetadata =
-				new FrameMetadata.Builder()
-						.setWidth(image.getWidth())
-						.setHeight(image.getHeight())
-						.setRotation(image.getImageInfo().getRotationDegrees())
-						.build();
-
-		ByteBuffer nv21Buffer =
-				yuv420ThreePlanesToNV21(Objects.requireNonNull(image.getImage()).getPlanes(), image.getWidth(), image.getHeight());
-		return getBitmap(nv21Buffer, frameMetadata);
-	}
-
 	/** Rotates a bitmap if it is converted from a bytebuffer. */
 	private static Bitmap rotateBitmap(
 			Bitmap bitmap, int rotationDegrees) {
diff --git a/app/src/main/java/com/libremobileos/facedetect/CameraActivity.java b/app/src/main/java/com/libremobileos/facedetect/CameraActivity.java
index 9151557..c208b62 100644
--- a/app/src/main/java/com/libremobileos/facedetect/CameraActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/CameraActivity.java
@@ -1,29 +1,85 @@
 package com.libremobileos.facedetect;
 
+import android.Manifest;
+import android.content.Context;
+import android.content.pm.PackageManager;
 import android.content.res.Configuration;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.Image;
+import android.media.ImageReader;
 import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Trace;
+import android.text.TextUtils;
+import android.util.Log;
 import android.util.Size;
+import android.view.Surface;
+import android.view.TextureView;
 
 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import androidx.appcompat.app.AppCompatActivity;
-import androidx.camera.core.CameraSelector;
-import androidx.camera.core.ImageAnalysis;
-import androidx.camera.core.Preview;
-import androidx.camera.lifecycle.ProcessCameraProvider;
-import androidx.camera.view.PreviewView;
+import androidx.core.app.ActivityCompat;
 
-import com.google.common.util.concurrent.ListenableFuture;
+import com.libremobileos.yifan.face.AutoFitTextureView;
+import com.libremobileos.yifan.face.ImageUtils;
 
-import java.util.concurrent.ExecutionException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
 
-public abstract class CameraActivity extends AppCompatActivity {
-	// CameraX boilerplate
-	private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
-	// View showing camera frames
-	protected PreviewView previewView;
-	// The desired camera input size
+public abstract class CameraActivity extends AppCompatActivity implements ImageReader.OnImageAvailableListener {
+
+	private static final String TAG = "Camera2Activity";
+
+	/**
+	 * The camera preview size will be chosen to be the smallest frame by pixel size capable of
+	 * containing a DESIRED_SIZE x DESIRED_SIZE square.
+	 */
+	private static final int MINIMUM_PREVIEW_SIZE = 320;
+
+	protected AutoFitTextureView previewView;
+	private Handler mBackgroundHandler;
+	private HandlerThread mBackgroundThread;
+	private String cameraId;
+	protected CameraDevice cameraDevice;
+	protected CameraCaptureSession cameraCaptureSessions;
+	protected CaptureRequest captureRequest;
+	protected CaptureRequest.Builder captureRequestBuilder;
+	private ImageReader previewReader;
+	private byte[][] yuvBytes = new byte[3][];
+	private int[] rgbBytes = null;
+	private volatile boolean isProcessingFrame = false;
+	private int yRowStride;
+	private Runnable postInferenceCallback;
+	private Runnable imageConverter;
+	private Integer sensorOrientation;
+	private Bitmap rgbFrameBitmap = null;
+	private Bitmap croppedBitmap = null;
+	private Bitmap cropCopyBitmap = null;
+	private Matrix frameToCropTransform;
+	private Matrix cropToFrameTransform;
+	private static final boolean MAINTAIN_ASPECT = false;
+
 	protected final Size desiredInputSize = new Size(640, 480);
+	private Size previewSize;
 	// The calculated actual processing width & height
 	protected int width, height;
 
@@ -32,59 +88,428 @@
 		super.onCreate(savedInstanceState);
 	}
 
-	protected void connectToCam(PreviewView pv) {
+	protected void connectToCam(AutoFitTextureView pv) {
 		previewView = pv;
-		previewView.setScaleType(PreviewView.ScaleType.FIT_CENTER);
 
-		// CameraX boilerplate (create camera connection)
-		cameraProviderFuture = ProcessCameraProvider.getInstance(this);
-		cameraProviderFuture.addListener(() -> {
-			try {
-				ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
-				bindPreview(cameraProvider);
-			} catch (ExecutionException | InterruptedException e) {
-				// No errors need to be handled for this Future.
-				// This should never be reached.
-			}
-		}, getMainExecutor());
+		previewView.setSurfaceTextureListener(textureListener);
 	}
 
-	private void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
-		// We're connected to the camera, set up everything
-		Preview preview = new Preview.Builder()
-				.build();
-
-		// Which camera to use
-		int selectedCamera = CameraSelector.LENS_FACING_FRONT;
-		CameraSelector cameraSelector = new CameraSelector.Builder()
-				.requireLensFacing(selectedCamera)
-				.build();
-
-		preview.setSurfaceProvider(previewView.getSurfaceProvider());
-
-		// Cameras give us landscape images. If we are in portrait mode
-		// (and want to process a portrait image), swap width/height to
-		// make the image portrait.
-		if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
-			width = desiredInputSize.getHeight();
-			height = desiredInputSize.getWidth();
-		} else {
-			width = desiredInputSize.getWidth();
-			height = desiredInputSize.getHeight();
+	private final TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
+		@Override
+		public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+			// The surface is ready; open the camera.
+			openCamera();
 		}
 
-		// Set up CameraX boilerplate and configure it to drop frames if we can't keep up
-		ImageAnalysis imageAnalysis =
-				new ImageAnalysis.Builder()
-						.setTargetResolution(new Size(width, height))
-						.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
-						.build();
+		@Override
+		public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+			// Re-fit the preview transform to the new surface width and height.
+			configureTransform(width, height);
+		}
 
-		onSetCameraCallback(imageAnalysis);
+		@Override
+		public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+			return false;
+		}
 
-		// Bind all objects together
-		/* Camera camera = */ cameraProvider.bindToLifecycle(this, cameraSelector, imageAnalysis, preview);
+		@Override
+		public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+		}
+	};
+	private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
+		@Override
+		public void onOpened(CameraDevice camera) {
+			// This is called when the camera is opened.
+			Log.e(TAG, "onOpened");
+			cameraDevice = camera;
+			createCameraPreview();
+		}
+
+		@Override
+		public void onDisconnected(CameraDevice camera) {
+			cameraDevice.close();
+		}
+
+		@Override
+		public void onError(CameraDevice camera, int error) {
+			cameraDevice.close();
+			cameraDevice = null;
+		}
+	};
+
+	protected void startBackgroundThread() {
+		mBackgroundThread = new HandlerThread("Camera Background");
+		mBackgroundThread.start();
+		mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
 	}
 
-	protected abstract void onSetCameraCallback(ImageAnalysis imageAnalysis);
+	protected void stopBackgroundThread() {
+		mBackgroundThread.quitSafely();
+		try {
+			mBackgroundThread.join();
+			mBackgroundThread = null;
+			mBackgroundHandler = null;
+		} catch (InterruptedException e) {
+			e.printStackTrace();
+		}
+	}
+
+	protected void createCameraPreview() {
+		try {
+			SurfaceTexture texture = previewView.getSurfaceTexture();
+			assert texture != null;
+			texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
+			Surface surface = new Surface(texture);
+			captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+			captureRequestBuilder.addTarget(surface);
+
+			previewReader =
+					ImageReader.newInstance(
+							previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
+
+			previewReader.setOnImageAvailableListener(this, mBackgroundHandler);
+			captureRequestBuilder.addTarget(previewReader.getSurface());
+
+			cameraDevice.createCaptureSession(Arrays.asList(surface, previewReader.getSurface()),
+					new CameraCaptureSession.StateCallback() {
+				@Override
+				public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
+					//The camera is already closed
+					if (null == cameraDevice) {
+						return;
+					}
+					// When the session is ready, we start displaying the preview.
+					cameraCaptureSessions = cameraCaptureSession;
+					try {
+						// Auto focus should be continuous for camera preview.
+						captureRequestBuilder.set(
+								CaptureRequest.CONTROL_AF_MODE,
+								CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+						// Flash is automatically enabled when necessary.
+						captureRequestBuilder.set(
+								CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+
+						// Finally, we start displaying the camera preview.
+						captureRequest = captureRequestBuilder.build();
+						cameraCaptureSessions.setRepeatingRequest(
+								captureRequest, null, mBackgroundHandler);
+					} catch (final CameraAccessException e) {
+						Log.e(TAG, "Exception!", e);
+					}
+				}
+
+				@Override
+				public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
+				}
+			}, null);
+		} catch (CameraAccessException e) {
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * Configures the necessary {@link Matrix} transformation to `previewView`. This method should be
+	 * called after the camera preview size is determined in openCamera and also the size of
+	 * `previewView` is fixed.
+	 *
+	 * @param viewWidth The width of `previewView`
+	 * @param viewHeight The height of `previewView`
+	 */
+	private void configureTransform(final int viewWidth, final int viewHeight) {
+		if (null == previewView || null == previewSize) {
+			return;
+		}
+		final int rotation = getWindowManager().getDefaultDisplay().getRotation();
+		final Matrix matrix = new Matrix();
+		final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+		final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
+		final float centerX = viewRect.centerX();
+		final float centerY = viewRect.centerY();
+		if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
+			bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+			matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+			final float scale =
+					Math.max(
+							(float) viewHeight / previewSize.getHeight(),
+							(float) viewWidth / previewSize.getWidth());
+			matrix.postScale(scale, scale, centerX, centerY);
+			matrix.postRotate(90 * (rotation - 2), centerX, centerY);
+		} else if (Surface.ROTATION_180 == rotation) {
+			matrix.postRotate(180, centerX, centerY);
+		}
+		previewView.setTransform(matrix);
+	}
+
+	/** Compares two {@code Size}s based on their areas. */
+	static class CompareSizesByArea implements Comparator<Size> {
+		@Override
+		public int compare(final Size lhs, final Size rhs) {
+			// We cast here to ensure the multiplications won't overflow
+			return Long.signum(
+					(long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
+		}
+	}
+
+	/**
+	 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
+	 * width and height are both at least as large as the smaller of the two requested dimensions
+	 * (clamped to MINIMUM_PREVIEW_SIZE), or an exact match if one exists.
+	 *
+	 * @param choices The list of sizes that the camera supports for the intended output class
+	 * @param width The minimum desired width
+	 * @param height The minimum desired height
+	 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
+	 */
+	protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
+		final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
+		final Size desiredSize = new Size(width, height);
+
+		// Collect the supported resolutions that are at least as big as the preview Surface
+		boolean exactSizeFound = false;
+		final List<Size> bigEnough = new ArrayList<Size>();
+		final List<Size> tooSmall = new ArrayList<Size>();
+		for (final Size option : choices) {
+			if (option.equals(desiredSize)) {
+				// Set the size but don't return yet so that remaining sizes will still be logged.
+				exactSizeFound = true;
+			}
+
+			if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
+				bigEnough.add(option);
+			} else {
+				tooSmall.add(option);
+			}
+		}
+
+		Log.i(TAG, "Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
+		Log.i(TAG, "Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
+		Log.i(TAG, "Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
+
+		if (exactSizeFound) {
+			Log.i(TAG, "Exact size match found.");
+			return desiredSize;
+		}
+
+		// Pick the smallest of those, assuming we found any
+		if (bigEnough.size() > 0) {
+			final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
+			Log.i(TAG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
+			return chosenSize;
+		} else {
+			Log.e(TAG, "Couldn't find any suitable preview size");
+			return choices[0];
+		}
+	}
+
+	private void openCamera() {
+		CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+		Log.d(TAG, "openCamera");
+		try {
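+			// Default to the first camera, preferring a front-facing one if present.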
+			cameraId = manager.getCameraIdList()[0];
+			for (String id : manager.getCameraIdList()) {
+				CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
+				if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
+					cameraId = id;
+					break;
+				}
+			}
+			CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+			StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+			sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+			assert map != null;
+
+			// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
+			// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
+			// garbage capture data.
+			previewSize =
+					chooseOptimalSize(
+							map.getOutputSizes(SurfaceTexture.class),
+							desiredInputSize.getWidth(), desiredInputSize.getHeight());
+			width = previewSize.getWidth();
+			height = previewSize.getHeight();
+			configureTransform(width, height);
+
+			final int orientation = getResources().getConfiguration().orientation;
+			if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
+				previewView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
+			} else {
+				previewView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
+			}
+
+			int imageOrientation = (sensorOrientation + getScreenOrientation()) % 360;
+			rgbFrameBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+			int targetW, targetH;
+			if (imageOrientation == 90 || imageOrientation == 270) {
+				targetH = width;
+				targetW = height;
+			} else {
+				targetW = width;
+				targetH = height;
+			}
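+			// The recognizer works on a bitmap half the (rotated) preview size.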
+			int cropW = (int) (targetW / 2.0);
+			int cropH = (int) (targetH / 2.0);
+
+			croppedBitmap = Bitmap.createBitmap(cropW, cropH, Bitmap.Config.ARGB_8888);
+
+			frameToCropTransform =
+					ImageUtils.getTransformationMatrix(
+							width, height,
+							cropW, cropH,
+							imageOrientation, MAINTAIN_ASPECT);
+			cropToFrameTransform = new Matrix();
+			frameToCropTransform.invert(cropToFrameTransform);
+
+			setupFaceRecognizer(new Size(cropW, cropH));
+
+			// Bail out if the camera permission has not been granted yet.
+			if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
+			//	ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
+				return;
+			}
+			manager.openCamera(cameraId, stateCallback, null);
+		} catch (CameraAccessException e) {
+			e.printStackTrace();
+		}
+		Log.d(TAG, "openCamera done");
+	}
+
+	private void closeCamera() {
+		if (null != cameraDevice) {
+			cameraDevice.close();
+			cameraDevice = null;
+		}
+		if (null != previewReader) {
+			previewReader.close();
+			previewReader = null;
+		}
+	}
+
+	@Override
+	protected void onResume() {
+		super.onResume();
+		Log.e(TAG, "onResume");
+		startBackgroundThread();
+	}
+
+	@Override
+	protected void onPause() {
+		Log.e(TAG, "onPause");
+		closeCamera();
+		stopBackgroundThread();
+		super.onPause();
+	}
+
+	protected void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) {
+		// Because of the variable row stride it's not possible to know in
+		// advance the actual necessary dimensions of the yuv planes.
+		for (int i = 0; i < planes.length; ++i) {
+			final ByteBuffer buffer = planes[i].getBuffer();
+			if (yuvBytes[i] == null) {
+				Log.d(TAG, "Initializing buffer " + i + " at size " + buffer.capacity());
+				yuvBytes[i] = new byte[buffer.capacity()];
+			}
+			buffer.get(yuvBytes[i]);
+		}
+	}
+
+	protected int[] getRgbBytes() {
+		imageConverter.run();
+		return rgbBytes;
+	}
+
+	protected Bitmap getCroppedBitmap() {
+		return croppedBitmap;
+	}
+
+	protected int getScreenOrientation() {
+		switch (getWindowManager().getDefaultDisplay().getRotation()) {
+			case Surface.ROTATION_270:
+				return 270;
+			case Surface.ROTATION_180:
+				return 180;
+			case Surface.ROTATION_90:
+				return 90;
+			default:
+				return 0;
+		}
+	}
+
+	protected void readyForNextImage() {
+		if (postInferenceCallback != null) {
+			postInferenceCallback.run();
+		}
+	}
+
+	@Override
+	public void onImageAvailable(ImageReader reader) {
+		int previewWidth = previewSize.getWidth();
+		int previewHeight = previewSize.getHeight();
+
+		if (rgbBytes == null) {
+			rgbBytes = new int[previewWidth * previewHeight];
+		}
+		try {
+			final Image image = reader.acquireLatestImage();
+
+			if (image == null) {
+				return;
+			}
+
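+			// Simple backpressure: drop this frame if the previous one is still in flight.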
+			if (isProcessingFrame) {
+				image.close();
+				return;
+			}
+			isProcessingFrame = true;
+			Trace.beginSection("imageAvailable");
+			final Image.Plane[] planes = image.getPlanes();
+			fillBytes(planes, yuvBytes);
+			yRowStride = planes[0].getRowStride();
+			final int uvRowStride = planes[1].getRowStride();
+			final int uvPixelStride = planes[1].getPixelStride();
+
+			imageConverter =
+					new Runnable() {
+						@Override
+						public void run() {
+							ImageUtils.convertYUV420ToARGB8888(
+									yuvBytes[0],
+									yuvBytes[1],
+									yuvBytes[2],
+									previewWidth,
+									previewHeight,
+									yRowStride,
+									uvRowStride,
+									uvPixelStride,
+									rgbBytes);
+						}
+					};
+
+			postInferenceCallback =
+					new Runnable() {
+						@Override
+						public void run() {
+							image.close();
+							isProcessingFrame = false;
+						}
+					};
+
+			rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
+			final Canvas canvas = new Canvas(croppedBitmap);
+			canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
+
+			runOnUiThread(() -> processImage());
+		} catch (final Exception e) {
+			Log.e(TAG, "Exception!", e);
+			Trace.endSection();
+			return;
+		}
+		Trace.endSection();
+	}
+
+	protected abstract void setupFaceRecognizer(final Size bitmapSize);
+
+	protected abstract void processImage();
+
 }
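The contract the new base class imposes on subclasses is small; a hedged sketch (class name
hypothetical, mirroring what MainActivity and ScanActivity do below):

    public class ExampleActivity extends CameraActivity {
        @Override
        protected void setupFaceRecognizer(Size bitmapSize) {
            // Called once from openCamera() with the crop size;
            // build detectors against bitmapSize here.
        }

        @Override
        protected void processImage() {
            // Called on the UI thread for each converted frame.
            Bitmap frame = getCroppedBitmap(); // upright ARGB_8888 crop
            // ... run inference on the frame ...
            readyForNextImage(); // closes the Image and re-arms the pipeline
        }
    }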
diff --git a/app/src/main/java/com/libremobileos/facedetect/MainActivity.java b/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
index ea3ab2d..cc532b9 100644
--- a/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
@@ -21,16 +21,13 @@
 import android.graphics.RectF;
 import android.os.Bundle;
 import android.util.Pair;
+import android.util.Size;
 
 import androidx.annotation.Nullable;
-import androidx.annotation.OptIn;
-import androidx.camera.core.ExperimentalGetImage;
-import androidx.camera.core.ImageAnalysis;
 
 import com.libremobileos.yifan.face.DirectoryFaceStorageBackend;
 import com.libremobileos.yifan.face.FaceRecognizer;
 import com.libremobileos.yifan.face.FaceStorageBackend;
-import com.libremobileos.yifan.face.SharedPreferencesFaceStorageBackend;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -40,6 +37,7 @@
 	private FaceRecognizer faceRecognizer;
 	// Simple view allowing us to draw Rectangles over the Preview
 	private FaceBoundsOverlayView overlayView;
+	private boolean computingDetection = false;
 
 	@Override
 	protected void onCreate(@Nullable Bundle savedInstanceState) {
@@ -55,43 +53,8 @@
 		});
 	}
 
-	@OptIn(markerClass = ExperimentalGetImage.class)
 	@Override
-	protected void onSetCameraCallback(ImageAnalysis imageAnalysis) {
-		imageAnalysis.setAnalyzer(getMainExecutor(), imageProxy -> {
-			// Convert CameraX Image to Bitmap and process it
-			// Return list of detected faces
-			List<FaceRecognizer.Face> data = faceRecognizer.recognize(BitmapUtils.getBitmap(imageProxy));
-			ArrayList<Pair<RectF, String>> bounds = new ArrayList<>();
-
-			for (FaceRecognizer.Face face : data) {
-				RectF boundingBox = new RectF(face.getLocation());
-
-				// Camera is frontal so the image is flipped horizontally,
-				// so flip it again.
-				Matrix flip = new Matrix();
-				flip.postScale(-1, 1, width / 2.0f, height / 2.0f);
-				flip.mapRect(boundingBox);
-
-				// Generate UI text for face
-				String uiText;
-				// Do we have any match?
-				if (face.isRecognized()) {
-					// If yes, show the user-visible ID and the detection confidence
-					uiText = face.getModelCount() + " " + face.getTitle() + " " + face.getDistance();
-				} else {
-					// Show detected object type (always "Face") and how confident the AI is that this is a Face
-					uiText = face.getTitle() + " " + face.getDetectionConfidence();
-				}
-				bounds.add(new Pair<>(boundingBox, uiText));
-			}
-
-			// Pass bounds to View drawing rectangles
-			overlayView.updateBounds(bounds, width, height);
-			// Clean up
-			imageProxy.close();
-		});
-
+	protected void setupFaceRecognizer(final Size bitmapSize) {
 		// Store registered Faces in Memory
 		//FaceStorageBackend faceStorage = new VolatileFaceStorageBackend();
 		//FaceStorageBackend faceStorage = new SharedPreferencesFaceStorageBackend(getSharedPreferences("faces", 0));
@@ -101,12 +64,52 @@
 		faceRecognizer = FaceRecognizer.create(this,
 				faceStorage, /* face data storage */
 				0.6f, /* minimum confidence to consider object as face */
-				width, /* bitmap width */
-				height, /* bitmap height */
-				0, /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
+				bitmapSize.getWidth(), /* bitmap width */
+				bitmapSize.getHeight(), /* bitmap height */
+				0, /* We rotate the image ourselves, so IGNORE sensorRotation altogether */
 				0.7f, /* maximum distance (to saved face model, not from camera) to track face */
 				1 /* minimum model count to track face */
 		);
 	}
 
+	@Override
+	protected void processImage() {
+		// No mutex needed as this method is not reentrant.
+		if (computingDetection) {
+			readyForNextImage();
+			return;
+		}
+		computingDetection = true;
+		List<FaceRecognizer.Face> data = faceRecognizer.recognize(getCroppedBitmap());
+		computingDetection = false;
+
+		ArrayList<Pair<RectF, String>> bounds = new ArrayList<>();
+
+		for (FaceRecognizer.Face face : data) {
+			RectF boundingBox = new RectF(face.getLocation());
+
+			// The front camera preview is mirrored horizontally,
+			// so mirror the bounding box back.
+			Matrix flip = new Matrix();
+			flip.postScale(-1, 1, width / 2.0f, height / 2.0f);
+			flip.mapRect(boundingBox);
+
+			// Generate UI text for face
+			String uiText;
+			// Do we have any match?
+			if (face.isRecognized()) {
+				// If yes, show the user-visible ID and the detection confidence
+				uiText = face.getModelCount() + " " + face.getTitle() + " " + face.getDistance();
+			} else {
+				// Show detected object type (always "Face") and how confident the AI is that this is a Face
+				uiText = face.getTitle() + " " + face.getDetectionConfidence();
+			}
+			bounds.add(new Pair<>(boundingBox, uiText));
+		}
+
+		// Pass bounds to View drawing rectangles
+		overlayView.updateBounds(bounds, width, height);
+		readyForNextImage();
+	}
+
 }
diff --git a/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java b/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
index e946d58..cf5811b 100644
--- a/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
@@ -19,14 +19,12 @@
 import android.content.Intent;
 import android.os.Bundle;
 import android.util.Pair;
+import android.util.Size;
 import android.view.View;
 import android.widget.FrameLayout;
 import android.widget.TextView;
 
 import androidx.annotation.Nullable;
-import androidx.annotation.OptIn;
-import androidx.camera.core.ExperimentalGetImage;
-import androidx.camera.core.ImageAnalysis;
 
 import com.libremobileos.yifan.face.FaceDataEncoder;
 import com.libremobileos.yifan.face.FaceDetector;
@@ -47,6 +45,8 @@
 	private final List<FaceScanner.Face> faces = new ArrayList<>();
 	private TextView subText;
 
+	private boolean computingDetection = false;
+
 	@Override
 	protected void onCreate(@Nullable Bundle savedInstanceState) {
 		// Initialize basic views
@@ -65,70 +65,79 @@
 		findViewById(R.id.button).setVisibility(View.GONE);
 	}
 
-	@OptIn(markerClass = ExperimentalGetImage.class)
-	protected void onSetCameraCallback(ImageAnalysis imageAnalysis) {
-		imageAnalysis.setAnalyzer(getMainExecutor(), imageProxy -> {
-			if (faces.size() == 10) {
-				imageProxy.close();
-				return;
-			}
-			// Convert CameraX Image to Bitmap and process it
-			// Return list of detected faces
-			List<Pair<FaceDetector.Face, FaceScanner.Face>> data = faceRecognizer.process(BitmapUtils.getBitmap(imageProxy), false);
-
-			if (data.size() > 1) {
-				if (lastAdd == -1) { // last frame had two faces too
-					subText.setText(R.string.found_2_faces);
-				}
-				lastAdd = -1;
-				imageProxy.close();
-				return;
-			} else if (lastAdd == -1) {
-				lastAdd = System.currentTimeMillis();
-			}
-			if (data.size() == 0) {
-				if (lastAdd == -2) { // last frame had 0 faces too
-					subText.setText(R.string.cant_find_face);
-				}
-				lastAdd = -2;
-				imageProxy.close();
-				return;
-			} else if (lastAdd == -2) {
-				lastAdd = System.currentTimeMillis();
-			}
-
-			Pair<FaceDetector.Face, FaceScanner.Face> face = data.get(0);
-
-			// Do we want to add a new face?
-			if (lastAdd + 1000 < System.currentTimeMillis()) {
-				lastAdd = System.currentTimeMillis();
-				if (face.second.getBrightnessHint() < 1) {
-					subText.setText(R.string.cant_scan_face);
-					imageProxy.close();
-					return;
-				} else {
-					subText.setText(R.string.scan_face_now);
-				}
-				faces.add(face.second);
-				overlayView.setPercentage(faces.size() * 10);
-			}
-
-			if (faces.size() == 10) {
-				startActivity(new Intent(this, EnrollActivity.class).putExtra("faces",
-						FaceDataEncoder.encode(faces.stream().map(FaceScanner.Face::getExtra).toArray(float[][]::new))));
-				finish();
-			}
-
-			// Clean up
-			imageProxy.close();
-		});
-
+	@Override
+	protected void setupFaceRecognizer(final Size bitmapSize) {
 		// Create AI-based face detection
 		faceRecognizer = FaceFinder.create(this,
 				0.6f, /* minimum confidence to consider object as face */
-				width, /* bitmap width */
-				height, /* bitmap height */
-				0 /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
+				bitmapSize.getWidth(), /* bitmap width */
+				bitmapSize.getHeight(), /* bitmap height */
+				0 /* We rotate the image ourselves, so IGNORE sensorRotation altogether */
 		);
 	}
+
+	@Override
+	protected void processImage() {
+		// No mutex needed as this method is not reentrant.
+		if (computingDetection) {
+			readyForNextImage();
+			return;
+		}
+		computingDetection = true;
+
+		if (faces.size() == 10) {
+			readyForNextImage();
+			return;
+		}
+
+		// Return list of detected faces
+		List<Pair<FaceDetector.Face, FaceScanner.Face>> data = faceRecognizer.process(getCroppedBitmap(), false);
+		computingDetection = false;
+
+		if (data.size() > 1) {
+			if (lastAdd == -1) { // last frame had two faces too
+				subText.setText(R.string.found_2_faces);
+			}
+			lastAdd = -1;
+			readyForNextImage();
+			return;
+		} else if (lastAdd == -1) {
+			lastAdd = System.currentTimeMillis();
+		}
+		if (data.size() == 0) {
+			if (lastAdd == -2) { // last frame had 0 faces too
+				subText.setText(R.string.cant_find_face);
+			}
+			lastAdd = -2;
+			readyForNextImage();
+			return;
+		} else if (lastAdd == -2) {
+			lastAdd = System.currentTimeMillis();
+		}
+
+		Pair<FaceDetector.Face, FaceScanner.Face> face = data.get(0);
+
+		// Do we want to add a new face?
+		if (lastAdd + 1000 < System.currentTimeMillis()) {
+			lastAdd = System.currentTimeMillis();
+			if (face.second.getBrightnessHint() < 1) {
+				subText.setText(R.string.cant_scan_face);
+				readyForNextImage();
+				return;
+			} else {
+				subText.setText(R.string.scan_face_now);
+			}
+			faces.add(face.second);
+			overlayView.setPercentage(faces.size() * 10);
+		}
+
+		if (faces.size() == 10) {
+			startActivity(new Intent(this, EnrollActivity.class).putExtra("faces",
+					FaceDataEncoder.encode(faces.stream().map(FaceScanner.Face::getExtra).toArray(float[][]::new))));
+			finish();
+		}
+
+		// Clean up
+		readyForNextImage();
+	}
 }
diff --git a/app/src/main/res/layout/activity_main.xml b/app/src/main/res/layout/activity_main.xml
index 1f0ca0c..8e109dd 100644
--- a/app/src/main/res/layout/activity_main.xml
+++ b/app/src/main/res/layout/activity_main.xml
@@ -4,7 +4,7 @@
 	android:layout_height="match_parent"
 	android:orientation="vertical">
 
-	<androidx.camera.view.PreviewView
+	<com.libremobileos.yifan.face.AutoFitTextureView
 		android:id="@+id/viewFinder"
 		android:layout_width="match_parent"
 		android:layout_height="match_parent" />
diff --git a/app/src/main/res/layout/enroll_main.xml b/app/src/main/res/layout/enroll_main.xml
index 41c032b..5e5c125 100644
--- a/app/src/main/res/layout/enroll_main.xml
+++ b/app/src/main/res/layout/enroll_main.xml
@@ -33,7 +33,7 @@
 		android:orientation="vertical"
 		app:cardCornerRadius="360dp">
 
-		<androidx.camera.view.PreviewView
+		<com.libremobileos.yifan.face.AutoFitTextureView
 			android:id="@+id/viewFinder"
 			android:layout_width="400dp"
 			android:layout_height="400dp"