Initial import
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..f34a624
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,29 @@
+name: Build APK
+
+on:
+  push:
+    branches: ["master"]
+  workflow_dispatch:
+
+concurrency:
+  group: "build"
+  cancel-in-progress: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Setup Java JDK
+        uses: actions/setup-java@v3.9.0
+        with:
+          java-version: 11
+          distribution: adopt
+      - name: Gradle Build Action
+        run: ./gradlew :FaceExample:assembleDebug
+      - name: Upload artifact
+        uses: actions/upload-artifact@v2
+        with:
+          name: FaceExample-debug.apk
+          path: FaceExample/build/outputs/apk/debug/FaceExample-debug.apk
diff --git a/.github/workflows/javadoc.yml b/.github/workflows/javadoc.yml
new file mode 100644
index 0000000..220f51d
--- /dev/null
+++ b/.github/workflows/javadoc.yml
@@ -0,0 +1,44 @@
+name: Deploy JavaDoc to Pages
+
+on:
+  push:
+    branches: ["master"]
+  workflow_dispatch:
+
+permissions:
+  contents: read
+  pages: write
+  id-token: write
+
+concurrency:
+  group: "pages"
+  cancel-in-progress: true
+
+jobs:
+  deploy:
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Setup Pages
+        uses: actions/configure-pages@v2
+      - name: Setup Java JDK
+        uses: actions/setup-java@v3.9.0
+        with:
+          java-version: 11
+          distribution: adopt
+      - name: Gradle Build Action
+        run: ./gradlew :FaceShared:generateWithGpuReleaseJavadoc
+      - name: Combine docs
+        run: mkdir -p build/docs && cp -r FaceShared/build/docs/javadoc build/docs/FaceShared
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v1
+        with:
+          # Upload only the combined Javadoc output (build/docs), not the entire repository
+          path: 'build/docs/'
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v1
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d1f3319
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,97 @@
+*.iml
+.gradle
+/local.properties
+/.idea
+.DS_Store
+/build
+/captures
+.externalNativeBuild
+.cxx
+local.properties
+# Built application files
+*.apk
+*.aar
+*.ap_
+*.aab
+
+# Files for the ART/Dalvik VM
+*.dex
+
+# Java class files
+*.class
+
+# Generated files
+bin/
+gen/
+out/
+#  Uncomment the following line in case you need and you don't have the release build type files in your app
+# release/
+
+# Gradle files
+.gradle/
+build/
+
+# Local configuration file (sdk path, etc)
+local.properties
+
+# Proguard folder generated by Eclipse
+proguard/
+
+# Log Files
+*.log
+
+# Android Studio Navigation editor temp files
+.navigation/
+
+# Android Studio captures folder
+captures/
+
+# IntelliJ
+*.iml
+.idea/workspace.xml
+.idea/tasks.xml
+.idea/gradle.xml
+.idea/assetWizardSettings.xml
+.idea/dictionaries
+.idea/libraries
+# Android Studio 3 in .gitignore file.
+.idea/caches
+.idea/modules.xml
+# Comment next line if keeping position of elements in Navigation Editor is relevant for you
+.idea/navEditor.xml
+
+# Keystore files
+# Uncomment the following lines if you do not want to check your keystore files in.
+*.jks
+*.keystore
+keystore.properties
+
+# External native build folder generated in Android Studio 2.2 and later
+.externalNativeBuild
+.cxx/
+
+# Google Services (e.g. APIs or Firebase)
+# google-services.json
+
+# Freeline
+freeline.py
+freeline/
+freeline_project_description.json
+
+# fastlane
+fastlane/report.xml
+fastlane/Preview.html
+fastlane/screenshots
+fastlane/test_output
+fastlane/readme.md
+
+# Version control
+vcs.xml
+
+# lint
+lint/intermediates/
+lint/generated/
+lint/outputs/
+lint/tmp/
+# lint/reports/
+
diff --git a/FaceExample/.gitignore b/FaceExample/.gitignore
new file mode 100644
index 0000000..42afabf
--- /dev/null
+++ b/FaceExample/.gitignore
@@ -0,0 +1 @@
+/build
\ No newline at end of file
diff --git a/FaceExample/build.gradle b/FaceExample/build.gradle
new file mode 100644
index 0000000..e361d4b
--- /dev/null
+++ b/FaceExample/build.gradle
@@ -0,0 +1,52 @@
+plugins {
+	id 'com.android.application'
+}
+
+android {
+	namespace 'com.libremobileos.facedetect'
+	compileSdk 33
+
+	defaultConfig {
+		applicationId "com.libremobileos.yifan.face.example"
+		minSdk 28
+		targetSdk 33
+		versionCode 1
+		versionName "1.0"
+		missingDimensionStrategy 'gpu', 'withGpu' // include gpu delegate support. withoutGpu = exclude it
+	}
+
+	buildTypes {
+		release {
+			minifyEnabled false
+			proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+		}
+	}
+	compileOptions {
+		sourceCompatibility JavaVersion.VERSION_11
+		targetCompatibility JavaVersion.VERSION_11
+	}
+}
+
+dependencies {
+	implementation('androidx.annotation:annotation:1.6.0')
+	implementation('androidx.appcompat:appcompat:1.6.1')
+	implementation(project(':FaceShared'))
+
+	def camerax_version = "1.3.0-alpha04"
+	implementation "androidx.camera:camera-core:${camerax_version}"
+	implementation "androidx.camera:camera-camera2:${camerax_version}"
+	implementation "androidx.camera:camera-lifecycle:${camerax_version}"
+	implementation "androidx.camera:camera-video:${camerax_version}"
+	implementation "androidx.camera:camera-view:${camerax_version}"
+	implementation "androidx.camera:camera-extensions:${camerax_version}"
+	implementation "androidx.exifinterface:exifinterface:1.3.6"
+
+	constraints {
+		implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.8.0") {
+			because("kotlin-stdlib-jdk7 is now a part of kotlin-stdlib")
+		}
+		implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.8.0") {
+			because("kotlin-stdlib-jdk8 is now a part of kotlin-stdlib")
+		}
+	}
+}
\ No newline at end of file
diff --git a/FaceExample/proguard-rules.pro b/FaceExample/proguard-rules.pro
new file mode 100644
index 0000000..481bb43
--- /dev/null
+++ b/FaceExample/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
\ No newline at end of file
diff --git a/FaceExample/src/main/AndroidManifest.xml b/FaceExample/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..108779a
--- /dev/null
+++ b/FaceExample/src/main/AndroidManifest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	xmlns:tools="http://schemas.android.com/tools">
+
+	<uses-feature
+		android:name="android.hardware.camera"
+		android:required="true" />
+
+	<uses-permission android:name="android.permission.CAMERA" />
+
+	<application
+		android:allowBackup="true"
+		android:dataExtractionRules="@xml/data_extraction_rules"
+		android:fullBackupContent="@xml/backup_rules"
+		android:icon="@mipmap/ic_launcher"
+		android:label="@string/app_name"
+		android:supportsRtl="true"
+		android:theme="@style/Theme.FaceDetect"
+		tools:targetApi="31">
+		<activity
+			android:name=".MainActivity"
+			android:exported="true">
+			<intent-filter>
+				<action android:name="android.intent.action.MAIN" />
+
+				<category android:name="android.intent.category.LAUNCHER" />
+			</intent-filter>
+		</activity>
+	</application>
+
+</manifest>
\ No newline at end of file
diff --git a/FaceExample/src/main/java/com/libremobileos/facedetect/BitmapUtils.java b/FaceExample/src/main/java/com/libremobileos/facedetect/BitmapUtils.java
new file mode 100644
index 0000000..dbc303b
--- /dev/null
+++ b/FaceExample/src/main/java/com/libremobileos/facedetect/BitmapUtils.java
@@ -0,0 +1,256 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * File imported without required modifications, only fixing IDE warnings.
+ * Source: https://github.com/googlesamples/mlkit/blob/d10c447f8259b59262582c30c1608cdf38f4e4a0/android/vision-quickstart/app/src/main/java/com/google/mlkit/vision/demo/BitmapUtils.java
+ */
+
+package com.libremobileos.facedetect;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import androidx.camera.core.ExperimentalGetImage;
+import androidx.camera.core.ImageProxy;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+/** Utils functions for bitmap conversions. */
+public class BitmapUtils {
+	/** Describing a frame info. */
+	public static class FrameMetadata {
+
+		private final int width;
+		private final int height;
+		private final int rotation;
+
+		public int getWidth() {
+			return width;
+		}
+
+		public int getHeight() {
+			return height;
+		}
+
+		public int getRotation() {
+			return rotation;
+		}
+
+		private FrameMetadata(int width, int height, int rotation) {
+			this.width = width;
+			this.height = height;
+			this.rotation = rotation;
+		}
+
+		/** Builder of {@link FrameMetadata}. */
+		public static class Builder {
+
+			private int width;
+			private int height;
+			private int rotation;
+
+			public Builder setWidth(int width) {
+				this.width = width;
+				return this;
+			}
+
+			public Builder setHeight(int height) {
+				this.height = height;
+				return this;
+			}
+
+			public Builder setRotation(int rotation) {
+				this.rotation = rotation;
+				return this;
+			}
+
+			public FrameMetadata build() {
+				return new FrameMetadata(width, height, rotation);
+			}
+		}
+	}
+
+	/** Converts NV21 format byte buffer to bitmap. */
+	@Nullable
+	public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
+		data.rewind();
+		byte[] imageInBuffer = new byte[data.limit()];
+		data.get(imageInBuffer, 0, imageInBuffer.length);
+		try {
+			YuvImage image =
+					new YuvImage(
+							imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null);
+			ByteArrayOutputStream stream = new ByteArrayOutputStream();
+			image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream);
+
+			Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
+
+			stream.close();
+			return rotateBitmap(bmp, metadata.getRotation());
+		} catch (Exception e) {
+			Log.e("VisionProcessorBase", "Error: " + e.getMessage());
+		}
+		return null;
+	}
+
+	/** Converts a YUV_420_888 image from CameraX API to a bitmap. */
+	@Nullable
+	@ExperimentalGetImage
+	public static Bitmap getBitmap(ImageProxy image) {
+		FrameMetadata frameMetadata =
+				new FrameMetadata.Builder()
+						.setWidth(image.getWidth())
+						.setHeight(image.getHeight())
+						.setRotation(image.getImageInfo().getRotationDegrees())
+						.build();
+
+		ByteBuffer nv21Buffer =
+				yuv420ThreePlanesToNV21(Objects.requireNonNull(image.getImage()).getPlanes(), image.getWidth(), image.getHeight());
+		return getBitmap(nv21Buffer, frameMetadata);
+	}
+
+	/** Rotates a bitmap if it is converted from a bytebuffer. */
+	private static Bitmap rotateBitmap(
+			Bitmap bitmap, int rotationDegrees) {
+		Matrix matrix = new Matrix();
+
+		// Rotate the image back to straight.
+		matrix.postRotate(rotationDegrees);
+
+		Bitmap rotatedBitmap =
+				Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
+
+		// Recycle the old bitmap if it has changed.
+		if (rotatedBitmap != bitmap) {
+			bitmap.recycle();
+		}
+		return rotatedBitmap;
+	}
+
+	/**
+	 * Converts YUV_420_888 to NV21 bytebuffer.
+	 *
+	 * <p>The NV21 format consists of a single byte array containing the Y, U and V values. For an
+	 * image of size S, the first S positions of the array contain all the Y values. The remaining
+	 * positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both
+	 * dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain
+	 * S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU
+	 *
+	 * <p>YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled
+	 * by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and
+	 * V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into
+	 * the first part of the NV21 array. The U and V planes may already have the representation in the
+	 * NV21 format. This happens if the planes share the same buffer, the V buffer is one position
+	 * before the U buffer and the planes have a pixelStride of 2. If this is case, we can just copy
+	 * them to the NV21 array.
+	 */
+	private static ByteBuffer yuv420ThreePlanesToNV21(
+			Plane[] yuv420888planes, int width, int height) {
+		int imageSize = width * height;
+		byte[] out = new byte[imageSize + 2 * (imageSize / 4)];
+
+		if (areUVPlanesNV21(yuv420888planes, width, height)) {
+			// Copy the Y values.
+			yuv420888planes[0].getBuffer().get(out, 0, imageSize);
+
+			ByteBuffer uBuffer = yuv420888planes[1].getBuffer();
+			ByteBuffer vBuffer = yuv420888planes[2].getBuffer();
+			// Get the first V value from the V buffer, since the U buffer does not contain it.
+			vBuffer.get(out, imageSize, 1);
+			// Copy the first U value and the remaining VU values from the U buffer.
+			uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1);
+		} else {
+			// Fallback to copying the UV values one by one, which is slower but also works.
+			// Unpack Y.
+			unpackPlane(yuv420888planes[0], width, height, out, 0, 1);
+			// Unpack U.
+			unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2);
+			// Unpack V.
+			unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2);
+		}
+
+		return ByteBuffer.wrap(out);
+	}
+
+	/** Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format. */
+	private static boolean areUVPlanesNV21(Plane[] planes, int width, int height) {
+		int imageSize = width * height;
+
+		ByteBuffer uBuffer = planes[1].getBuffer();
+		ByteBuffer vBuffer = planes[2].getBuffer();
+
+		// Backup buffer properties.
+		int vBufferPosition = vBuffer.position();
+		int uBufferLimit = uBuffer.limit();
+
+		// Advance the V buffer by 1 byte, since the U buffer will not contain the first V value.
+		vBuffer.position(vBufferPosition + 1);
+		// Chop off the last byte of the U buffer, since the V buffer will not contain the last U value.
+		uBuffer.limit(uBufferLimit - 1);
+
+		// Check that the buffers are equal and have the expected number of elements.
+		boolean areNV21 =
+				(vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0);
+
+		// Restore buffers to their initial state.
+		vBuffer.position(vBufferPosition);
+		uBuffer.limit(uBufferLimit);
+
+		return areNV21;
+	}
+
+	/**
+	 * Unpack an image plane into a byte array.
+	 *
+	 * <p>The input plane data will be copied in 'out', starting at 'offset' and every pixel will be
+	 * spaced by 'pixelStride'. Note that there is no row padding on the output.
+	 */
+	private static void unpackPlane(
+			Plane plane, int width, int height, byte[] out, int offset, int pixelStride) {
+		ByteBuffer buffer = plane.getBuffer();
+		buffer.rewind();
+
+		// Compute the size of the current plane.
+	// We assume that it has the same aspect ratio as the original image.
+		int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride();
+		if (numRow == 0) {
+			return;
+		}
+		int scaleFactor = height / numRow;
+		int numCol = width / scaleFactor;
+
+		// Extract the data in the output buffer.
+		int outputPos = offset;
+		int rowStart = 0;
+		for (int row = 0; row < numRow; row++) {
+			int inputPos = rowStart;
+			for (int col = 0; col < numCol; col++) {
+				out[outputPos] = buffer.get(inputPos);
+				outputPos += pixelStride;
+				inputPos += plane.getPixelStride();
+			}
+			rowStart += plane.getRowStride();
+		}
+	}
+}
\ No newline at end of file
diff --git a/FaceExample/src/main/java/com/libremobileos/facedetect/FaceBoundsOverlayView.java b/FaceExample/src/main/java/com/libremobileos/facedetect/FaceBoundsOverlayView.java
new file mode 100644
index 0000000..8da8b9e
--- /dev/null
+++ b/FaceExample/src/main/java/com/libremobileos/facedetect/FaceBoundsOverlayView.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.facedetect;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.RectF;
+import android.util.AttributeSet;
+import android.util.Pair;
+import android.view.View;
+
+import androidx.annotation.Nullable;
+
+import com.libremobileos.yifan.face.ImageUtils;
+
+import java.util.List;
+
+public class FaceBoundsOverlayView extends View {
+
+	private List<Pair<RectF, String>> bounds = null;
+	private Paint paint, textPaint;
+	private Matrix transform = null;
+	private int extraWidth, extraHeight, viewWidth, viewHeight, sensorWidth, sensorHeight;
+
+	public FaceBoundsOverlayView(Context context) {
+		this(context, null);
+	}
+
+	public FaceBoundsOverlayView(Context context, @Nullable AttributeSet attrs) {
+		this(context, attrs, 0);
+	}
+
+	public FaceBoundsOverlayView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
+		this(context, attrs, defStyleAttr, 0);
+	}
+
+	public FaceBoundsOverlayView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
+		super(context, attrs, defStyleAttr, defStyleRes);
+	}
+
+	@Override
+	protected void onDraw(Canvas canvas) {
+		super.onDraw(canvas);
+		if (bounds == null || transform == null || paint == null)
+			return; // am I ready yet?
+		for (Pair<RectF, String> bound : bounds) {
+			canvas.drawRect(bound.first, paint);
+			if (bound.second != null)
+				canvas.drawText(bound.second, bound.first.left, bound.first.bottom, textPaint);
+		}
+	}
+
+	@Override
+	protected void onSizeChanged(int w, int h, int oldWidth, int oldHeight) {
+		super.onSizeChanged(w, h, oldWidth, oldHeight);
+		viewWidth = w;
+		viewHeight = h;
+		transform = null;
+	}
+
+	// Please give me RectF's that won't be used otherwise, as I modify them.
+	public void updateBounds(List<Pair<RectF, String>> inputBounds, int sensorWidth, int sensorHeight) {
+		this.bounds = inputBounds;
+		// if we have no paint yet, make one
+		if (paint == null) {
+			paint = new Paint();
+			paint.setStyle(Paint.Style.STROKE);
+			paint.setStrokeWidth(10f);
+			paint.setColor(Color.RED);
+		}
+		if (textPaint == null) {
+			textPaint = new Paint();
+			textPaint.setColor(Color.RED);
+			textPaint.setTextSize(100);
+		}
+		// if camera size or view size changed, recalculate it
+		if (this.sensorWidth != sensorWidth || this.sensorHeight != sensorHeight || (viewWidth + viewHeight) > 0) {
+			this.sensorWidth = sensorWidth;
+			this.sensorHeight = sensorHeight;
+			int oldWidth = viewWidth;
+			int oldHeight = viewHeight;
+			extraWidth = 0;
+			extraHeight = 0;
+			// calculate scaling keeping aspect ratio
+			int newHeight = (int)((oldWidth / (float)sensorWidth) * sensorHeight);
+			int newWidth = (int)((oldHeight / (float)sensorHeight) * sensorWidth);
+			// calculate out black bars
+			if (newWidth > oldWidth) {
+				extraHeight = (oldHeight - newHeight) / 2;
+				viewHeight = newHeight;
+			} else {
+				extraWidth = (oldWidth - newWidth) / 2;
+				viewWidth = newWidth;
+			}
+			// scale from image size to view size
+			transform = ImageUtils.getTransformationMatrix(sensorWidth, sensorHeight, viewWidth, viewHeight, 0, false);
+			viewWidth = 0; viewHeight = 0;
+		}
+		// map bounds to view size
+		for (Pair<RectF, String> bound : bounds) {
+			transform.mapRect(bound.first);
+			bound.first.offset(extraWidth, extraHeight);
+		}
+		invalidate();
+	}
+}
diff --git a/FaceExample/src/main/java/com/libremobileos/facedetect/MainActivity.java b/FaceExample/src/main/java/com/libremobileos/facedetect/MainActivity.java
new file mode 100644
index 0000000..a3243d1
--- /dev/null
+++ b/FaceExample/src/main/java/com/libremobileos/facedetect/MainActivity.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.facedetect;
+
+import android.content.res.Configuration;
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.os.Bundle;
+import android.util.Pair;
+import android.util.Size;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.widget.EditText;
+import android.widget.ImageView;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.OptIn;
+import androidx.appcompat.app.AlertDialog;
+import androidx.appcompat.app.AppCompatActivity;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.ExperimentalGetImage;
+import androidx.camera.core.ImageAnalysis;
+import androidx.camera.core.Preview;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.camera.view.PreviewView;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+import com.libremobileos.yifan.face.FaceRecognizer;
+import com.libremobileos.yifan.face.FaceStorageBackend;
+import com.libremobileos.yifan.face.SharedPreferencesFaceStorageBackend;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+
+public class MainActivity extends AppCompatActivity {
+
+	// CameraX boilerplate
+	private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
+	// View showing camera frames
+	private PreviewView previewView;
+	// AI-based detector
+	private FaceRecognizer faceRecognizer;
+	// Simple view allowing us to draw Rectangles over the Preview
+	private FaceBoundsOverlayView overlayView;
+	// The desired camera input size
+	private final Size desiredInputSize = new Size(640, 480);
+	// The calculated actual processing width & height
+	private int width, height;
+	// Store registered Faces in Memory
+	private FaceStorageBackend faceStorage;
+	// If we are waiting for a face to be added to knownFaces
+	private boolean addPending = false;
+
+	@Override
+	protected void onCreate(@Nullable Bundle savedInstanceState) {
+		// Initialize basic views
+		super.onCreate(savedInstanceState);
+		setContentView(R.layout.activity_main);
+		previewView = findViewById(R.id.viewFinder);
+		previewView.setScaleType(PreviewView.ScaleType.FIT_CENTER);
+		overlayView = findViewById(R.id.overlay);
+		overlayView.setOnClickListener(v -> addPending = true);
+		setTitle(getString(R.string.tap_to_add_face));
+
+		// CameraX boilerplate (create camera connection)
+		cameraProviderFuture = ProcessCameraProvider.getInstance(this);
+		cameraProviderFuture.addListener(() -> {
+			try {
+				ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
+				bindPreview(cameraProvider);
+			} catch (ExecutionException | InterruptedException e) {
+				// No errors need to be handled for this Future.
+				// This should never be reached.
+			}
+		}, getMainExecutor());
+
+	}
+
+	@OptIn(markerClass = ExperimentalGetImage.class)
+	private void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
+		// We're connected to the camera, set up everything
+		Preview preview = new Preview.Builder()
+				.build();
+
+		// Which camera to use
+		int selectedCamera = CameraSelector.LENS_FACING_FRONT;
+		CameraSelector cameraSelector = new CameraSelector.Builder()
+				.requireLensFacing(selectedCamera)
+				.build();
+
+		preview.setSurfaceProvider(previewView.getSurfaceProvider());
+
+		// Cameras give us landscape images. If we are in portrait mode
+		// (and want to process a portrait image), swap width/height to
+		// make the image portrait.
+		if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
+			width = desiredInputSize.getHeight();
+			height = desiredInputSize.getWidth();
+		} else {
+			width = desiredInputSize.getWidth();
+			height = desiredInputSize.getHeight();
+		}
+
+		// Set up CameraX boilerplate and configure it to drop frames if we can't keep up
+		ImageAnalysis imageAnalysis =
+				new ImageAnalysis.Builder()
+						.setTargetResolution(new Size(width, height))
+						.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+						.build();
+
+		imageAnalysis.setAnalyzer(getMainExecutor(), imageProxy -> {
+			// Convert CameraX Image to Bitmap and process it
+			// Return list of detected faces
+			List<FaceRecognizer.Face> data = faceRecognizer.recognize(BitmapUtils.getBitmap(imageProxy));
+			ArrayList<Pair<RectF, String>> bounds = new ArrayList<>();
+
+			for (FaceRecognizer.Face face : data) {
+				RectF boundingBox = new RectF(face.getLocation());
+
+				// Camera is frontal so the image is flipped horizontally,
+				// so flip it again.
+				Matrix flip = new Matrix();
+				flip.postScale(-1, 1, width / 2.0f, height / 2.0f);
+				flip.mapRect(boundingBox);
+
+				// Generate UI text for face
+				String uiText;
+				// Do we want to add a new face?
+				if (addPending) {
+					// If we want to add a new face, show the dialog.
+					runOnUiThread(() -> showAddFaceDialog(face));
+					addPending = false;
+				}
+				// Do we have any match?
+				if (face.isRecognized()) {
+					// If yes, show the user-visible ID and the detection confidence
+					uiText = face.getModelCount() + " " + face.getTitle() + " " + face.getDistance();
+				} else {
+					// Show detected object type (always "Face") and how confident the AI is that this is a Face
+					uiText = face.getTitle() + " " + face.getDetectionConfidence();
+				}
+				bounds.add(new Pair<>(boundingBox, uiText));
+			}
+
+			// Pass bounds to View drawing rectangles
+			overlayView.updateBounds(bounds, width, height);
+			// Clean up
+			imageProxy.close();
+		});
+
+		// Bind all objects together
+		/* Camera camera = */ cameraProvider.bindToLifecycle(this, cameraSelector, imageAnalysis, preview);
+
+		// Create AI-based face detection
+		//faceStorage = new VolatileFaceStorageBackend();
+		faceStorage = new SharedPreferencesFaceStorageBackend(getSharedPreferences("faces", 0));
+		faceRecognizer = FaceRecognizer.create(this,
+				faceStorage, /* face data storage */
+				0.6f, /* minimum confidence to consider object as face */
+				width, /* bitmap width */
+				height, /* bitmap height */
+				0, /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
+				0.7f, /* maximum distance to track face */
+				1 /* minimum model count to track face */
+		);
+	}
+
+	private void showAddFaceDialog(FaceRecognizer.Face rec) {
+		AlertDialog.Builder builder = new AlertDialog.Builder(this);
+		LayoutInflater inflater = getLayoutInflater();
+		View dialogLayout = inflater.inflate(R.layout.image_edit_dialog, null);
+		ImageView ivFace = dialogLayout.findViewById(R.id.dlg_image);
+		TextView tvTitle = dialogLayout.findViewById(R.id.dlg_title);
+		EditText etName = dialogLayout.findViewById(R.id.dlg_input);
+
+		tvTitle.setText(R.string.add_face);
+		// Add preview of cropped face to verify we're adding the correct one
+		ivFace.setImageBitmap(rec.getCrop());
+		etName.setHint(R.string.input_name);
+
+		builder.setPositiveButton(R.string.ok, (dlg, i) -> {
+			String name = etName.getText().toString();
+			if (name.isEmpty()) {
+				return;
+			}
+			// Save facial features in knownFaces
+			if (!faceStorage.extendRegistered(name, rec.getExtra(), true)) {
+				Toast.makeText(this, R.string.register_failed, Toast.LENGTH_LONG).show();
+			}
+			dlg.dismiss();
+		});
+		builder.setView(dialogLayout);
+		builder.show();
+	}
+
+}
diff --git a/FaceExample/src/main/res/drawable-v24/ic_launcher_foreground.xml b/FaceExample/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..c6aee64
--- /dev/null
+++ b/FaceExample/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+	xmlns:aapt="http://schemas.android.com/aapt"
+	android:width="108dp"
+	android:height="108dp"
+	android:viewportWidth="108"
+	android:viewportHeight="108">
+	<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
+		<aapt:attr name="android:fillColor">
+			<gradient
+				android:endX="85.84757"
+				android:endY="92.4963"
+				android:startX="42.9492"
+				android:startY="49.59793"
+				android:type="linear">
+				<item
+					android:color="#44000000"
+					android:offset="0.0" />
+				<item
+					android:color="#00000000"
+					android:offset="1.0" />
+			</gradient>
+		</aapt:attr>
+	</path>
+	<path
+		android:fillColor="#FFFFFF"
+		android:fillType="nonZero"
+		android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
+		android:strokeWidth="1"
+		android:strokeColor="#00000000" />
+</vector>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/drawable/ic_launcher_background.xml b/FaceExample/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..4e12b43
--- /dev/null
+++ b/FaceExample/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="utf-8"?>
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+	android:width="108dp"
+	android:height="108dp"
+	android:viewportWidth="108"
+	android:viewportHeight="108">
+	<path
+		android:fillColor="#3DDC84"
+		android:pathData="M0,0h108v108h-108z" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M9,0L9,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,0L19,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M29,0L29,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M39,0L39,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M49,0L49,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M59,0L59,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M69,0L69,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M79,0L79,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M89,0L89,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M99,0L99,108"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,9L108,9"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,19L108,19"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,29L108,29"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,39L108,39"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,49L108,49"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,59L108,59"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,69L108,69"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,79L108,79"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,89L108,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M0,99L108,99"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,29L89,29"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,39L89,39"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,49L89,49"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,59L89,59"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,69L89,69"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M19,79L89,79"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M29,19L29,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M39,19L39,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M49,19L49,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M59,19L59,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M69,19L69,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+	<path
+		android:fillColor="#00000000"
+		android:pathData="M79,19L79,89"
+		android:strokeWidth="0.8"
+		android:strokeColor="#33FFFFFF" />
+</vector>
diff --git a/FaceExample/src/main/res/layout/activity_main.xml b/FaceExample/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..1f0ca0c
--- /dev/null
+++ b/FaceExample/src/main/res/layout/activity_main.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	android:layout_width="match_parent"
+	android:layout_height="match_parent"
+	android:orientation="vertical">
+
+	<androidx.camera.view.PreviewView
+		android:id="@+id/viewFinder"
+		android:layout_width="match_parent"
+		android:layout_height="match_parent" />
+
+	<com.libremobileos.facedetect.FaceBoundsOverlayView
+		android:id="@+id/overlay"
+		android:layout_width="match_parent"
+		android:layout_height="match_parent" />
+
+</FrameLayout>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/layout/image_edit_dialog.xml b/FaceExample/src/main/res/layout/image_edit_dialog.xml
new file mode 100644
index 0000000..60e46ca
--- /dev/null
+++ b/FaceExample/src/main/res/layout/image_edit_dialog.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	android:layout_width="wrap_content"
+	android:layout_height="wrap_content"
+	xmlns:tools="http://schemas.android.com/tools"
+	android:orientation="vertical"
+	android:padding="16dp">
+
+	<TextView
+		android:id="@+id/dlg_title"
+		android:layout_gravity="center"
+		android:textSize="20sp"
+		tools:text="The dialog title"
+		android:layout_width="wrap_content"
+		android:layout_height="wrap_content"/>
+
+	<ImageView
+		android:layout_gravity="center"
+		android:id="@+id/dlg_image"
+		android:layout_width="200dp"
+		android:layout_height="200dp"
+		android:scaleType="centerCrop"
+		android:adjustViewBounds="true"
+		/>
+
+	<EditText
+		android:layout_gravity="center"
+		android:id="@+id/dlg_input"
+		android:layout_width="wrap_content"
+		android:layout_height="wrap_content"
+		tools:hint="The dialog hint"
+		/>
+
+
+</LinearLayout>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..65bd9d3
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+	<background android:drawable="@drawable/ic_launcher_background" />
+	<foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..65bd9d3
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+	<background android:drawable="@drawable/ic_launcher_background" />
+	<foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/mipmap-anydpi-v33/ic_launcher.xml b/FaceExample/src/main/res/mipmap-anydpi-v33/ic_launcher.xml
new file mode 100644
index 0000000..52ac069
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-anydpi-v33/ic_launcher.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+	<background android:drawable="@drawable/ic_launcher_background" />
+	<foreground android:drawable="@drawable/ic_launcher_foreground" />
+	<monochrome android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/mipmap-hdpi/ic_launcher_foreground.png b/FaceExample/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
new file mode 100644
index 0000000..4eaccdd
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
Binary files differ
diff --git a/FaceExample/src/main/res/mipmap-mdpi/ic_launcher_foreground.png b/FaceExample/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
new file mode 100644
index 0000000..7baaea0
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
Binary files differ
diff --git a/FaceExample/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png b/FaceExample/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
new file mode 100644
index 0000000..9b01b6d
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
Binary files differ
diff --git a/FaceExample/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png b/FaceExample/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
new file mode 100644
index 0000000..bfc2f07
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
Binary files differ
diff --git a/FaceExample/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png b/FaceExample/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
new file mode 100644
index 0000000..0ee89d8
--- /dev/null
+++ b/FaceExample/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
Binary files differ
diff --git a/FaceExample/src/main/res/values/colors.xml b/FaceExample/src/main/res/values/colors.xml
new file mode 100644
index 0000000..d95bf93
--- /dev/null
+++ b/FaceExample/src/main/res/values/colors.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+	<color name="purple_200">#FFBB86FC</color>
+	<color name="purple_500">#FF6200EE</color>
+	<color name="purple_700">#FF3700B3</color>
+	<color name="teal_200">#FF03DAC5</color>
+	<color name="teal_700">#FF018786</color>
+	<color name="black">#FF000000</color>
+	<color name="white">#FFFFFFFF</color>
+</resources>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/values/strings.xml b/FaceExample/src/main/res/values/strings.xml
new file mode 100644
index 0000000..573d092
--- /dev/null
+++ b/FaceExample/src/main/res/values/strings.xml
@@ -0,0 +1,8 @@
+<resources>
+	<string name="app_name">FaceDetect</string>
+	<string name="add_face">Add Face</string>
+	<string name="input_name">Input name</string>
+	<string name="ok">OK</string>
+	<string name="tap_to_add_face">Tap anywhere to add face</string>
+	<string name="register_failed">Registering the face failed.</string>
+</resources>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/values/themes.xml b/FaceExample/src/main/res/values/themes.xml
new file mode 100644
index 0000000..aaa8e1b
--- /dev/null
+++ b/FaceExample/src/main/res/values/themes.xml
@@ -0,0 +1,6 @@
+<resources xmlns:tools="http://schemas.android.com/tools">
+	<!-- Base application theme. -->
+	<style name="Theme.FaceDetect" parent="Theme.AppCompat.DayNight">
+		<!-- Customize your theme here. -->
+	</style>
+</resources>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/xml/backup_rules.xml b/FaceExample/src/main/res/xml/backup_rules.xml
new file mode 100644
index 0000000..26457c5
--- /dev/null
+++ b/FaceExample/src/main/res/xml/backup_rules.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?><!--
+   Sample backup rules file; uncomment and customize as necessary.
+   See https://developer.android.com/guide/topics/data/autobackup
+   for details.
+   Note: This file is ignored for devices older than API 31
+   See https://developer.android.com/about/versions/12/backup-restore
+-->
+<full-backup-content>
+   <exclude domain="sharedpref" path="faces.xml"/>
+</full-backup-content>
\ No newline at end of file
diff --git a/FaceExample/src/main/res/xml/data_extraction_rules.xml b/FaceExample/src/main/res/xml/data_extraction_rules.xml
new file mode 100644
index 0000000..3fc20df
--- /dev/null
+++ b/FaceExample/src/main/res/xml/data_extraction_rules.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Sample data extraction rules file; uncomment and customize as necessary.
+   See https://developer.android.com/about/versions/12/backup-restore#xml-changes
+   for details.
+-->
+<data-extraction-rules>
+	<cloud-backup>
+		<exclude domain="sharedpref" path="faces.xml" />
+	</cloud-backup>
+    <device-transfer>
+	    <exclude domain="sharedpref" path="faces.xml" />
+    </device-transfer>
+</data-extraction-rules>
\ No newline at end of file
diff --git a/FaceShared/.gitignore b/FaceShared/.gitignore
new file mode 100644
index 0000000..42afabf
--- /dev/null
+++ b/FaceShared/.gitignore
@@ -0,0 +1 @@
+/build
\ No newline at end of file
diff --git a/FaceShared/build.gradle b/FaceShared/build.gradle
new file mode 100644
index 0000000..05f8802
--- /dev/null
+++ b/FaceShared/build.gradle
@@ -0,0 +1,57 @@
+plugins {
+	id 'com.android.library'
+}
+
+// https://stackoverflow.com/a/29164335, https://stackoverflow.com/a/52286740, https://stackoverflow.com/a/71848953, https://stackoverflow.com/a/63547141
+android.libraryVariants.all { variant ->
+	project.tasks.create("generate${variant.name.capitalize()}Javadoc", Javadoc) {
+		description "Generates Javadoc for $variant.name."
+		source = variant.sourceSets.collect { it.java.sourceFiles }.inject { m, i -> m + i }
+		doFirst {
+			classpath = project.files(variant.javaCompileProvider.get().classpath.files,
+					project.android.getBootClasspath())
+		}
+		exclude "**/R"
+		exclude "**/R.**"
+		exclude "**/R\$**"
+		exclude "**/BuildConfig*"
+		ext.androidJar = "${android.sdkDirectory}/platforms/${android.compileSdkVersion}/android.jar"
+		options.links("https://d.android.com/reference/")
+		options.linksOffline('https://d.android.com/reference/', 'https://d.android.com/reference/androidx/')
+	}
+}
+
+android {
+	namespace 'com.libremobileos.yifan.face'
+	compileSdk 33
+
+	defaultConfig {
+		minSdk 26
+		targetSdk 33
+	}
+
+	buildTypes {
+		release {
+			minifyEnabled false
+		}
+	}
+	compileOptions {
+		sourceCompatibility JavaVersion.VERSION_11
+		targetCompatibility JavaVersion.VERSION_11
+	}
+
+	flavorDimensions "gpu"
+	productFlavors {
+		withGpu {
+			dependencies {
+				implementation('org.tensorflow:tensorflow-lite-gpu:2.11.0')
+			}
+		}
+		withoutGpu {}
+	}
+
+}
+
+dependencies {
+	implementation('org.tensorflow:tensorflow-lite:2.11.0')
+}
diff --git a/FaceShared/src/Android.bp b/FaceShared/src/Android.bp
new file mode 100644
index 0000000..eb3f361
--- /dev/null
+++ b/FaceShared/src/Android.bp
@@ -0,0 +1,41 @@
+java_library_static {
+    name: "YifanFaceShared",
+    sdk_version: "current",
+    srcs: ["main/java/**/*.java", "withoutGpu/java/**/*.java"],
+    manifest: "main/AndroidManifest.xml",
+    required: ["yifan-detect-class1.tflite", "yifan-mobile_face_net.tflite"],
+
+    static_libs: [
+        "tensorflowlite_java",
+    ],
+}
+
+prebuilt_etc {
+    name: "yifan-detect-class1.tflite",
+    filename: "detect-class1.tflite",
+    src: "main/assets/detect-class1.tflite",
+    sub_dir: "face",
+    required: ["yifan-detect-class1.txt"],
+}
+
+prebuilt_etc {
+    name: "yifan-detect-class1.txt",
+    filename: "detect-class1.txt",
+    src: "main/assets/detect-class1.txt",
+    sub_dir: "face",
+}
+
+prebuilt_etc {
+    name: "yifan-mobile_face_net.tflite",
+    filename: "mobile_face_net.tflite",
+    src: "main/assets/mobile_face_net.tflite",
+    sub_dir: "face",
+    required: ["yifan-mobile_face_net.txt"],
+}
+
+prebuilt_etc {
+    name: "yifan-mobile_face_net.txt",
+    filename: "mobile_face_net.txt",
+    src: "main/assets/mobile_face_net.txt",
+    sub_dir: "face",
+}
diff --git a/FaceShared/src/main/AndroidManifest.xml b/FaceShared/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..a343b35
--- /dev/null
+++ b/FaceShared/src/main/AndroidManifest.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.libremobileos.yifan.face.shared">
+
+</manifest>
diff --git a/FaceShared/src/main/assets/detect-class1.tflite b/FaceShared/src/main/assets/detect-class1.tflite
new file mode 100644
index 0000000..f93116d
--- /dev/null
+++ b/FaceShared/src/main/assets/detect-class1.tflite
Binary files differ
diff --git a/FaceShared/src/main/assets/detect-class1.txt b/FaceShared/src/main/assets/detect-class1.txt
new file mode 100644
index 0000000..36018c0
--- /dev/null
+++ b/FaceShared/src/main/assets/detect-class1.txt
@@ -0,0 +1,4 @@
+???
+Face
+Profile
+BackHead
diff --git a/FaceShared/src/main/assets/mobile_face_net.tflite b/FaceShared/src/main/assets/mobile_face_net.tflite
new file mode 100644
index 0000000..836000e
--- /dev/null
+++ b/FaceShared/src/main/assets/mobile_face_net.tflite
Binary files differ
diff --git a/FaceShared/src/main/assets/mobile_face_net.txt b/FaceShared/src/main/assets/mobile_face_net.txt
new file mode 100644
index 0000000..c1b0730
--- /dev/null
+++ b/FaceShared/src/main/assets/mobile_face_net.txt
@@ -0,0 +1 @@
+x
\ No newline at end of file
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java
new file mode 100644
index 0000000..39bd645
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/AutoFitTextureView.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
+public class AutoFitTextureView extends TextureView {
+  private int ratioWidth = 0;
+  private int ratioHeight = 0;
+
+  public AutoFitTextureView(final Context context) {
+    this(context, null);
+  }
+
+  public AutoFitTextureView(final Context context, final AttributeSet attrs) {
+    this(context, attrs, 0);
+  }
+
+  public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
+    super(context, attrs, defStyle);
+  }
+
+  /**
+   * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
+   * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
+   * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) yield the same result.
+   *
+   * @param width Relative horizontal size
+   * @param height Relative vertical size
+   */
+  public void setAspectRatio(final int width, final int height) {
+    if (width < 0 || height < 0) {
+      throw new IllegalArgumentException("Size cannot be negative.");
+    }
+    ratioWidth = width;
+    ratioHeight = height;
+    requestLayout();
+  }
+
+  @Override
+  protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
+    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+    final int width = MeasureSpec.getSize(widthMeasureSpec);
+    final int height = MeasureSpec.getSize(heightMeasureSpec);
+    if (0 == ratioWidth || 0 == ratioHeight) {
+      setMeasuredDimension(width, height);
+    } else {
+      if (width < height * ratioWidth / ratioHeight) {
+        setMeasuredDimension(width, width * ratioHeight / ratioWidth);
+      } else {
+        setMeasuredDimension(height * ratioWidth / ratioHeight, height);
+      }
+    }
+  }
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/DirectoryFaceStorageBackend.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/DirectoryFaceStorageBackend.java
new file mode 100644
index 0000000..207f5ef
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/DirectoryFaceStorageBackend.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.util.Log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * {@link FaceStorageBackend} to store data in a directory. Directory must not contain files other than those created by this class!
+ */
+public class DirectoryFaceStorageBackend extends FaceStorageBackend {
+	private final File dir;
+
+	public DirectoryFaceStorageBackend(File dir) {
+		this.dir = dir;
+		if (!dir.exists()) {
+			throw new IllegalArgumentException("directory.exists() == false");
+		}
+		if (!dir.isDirectory()) {
+			throw new IllegalArgumentException("directory.isDirectory() == false");
+		}
+		if (!dir.canRead()) {
+			throw new IllegalArgumentException("directory.canRead() == false");
+		}
+		if (!dir.canWrite()) {
+			throw new IllegalArgumentException("directory.canWrite() == false");
+		}
+	}
+
+	@Override
+	protected Set<String> getNamesInternal() {
+		// Java...
+		return new HashSet<>(Arrays.asList(Objects.requireNonNull(dir.list())));
+	}
+
+	@Override
+	protected boolean registerInternal(String name, String data, boolean duplicate) {
+		File f = new File(dir, name);
+		try {
+			if (f.exists()) {
+				if (!duplicate)
+					throw new IOException("f.exists() && !duplicate == true");
+			} else {
+				if (!f.createNewFile())
+					throw new IOException("f.createNewFile() failed");
+			}
+			OutputStreamWriter faceOSW = new OutputStreamWriter(new FileOutputStream(f));
+			faceOSW.write(data);
+			faceOSW.close();
+			return true;
+		} catch (IOException e) {
+			Log.e("DirectoryFaceStorageBackend", Log.getStackTraceString(e));
+		}
+		return false;
+	}
+
+	@Override
+	protected String getInternal(String name) {
+		File f = new File(dir, name);
+		try {
+			if (!f.exists()) {
+				throw new IOException("f.exists() == false");
+			}
+			if (!f.canRead()) {
+				throw new IOException("f.canRead() == false");
+			}
+			try (InputStream inputStream = new FileInputStream(f)) {
+				// https://stackoverflow.com/a/35446009
+				ByteArrayOutputStream result = new ByteArrayOutputStream();
+				byte[] buffer = new byte[1024];
+				for (int length; (length = inputStream.read(buffer)) != -1; ) {
+					result.write(buffer, 0, length);
+				}
+				// toString(Charset) overload is API 33+; use the charset-name overload for minSdk 26
+				return result.toString(StandardCharsets.UTF_8.name());
+			}
+		} catch (IOException e) {
+			Log.e("DirectoryFaceStorageBackend", Log.getStackTraceString(e));
+		}
+		return null;
+	}
+
+	@Override
+	protected boolean deleteInternal(String name) {
+		File f = new File(dir, name);
+		try {
+			if (!f.exists()) {
+				throw new IOException("f.exists() == false");
+			}
+			if (!f.canWrite()) {
+				throw new IOException("f.canWrite() == false");
+			}
+			return f.delete();
+		} catch (IOException e) {
+			Log.e("DirectoryFaceStorageBackend", Log.getStackTraceString(e));
+		}
+		return false;
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDataEncoder.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDataEncoder.java
new file mode 100644
index 0000000..851d7bc
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDataEncoder.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.Base64;
+
+public class FaceDataEncoder {
+	private static final Base64.Encoder encoder = Base64.getUrlEncoder();
+	private static final Base64.Decoder decoder = Base64.getUrlDecoder();
+
+	/**
+	 * Encode face model to string.
+	 * @param alldata Face model.
+	 * @return Encoded face model.
+	 */
+	public static String encode(float[][] alldata) {
+		StringBuilder b = new StringBuilder();
+		for (float[] data : alldata) {
+			ByteBuffer buff = ByteBuffer.allocate(4 * data.length);
+			for (float f : data) {
+				buff.putFloat(f);
+			}
+			b.append(encoder.encodeToString(buff.array())).append(":");
+		}
+		return b.substring(0, b.length() - 1);
+	}
+
+	/**
+	 * Decode face model encoded by {@link #encode(float[][])}
+	 * @param data Encoded face model.
+	 * @return Face model.
+	 */
+	public static float[][] decode(String data) {
+		String[] a = data.split(":");
+		float[][] f = new float[a.length][];
+		int i = 0;
+		for (String s : a) {
+			FloatBuffer buf = ByteBuffer.wrap(decoder.decode(s)).asFloatBuffer();
+			f[i] = new float[buf.capacity()];
+			buf.get(f[i++]);
+		}
+		return f;
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDetector.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDetector.java
new file mode 100644
index 0000000..fc73e7f
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceDetector.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.util.Log;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Detect multiple faces in one large {@link Bitmap} and returns {@link Face} objects.
+ * Requires preprocessed {@link InputImage} objects from {@link InputImageProcessor}.
+ */
+public class FaceDetector {
+	// Asset manager to load TFLite model
+	private final AssetManager am;
+	// TFLite Model API
+	private SimilarityClassifier classifier;
+	// Optional settings
+	private final boolean hwAcceleration, enhancedHwAcceleration;
+	private final int numThreads;
+	private final float minConfidence;
+	// Face Detection model parameters
+	private static final int TF_FD_API_INPUT_SIZE = 300;
+	private static final boolean TF_FD_API_IS_QUANTIZED = true;
+	private static final String TF_FD_API_MODEL_FILE = "detect-class1.tflite";
+	private static final String TF_FD_API_LABELS_FILE = "detect-class1.txt";
+	// Maintain aspect ratio or squish image?
+	private static final boolean MAINTAIN_ASPECT = false;
+
+	/**
+	 * Wrapper around {@link Bitmap} to avoid user passing unprocessed data
+	 * @see InputImageProcessor
+	 */
+	public static class InputImage {
+		private final Bitmap processedImage;
+		private final Matrix cropToFrameTransform;
+
+		/* package-private */ InputImage(Bitmap processedImage, Matrix cropToFrameTransform) {
+			this.processedImage = processedImage;
+			this.cropToFrameTransform = cropToFrameTransform;
+		}
+
+		/* package-private */ Bitmap getProcessedImage() {
+			return processedImage;
+		}
+
+		/* package-private */ Matrix getCropToFrameTransform() {
+			return cropToFrameTransform;
+		}
+	}
+
+	/**
+	 * Processes {@link Bitmap}s to compatible format
+	 * @see InputImage
+	 */
+	public static class InputImageProcessor {
+		private final Matrix frameToCropTransform;
+		private final Matrix cropToFrameTransform = new Matrix();
+
+		/**
+		 * Create new {@link InputImage} processor.
+		 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+		 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+		 * @param sensorOrientation rotation if the image should be rotated, or 0.
+		 */
+		public InputImageProcessor(int inputWidth, int inputHeight, int sensorOrientation) {
+			frameToCropTransform =
+					ImageUtils.getTransformationMatrix(
+							sensorOrientation % 180 != 0 ? inputHeight : inputWidth,
+							sensorOrientation % 180 != 0 ? inputWidth : inputHeight,
+							TF_FD_API_INPUT_SIZE, TF_FD_API_INPUT_SIZE,
+							0, MAINTAIN_ASPECT);
+			if (sensorOrientation != 0) {
+				Matrix myRotationMatrix =
+						ImageUtils.getTransformationMatrix(
+								inputWidth, inputHeight,
+								sensorOrientation % 180 != 0 ? inputHeight : inputWidth,
+								sensorOrientation % 180 != 0 ? inputWidth : inputHeight,
+								sensorOrientation % 360, false);
+				frameToCropTransform.setConcat(frameToCropTransform, myRotationMatrix);
+			}
+			frameToCropTransform.invert(cropToFrameTransform);
+		}
+
+		/**
+		 * Process {@link Bitmap} for use in AI model.
+		 * @param input {@link Bitmap} with length/height that were specified in the constructor
+		 * @return Processed {@link InputImage}
+		 */
+		public InputImage process(Bitmap input) {
+			Bitmap croppedBitmap = Bitmap.createBitmap(TF_FD_API_INPUT_SIZE, TF_FD_API_INPUT_SIZE, Bitmap.Config.ARGB_8888);
+			final Canvas canvas = new Canvas(croppedBitmap);
+			canvas.drawBitmap(input, frameToCropTransform, null);
+			return new InputImage(croppedBitmap, cropToFrameTransform);
+		}
+	}
+
+	/** An immutable result returned by a {@link FaceDetector} describing what was recognized. */
+	public static class Face {
+		// A unique identifier for what has been recognized. Specific to the class, not the instance of
+		// the object.
+		private final String id;
+
+		private final Float confidence;
+
+		private final RectF location;
+
+		/* package-private */ Face(
+				final String id, final Float confidence, final RectF location) {
+			this.id = id;
+			this.confidence = confidence;
+			this.location = location;
+		}
+
+		/* package-private */ String getId() {
+			return id;
+		}
+
+		/**
+		 * A score for how good the detection is relative to others.
+		 * @return Sortable score, higher is better. Min: 0f Max: 1.0f
+		 */
+		public Float getConfidence() {
+			return confidence;
+		}
+
+		/**
+		 * Optional location within the source image for the location of the recognized object.
+		 * @return {@link RectF} containing location on input image
+		 */
+		public RectF getLocation() {
+			return new RectF(location);
+		}
+
+		@Override
+		public String toString() {
+			String resultString = "";
+			if (id != null) {
+				resultString += "[" + id + "] ";
+			}
+
+			if (confidence != null) {
+				resultString += String.format(Locale.US, "(%.1f%%) ", confidence * 100.0f);
+			}
+
+			if (location != null) {
+				resultString += location + " ";
+			}
+
+			return resultString.trim();
+		}
+
+	}
+
+	/**
+	 * Create {@link FaceDetector} instance.
+	 * @param context Android {@link Context} object, may be in background.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+	 * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+	 * @return {@link FaceDetector} instance.
+	 * @see #create(Context, float)
+	 */
+	public static FaceDetector create(Context context, float minConfidence, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		AssetManager assetmanager = null;
+		if (context != null)
+			assetmanager = context.getAssets();
+		return new FaceDetector(assetmanager, minConfidence, hwAcceleration, enhancedHwAcceleration, numThreads);
+	}
+
+	/**
+	 * Create {@link FaceDetector} instance with sensible defaults regarding hardware acceleration (CPU, XNNPACK, 4 threads).
+	 * @param context Android {@link Context} object, may be in background.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @return {@link FaceDetector} instance.
+	 * @see #create(Context, float, boolean, boolean, int)
+	 */
+	@SuppressWarnings("unused")
+	public static FaceDetector create(Context context, float minConfidence) {
+		return create(context, minConfidence, false, true, 4);
+	}
+
+	private FaceDetector(AssetManager am, float minConfidence, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		this.am = am;
+		this.minConfidence = minConfidence;
+		this.hwAcceleration = hwAcceleration;
+		this.enhancedHwAcceleration = enhancedHwAcceleration;
+		this.numThreads = numThreads;
+	}
+
+	private SimilarityClassifier getClassifier() throws IOException {
+		if (classifier == null) {
+			classifier = SimilarityClassifier.create(am,
+					TF_FD_API_MODEL_FILE,
+					TF_FD_API_LABELS_FILE,
+					TF_FD_API_INPUT_SIZE,
+					TF_FD_API_IS_QUANTIZED,
+					hwAcceleration,
+					enhancedHwAcceleration,
+					numThreads
+			);
+		}
+		return classifier;
+	}
+
+	/**
+	 * Detect multiple faces in an {@link InputImage} and return their locations.
+	 * @param input Image, processed with {@link InputImageProcessor}
+	 * @return List of {@link Face} objects
+	 */
+	public List<Face> detectFaces(InputImage input) {
+		try {
+			List<SimilarityClassifier.Recognition> results = getClassifier().recognizeImage(input.getProcessedImage());
+
+			final List<Face> mappedRecognitions = new LinkedList<>();
+			for (final SimilarityClassifier.Recognition result : results) {
+				final RectF location = result.getLocation();
+				if (location != null && result.getDistance() >= minConfidence) {
+					input.getCropToFrameTransform().mapRect(location);
+					mappedRecognitions.add(new Face(result.getId(), result.getDistance(), location));
+				}
+			}
+			return mappedRecognitions;
+		} catch (IOException e) {
+			Log.e("FaceDetector", Log.getStackTraceString(e));
+			return null;
+		}
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java
new file mode 100644
index 0000000..6b2ba04
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Combination of {@link FaceDetector} and {@link FaceScanner}
+ * for workloads where both face detection and face scanning are required.
+ * However, this class makes no assumptions about the workload and is therefore bare-bones.
+ * Because of this, usage of a task-specific class like {@link FaceRecognizer}
+ * is highly recommended, unless these do not fit your use case.
+ */
+public class FaceFinder {
+	private final FaceDetector faceDetector;
+	private final FaceDetector.InputImageProcessor detectorInputProcessor;
+	/* package-private */ final FaceScanner faceScanner;
+	private final int sensorOrientation;
+
+	private FaceFinder(Context ctx, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		this.faceDetector = FaceDetector.create(ctx, minConfidence, hwAcceleration, enhancedHwAcceleration, numThreads);
+		this.faceScanner = FaceScanner.create(ctx, hwAcceleration, enhancedHwAcceleration, numThreads);
+		this.sensorOrientation = sensorOrientation;
+		this.detectorInputProcessor = new FaceDetector.InputImageProcessor(inputWidth, inputHeight, sensorOrientation);
+	}
+
+	/**
+	 * Create new {@link FaceFinder} instance.
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+	 * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+	 * @return {@link FaceFinder} instance
+	 * @see #create(Context, float, int, int, int)
+	 */
+	public static FaceFinder create(Context ctx, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		return new FaceFinder(ctx, minConfidence, inputWidth, inputHeight, sensorOrientation, hwAcceleration, enhancedHwAcceleration, numThreads);
+	}
+
+	/**
+	 * Create new {@link FaceFinder} instance with sensible defaults regarding hardware acceleration (CPU, XNNPACK, 4 threads).
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @return {@link FaceFinder} instance
+	 * @see #create(Context, float, int, int, int, boolean, boolean, int)
+	 */
+	@SuppressWarnings("unused")
+	public static FaceFinder create(Context ctx, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation) {
+		return create(ctx, minConfidence, inputWidth, inputHeight, sensorOrientation, false, true, 4);
+	}
+
+	/**
+	 * Process a Bitmap using {@link FaceDetector},
+	 * scanning the resulting found faces using {@link FaceScanner} after manually cropping the image.
+	 * Adds extra metadata (location) to {@link FaceScanner.Face} on a best-effort basis.
+	 * @param input Bitmap to process.
+	 * @param allowPostprocessing Allow postprocessing to improve detection quality. Undesirable when registering faces.
+	 * @return {@link List} of {@link Pair}s of detection results from {@link FaceDetector} and {@link FaceScanner}
+	 */
+	public List<Pair<FaceDetector.Face, FaceScanner.Face>> process(Bitmap input, boolean allowPostprocessing) {
+		FaceDetector.InputImage inputImage = detectorInputProcessor.process(input);
+
+		final List<FaceDetector.Face> faces = faceDetector.detectFaces(inputImage);
+		final List<Pair<FaceDetector.Face, FaceScanner.Face>> results = new ArrayList<>();
+
+		if (faces != null && faces.size() > 0) {
+			final FaceScanner.InputImageProcessor scannerInputProcessor = new FaceScanner.InputImageProcessor(input, sensorOrientation);
+
+			for (FaceDetector.Face face : faces) {
+				if (face == null) continue;
+
+				FaceScanner.InputImage faceBmp = scannerInputProcessor.process(face.getLocation());
+				if (faceBmp == null) continue;
+
+				final FaceScanner.Face scanned = faceScanner.detectFace(faceBmp, allowPostprocessing);
+				if (scanned == null) continue;
+
+				scanned.addData(face.getId(), face.getLocation());
+
+				results.add(new Pair<>(face, scanned));
+			}
+		}
+
+		return results;
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java
new file mode 100644
index 0000000..8fb0ae9
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java
@@ -0,0 +1,227 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Task-specific API for detecting &amp; recognizing faces in an image.
+ * Uses {@link FaceFinder} to detect and scan faces, {@link FaceStorageBackend} to store and retrieve the saved faces and returns the optimal result.<br>
+ * Refrain from using this class for registering faces into the recognition system; {@link FaceFinder} does not perform post processing and is as such better suited.
+ */
+public class FaceRecognizer {
+	private final FaceStorageBackend storage;
+	private final FaceFinder detector;
+	// Maximum distance (difference) to a saved face for a detection to count as recognized.
+	private final float maxDistance;
+	// Minimum count of matching detection models.
+	private final int minMatchingModels;
+	// Minimum ratio of matching detection models.
+	private final float minModelRatio;
+
+	private FaceRecognizer(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, int minMatchingModels, float minModelRatio, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		this.storage = storage;
+		this.detector = FaceFinder.create(ctx, minConfidence, inputWidth, inputHeight, sensorOrientation, hwAcceleration, enhancedHwAcceleration, numThreads);
+		this.maxDistance = maxDistance;
+		this.minMatchingModels = minMatchingModels;
+		this.minModelRatio = minModelRatio;
+	}
+
+	/**
+	 * Create {@link FaceRecognizer} instance, with minimum matching model constraint.
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param storage The {@link FaceStorageBackend} containing faces to be recognized.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param minMatchingModels Minimum count of matching models for one face to count as recognized. If undesired, set to 1
+	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+	 * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+	 * @return {@link FaceRecognizer} instance.
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int)
+	 */
+	public static FaceRecognizer create(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, int minMatchingModels, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		return new FaceRecognizer(ctx, storage, minConfidence, inputWidth, inputHeight, sensorOrientation, maxDistance, minMatchingModels, 0, hwAcceleration, enhancedHwAcceleration, numThreads);
+	}
+
+	/**
+	 * Create {@link FaceRecognizer} instance, with matching model ratio constraint.
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param storage The {@link FaceStorageBackend} containing faces to be recognized.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param minModelRatio Minimum ratio of matching models for one face to count as recognized. Must be higher or equal to 0.0f and smaller or equal to 1.0f. If undesired, set to 0f
+	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+	 * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+	 * @return {@link FaceRecognizer} instance.
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int)
+	 */
+	public static FaceRecognizer create(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, float minModelRatio, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		return new FaceRecognizer(ctx, storage, minConfidence, inputWidth, inputHeight, sensorOrientation, maxDistance, 0, minModelRatio, hwAcceleration, enhancedHwAcceleration, numThreads);
+	}
+
+	/**
+	 * Create {@link FaceRecognizer} instance, with minimum matching model constraint and sensible defaults regarding hardware acceleration (CPU, XNNPACK, 4 threads).
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param storage The {@link FaceStorageBackend} containing faces to be recognized.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param minMatchingModels Minimum count of matching models for one face to count as recognized. If undesired, set to 1
+	 * @return {@link FaceRecognizer} instance.
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float)
+	 */
+	public static FaceRecognizer create(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, int minMatchingModels) {
+		return create(ctx, storage, minConfidence, inputWidth, inputHeight, sensorOrientation, maxDistance, minMatchingModels, false, true, 4);
+	}
+
+	/**
+	 * Create {@link FaceRecognizer} instance, with matching model ratio constraint and sensible defaults regarding hardware acceleration (CPU, XNNPACK, 4 threads).
+	 * @param ctx Android {@link Context} object, may be in background.
+	 * @param storage The {@link FaceStorageBackend} containing faces to be recognized.
+	 * @param minConfidence Minimum confidence to track a detection, must be higher than 0.0f and smaller than 1.0f
+	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
+	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
+	 * @param sensorOrientation rotation if the image should be rotated, or 0.
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param minModelRatio Minimum ratio of matching models for one face to count as recognized. Must be higher or equal to 0.0f and smaller or equal to 1.0f. If undesired, set to 0f
+	 * @return {@link FaceRecognizer} instance.
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float, boolean, boolean, int)
+	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int)
+	 */
+	@SuppressWarnings("unused")
+	public static FaceRecognizer create(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, float minModelRatio) {
+		return create(ctx, storage, minConfidence, inputWidth, inputHeight, sensorOrientation, maxDistance, minModelRatio, false, true, 4);
+	}
+
+	/** Stores a combination of {@link FaceScanner.Face} and {@link FaceDetector.Face}, for face recognition workloads */
+	public static class Face extends FaceScanner.Face {
+		private final float confidence;
+		private final int modelCount;
+		private final float modelRatio;
+
+		/* package-private */ Face(String id, String title, Float distance, Float confidence, RectF location, Bitmap crop, float[] extra, int modelCount, float modelRatio, float brightnessTest1, float brightnessTest2) {
+			super(id, title, distance, location, crop, extra, brightnessTest1, brightnessTest2);
+			this.confidence = confidence;
+			this.modelRatio = modelRatio;
+			this.modelCount = modelCount;
+		}
+
+		/* package-private */ Face(FaceScanner.Face original, Float confidence, int modelCount, float modelRatio) {
+			this(original.getId(), original.getTitle(), original.getDistance(), confidence, original.getLocation(), original.getCrop(), original.getExtra(), modelCount, modelRatio, original.brightnessTest1, original.brightnessTest2);
+		}
+
+		/* package-private */ Face(FaceDetector.Face raw, FaceScanner.Face original, int modelCount, float modelRatio) {
+			this(original, raw.getConfidence(), modelCount, modelRatio);
+		}
+
+		/**
+		 * A score for how good the detection (NOT recognition, that's {@link #getDistance()}) is relative to others.
+		 * @return Sortable score, higher is better. Min: 0f Max: 1.0f
+		 */
+		public float getDetectionConfidence() {
+			return confidence;
+		}
+
+		/**
+		 * How many models detected the face.
+		 * @return Model count
+		 */
+		public int getModelCount() {
+			return modelCount;
+		}
+
+		/**
+		 * How many models detected the face, ratio. Min: 0f Max: 1f
+		 * @return {@link #getModelCount()} divided through number of available models
+		 */
+		@SuppressWarnings("unused")
+		public float getModelRatio() {
+			return modelRatio;
+		}
+	}
+
+	/**
+	 * Detect faces and scan them
+	 * @param input {@link Bitmap} to process
+	 * @return {@link List} of {@link Face}s
+	 */
+	public List<Face> recognize(Bitmap input) {
+		final Set<String> savedFaces = storage.getNames();
+		final List<Pair<FaceDetector.Face, FaceScanner.Face>> faces = detector.process(input,
+				true /* allow post processing, nobody will (should) use this class for registering faces */);
+		final List<Face> results = new ArrayList<>();
+
+		for (Pair<FaceDetector.Face, FaceScanner.Face> faceFacePair : faces) {
+			FaceDetector.Face found = faceFacePair.first; // The generic Face object indicating where a Face is
+			FaceScanner.Face scanned = faceFacePair.second; // The Face object with face-scanning data
+			// Go through all saved faces and compare them with our scanned face
+			int matchingModelsOut = 0;
+			float modelRatioOut = 0;
+			for (String savedName : savedFaces) {
+				float[][] rawData = storage.get(savedName);
+				int matchingModels = 0;
+				float finalDistance = Float.MAX_VALUE;
+				// Go through all saved models for one face
+				for (float[] data : rawData) {
+					float newDistance = scanned.compare(data);
+					// Count as a match only if the distance is below the threshold (likely the same face)
+					if (newDistance < maxDistance) {
+						matchingModels++;
+						if (finalDistance > newDistance)
+							finalDistance = newDistance;
+					}
+				}
+				float modelRatio = (float)matchingModels / rawData.length;
+				// If another known face had better similarity, don't save it
+				if (minModelRatio > 0 ? minModelRatio < modelRatio :
+						matchingModels >= Math.min(rawData.length, minMatchingModels) && finalDistance < scanned.getDistance()) {
+					// We have a match! Save "Face identifier" and "Distance to original values"
+					scanned.addRecognitionData(savedName, finalDistance);
+					matchingModelsOut = matchingModels;
+					modelRatioOut = modelRatio;
+				}
+			}
+
+			results.add(new Face(found, scanned, matchingModelsOut, modelRatioOut));
+		}
+		return results;
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java
new file mode 100644
index 0000000..77002c4
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java
@@ -0,0 +1,491 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.ColorMatrix;
+import android.graphics.ColorMatrixColorFilter;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.RectF;
+import android.util.Log;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Raw wrapper around AI model that scans ONE Face inside a perfectly cropped Bitmap and returns facial features.
+ * Most likely, specialized classes like {@link FaceRecognizer} or {@link FaceFinder}
+ * fit your use case better.
+ */
+public class FaceScanner {
+	// Asset manager to load TFLite model
+	private final AssetManager am;
+	// TFLite Model API
+	private SimilarityClassifier classifier;
+	// Optional settings
+	private final boolean hwAcceleration, enhancedHwAcceleration;
+	private final int numThreads;
+	// MobileFaceNet model parameters
+	private static final int TF_OD_API_INPUT_SIZE = 112;
+	private static final boolean TF_OD_API_IS_QUANTIZED = false;
+	private static final String TF_OD_API_MODEL_FILE = "mobile_face_net.tflite";
+	private static final String TF_OD_API_LABELS_FILE = "mobile_face_net.txt";
+	// Maintain aspect ratio or squish image?
+	private static final boolean MAINTAIN_ASPECT = false;
+	// Brightness data
+	private final float[][] brightnessTest;
+
+	/**
+	 * Wrapper around Bitmap to avoid user passing unprocessed data
+	 * @see InputImageProcessor
+	 */
+	public static class InputImage {
+		private final Bitmap processedImage;
+		private final Bitmap userDisplayableImage;
+
+		/* package-private */ InputImage(Bitmap processedImage, Bitmap userDisplayableImage) {
+			this.processedImage = processedImage;
+			this.userDisplayableImage = userDisplayableImage;
+		}
+
+		/* package-private */ Bitmap getProcessedImage() {
+			return processedImage;
+		}
+
+		/* package-private */ Bitmap getUserDisplayableImage() {
+			return userDisplayableImage;
+		}
+	}
+
+	/**
+	 * Processes Bitmaps to compatible format.
+	 * This class supports 2 modes of operation:<br>
+	 * 1. Preprocess perfectly cropped {@link Bitmap} to AI-compatible format, using the static method {@link #process(Bitmap, int)}<br>
+	 * 2. Crop one large {@link Bitmap} to multiple {@link InputImage}s using bounds inside {@link RectF} objects,
+	 *    with {@link #InputImageProcessor(Bitmap, int)} and {@link #process(RectF)}.
+	 *    This allows processing multiple faces on one {@link Bitmap}, for usage with {@link FaceDetector} and similar classes.
+	 * @see InputImage
+	 */
+	public static class InputImageProcessor {
+		// Full input image rotated to its upright orientation by the constructor.
+		private final Bitmap portraitBmp;
+		// Maps coordinates from the raw input image into portraitBmp space.
+		private final Matrix transform;
+		private final int sensorOrientation;
+
+		/**
+		 * If the class gets instantiated, we enter a special mode of operation for detecting multiple faces on one large {@link Bitmap}.
+		 * @param rawImage The image with all faces to be detected
+		 * @param sensorOrientation rotation if the image should be rotated, or 0.
+		 */
+		public InputImageProcessor(Bitmap rawImage, int sensorOrientation) {
+			this.sensorOrientation = sensorOrientation;
+			// Width/height are swapped when the image is rotated by 90/270 degrees.
+			Bitmap portraitBmp = Bitmap.createBitmap(
+					(sensorOrientation % 180) == 90 ? rawImage.getHeight() : rawImage.getWidth(),
+					(sensorOrientation % 180) == 90 ? rawImage.getWidth() : rawImage.getHeight(), Bitmap.Config.ARGB_8888);
+			// Source and destination sizes are identical here, so this base matrix is
+			// effectively an identity transform; rotation is concatenated below when needed.
+			transform = ImageUtils.getTransformationMatrix(
+					rawImage.getWidth(),
+					rawImage.getHeight(),
+					rawImage.getWidth(),
+					rawImage.getHeight(),
+					0,
+					MAINTAIN_ASPECT);
+			if (sensorOrientation != 0) {
+				Matrix myRotationMatrix =
+						ImageUtils.getTransformationMatrix(
+								rawImage.getWidth(), rawImage.getHeight(),
+								sensorOrientation % 180 != 0 ? rawImage.getHeight() : rawImage.getWidth(),
+								sensorOrientation % 180 != 0 ? rawImage.getWidth() : rawImage.getHeight(),
+								sensorOrientation % 360, false);
+				// NOTE(review): rotation is prepended here (rotation * transform), while the
+				// static process() below appends it (transform * rotation) — confirm both
+				// concat orders are intentional.
+				transform.setConcat(myRotationMatrix, transform);
+			}
+			final Canvas cv = new Canvas(portraitBmp);
+			cv.drawBitmap(rawImage, transform, null);
+			this.portraitBmp = portraitBmp;
+		}
+
+		/**
+		 * In normal mode of operation, we take a perfectly cropped {@link Bitmap} containing one face and process it.
+		 * @param input Bitmap to process.
+		 * @param sensorOrientation rotation if the image should be rotated, or 0.
+		 * @return Converted {@link InputImage}
+		 */
+		public static InputImage process(Bitmap input, int sensorOrientation) {
+			// Scale (and optionally rotate) the crop down to the model's square input size.
+			Matrix frameToCropTransform =
+					ImageUtils.getTransformationMatrix(
+							sensorOrientation % 180 != 0 ? input.getHeight() : input.getWidth(),
+							sensorOrientation % 180 != 0 ? input.getWidth() : input.getHeight(),
+							TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE,
+							0, MAINTAIN_ASPECT);
+			if (sensorOrientation != 0) {
+				Matrix myRotationMatrix =
+						ImageUtils.getTransformationMatrix(
+								input.getWidth(), input.getHeight(),
+								sensorOrientation % 180 != 0 ? input.getHeight() : input.getWidth(),
+								sensorOrientation % 180 != 0 ? input.getWidth() : input.getHeight(),
+								sensorOrientation % 360, false);
+				frameToCropTransform.setConcat(frameToCropTransform, myRotationMatrix);
+			}
+			Bitmap croppedBitmap = Bitmap.createBitmap(TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE, Bitmap.Config.ARGB_8888);
+			final Canvas canvas = new Canvas(croppedBitmap);
+			canvas.drawBitmap(input, frameToCropTransform, null);
+			// The original (unscaled) crop is kept as the user-displayable image.
+			return new InputImage(croppedBitmap, input);
+		}
+
+		/**
+		 * In normal mode of operation, we take a perfectly cropped {@link Bitmap} containing one face and process it.
+		 * This utility method uses sensorOrientation that was passed in the constructor and calls {@link #process(Bitmap, int)}
+		 * @param input Bitmap to process.
+		 * @return Converted {@link InputImage}
+		 * @see #process(Bitmap, int)
+		 */
+		public InputImage process(Bitmap input) {
+			return process(input, sensorOrientation);
+		}
+
+		/**
+		 * In special mode of operation, we crop the image to detect multiple faces on one large {@link Bitmap} (in multiple passes).
+		 * @param inputBB {@link RectF} containing location of face cropped out next
+		 * @return Converted {@link InputImage}, or {@code null} if the mapped bounds fall (partially) outside the image
+		 */
+		public InputImage process(RectF inputBB) {
+			RectF faceBB = new RectF(inputBB);
+			// Map the detector's bounding box into the rotated portrait bitmap's space.
+			transform.mapRect(faceBB);
+			// Reject boxes that do not lie fully inside the bitmap; Bitmap.createBitmap
+			// would throw for out-of-range coordinates.
+			if (faceBB.left < 0 || faceBB.top < 0 || faceBB.bottom < 0 ||
+					faceBB.right < 0 || (faceBB.left + faceBB.width()) > portraitBmp.getWidth()
+					|| (faceBB.top + faceBB.height()) > portraitBmp.getHeight()) return null;
+			return process(Bitmap.createBitmap(portraitBmp,
+					(int) faceBB.left,
+					(int) faceBB.top,
+					(int) faceBB.width(),
+					(int) faceBB.height()), 0);
+		}
+	}
+
+	// Runs the classifier on a solid-colored bitmap and returns the resulting
+	// embedding. Two such reference embeddings (white and black) are computed in
+	// the constructor and compared against each scanned face in detectFace() to
+	// derive Face.getBrightnessHint().
+	private float[] brightnessTest(int color) {
+		Bitmap b = Bitmap.createBitmap(FaceScanner.TF_OD_API_INPUT_SIZE, FaceScanner.TF_OD_API_INPUT_SIZE, Bitmap.Config.ARGB_8888);
+		Canvas c = new Canvas(b);
+		c.drawColor(color);
+		List<SimilarityClassifier.Recognition> results;
+		try {
+			results = getClassifier().recognizeImage(b);
+		} catch (IOException e) {
+			// Model loading failed; rethrown unchecked because this runs during construction.
+			throw new RuntimeException(e);
+		}
+		return results.get(0).getExtra()[0];
+	}
+
+
+	/** An immutable result returned by a FaceDetector describing what was recognized. */
+	public static class Face {
+		// A unique identifier for what has been recognized. Specific to the class, not the instance of
+		// the object.
+		private String id;
+
+		// Display name; set later via addRecognitionData() once recognized.
+		private String title;
+
+		// Recognition score, lower is better; set later via addRecognitionData().
+		private Float distance;
+
+		// Location on the source image; set later via addData().
+		private RectF location;
+
+		// User-displayable crop of the face, may be null.
+		private final Bitmap crop;
+
+		// Raw embedding produced by the model.
+		private final float[] extra;
+
+		// Similarity of this face's embedding to the all-white / all-black reference
+		// embeddings; consumed by getBrightnessHint().
+		/* package-private */ final float brightnessTest1, brightnessTest2;
+
+		/* package-private */ Face(
+				final String id, final String title, final Float distance, final RectF location, final Bitmap crop, final float[] extra, final float brightnessTest1, final float brightnessTest2) {
+			this.id = id;
+			this.title = title;
+			this.distance = distance;
+			this.location = location;
+			this.crop = crop;
+			this.extra = extra;
+			this.brightnessTest1 = brightnessTest1;
+			this.brightnessTest2 = brightnessTest2;
+		}
+
+		/* package-private */ String getId() {
+			return id;
+		}
+
+		/**
+		 * Display name for the recognition.
+		 * @return Title as {@link String}
+		 */
+		public String getTitle() {
+			return title;
+		}
+
+		/**
+		 * A score for how good the recognition is relative to others.
+		 * Do not confuse with 3D distance, this is entirely about recognition.
+		 * @return Sortable score. Lower is better.
+		 */
+		public Float getDistance() {
+			return distance;
+		}
+
+		/**
+		 * Optional location within the source image for the location of the recognized object.
+		 * @return {@link RectF} containing location on input image
+		 */
+		public RectF getLocation() {
+			// Defensive copy so callers cannot mutate our stored bounds.
+			return new RectF(location);
+		}
+
+		/**
+		 * Optional, source bitmap
+		 * @return User-displayable {@link Bitmap} containing the cropped face
+		 */
+		public Bitmap getCrop() {
+			if (crop == null) return null;
+			// Defensive copy so callers cannot recycle/mutate our stored bitmap.
+			return Bitmap.createBitmap(crop);
+		}
+
+		/**
+		 * Optional, raw AI output
+		 * @return Facial features encoded in float[]
+		 */
+		public float[] getExtra() {
+			return extra;
+		}
+
+		// add metadata from FaceDetector
+		/* package-private */ void addData(String id, RectF location) {
+			this.id = id;
+			this.location = location;
+		}
+
+		/**
+		 * Add metadata obtainable after face recognition.
+		 * @param title The new title (name) to store.
+		 * @param distance The new distance to store.
+		 */
+		public void addRecognitionData(String title, float distance) {
+			this.title = title;
+			this.distance = distance;
+		}
+
+		/**
+		 * Test if the face has already been recognized (if {@link #addRecognitionData(String, float)} has been called)
+		 * @return equivalent of {@code getDistance() < Float.MAX_VALUE}
+		 */
+		public boolean isRecognized() {
+			return getDistance() < Float.MAX_VALUE;
+		}
+
+		@Override
+		public String toString() {
+			String resultString = "";
+			if (id != null) {
+				resultString += "[" + id + "] ";
+			}
+
+			if (title != null) {
+				resultString += title + " ";
+			}
+
+			if (distance != null) {
+				resultString += String.format(Locale.US, "(%.1f%%) ", distance * 100.0f);
+			}
+
+			if (location != null) {
+				resultString += location + " ";
+			}
+
+			return resultString.trim();
+		}
+
+		/**
+		 * Get information about image brightness/face light conditions
+		 * @return negative if bad, 0 if neutral, positive if good
+		 */
+		public int getBrightnessHint() {
+			return (brightnessTest1 < 0.5f || brightnessTest2 < 0.4f) ? -1 : // really bad light
+					(brightnessTest1 + brightnessTest2 < 2.2f ? 0 // suboptimal
+							: 1); // optimal
+		}
+
+		/**
+		 * Static method to compare two {@link Face}s.
+		 * Usually, one of the instance methods is used though.
+		 * @param me The {@link #getExtra() extra} from one face.
+		 * @param other The {@link #getExtra() extra} from the other face.
+		 * @return The {@link #getDistance() distance}, lower is better.
+		 * @see #compare(Face)
+		 * @see #compare(float[])
+		 */
+		public static float compare(float[] me, float[] other) {
+			// Euclidean distance between the two L2-normalized embeddings.
+			// NOTE(review): assumes both embeddings have the same length — confirm callers
+			// always pass embeddings from the same model.
+			final float[] emb = normalizeFloat(me);
+			final float[] knownEmb = normalizeFloat(other);
+			float distance = 0;
+			for (int i = 0; i < emb.length; i++) {
+				float diff = emb[i] - knownEmb[i];
+				distance += diff*diff;
+			}
+			return (float) Math.sqrt(distance);
+		}
+
+		/**
+		 * Compare two {@link Face}s
+		 * @param other The {@link #getExtra() extra} from the other face.
+		 * @return The {@link #getDistance() distance}, lower is better.
+		 * @see #compare(Face)
+		 * @see #compare(float[], float[])
+		 */
+		public float compare(float[] other) {
+			return compare(getExtra(), other);
+		}
+
+		/**
+		 * Compare two {@link Face}s
+		 * @param other The other face.
+		 * @return The {@link #getDistance() distance}, lower is better.
+		 * @see #compare(float[])
+		 * @see #compare(float[], float[])
+		 */
+		@SuppressWarnings("unused")
+		public float compare(Face other) {
+			return compare(other.getExtra());
+		}
+
+		// Sum of squares of all components, i.e. the squared L2 norm.
+		private static float sumSquares(float[] data) {
+			float ans = 0.0f;
+			for (float datum : data) {
+				ans += datum * datum;
+			}
+			return (ans);
+		}
+
+		// Returns the input scaled to unit L2 norm.
+		private static float[] normalizeFloat(float[] emb) {
+			// Size the output to the input rather than a hard-coded 512: the fixed-size
+			// buffer overflowed (ArrayIndexOutOfBoundsException) for embeddings longer
+			// than 512 and wasted space for shorter ones.
+			float [] norm_out = new float[emb.length];
+			double norm  = Math.sqrt(sumSquares(emb));
+			for (int i=0;i< emb.length;i++){
+				norm_out[i] = (float)(emb[i]/norm);
+			}
+			return norm_out;
+		}
+	}
+
+	/**
+	 * Create {@link FaceScanner} instance.
+	 * @param context Android {@link Context} object, may be in background.
+	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+	 * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+	 * @return {@link FaceScanner} instance.
+	 * @see #create(Context)
+	 */
+	public static FaceScanner create(Context context, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		// A null context is tolerated and results in a null AssetManager being
+		// passed through to the classifier.
+		AssetManager assetmanager = null;
+		if (context != null)
+			assetmanager = context.getAssets();
+		return new FaceScanner(assetmanager, hwAcceleration, enhancedHwAcceleration, numThreads);
+	}
+
+	/**
+	 * Create {@link FaceScanner} instance with sensible defaults regarding hardware acceleration (CPU, XNNPACK, 4 threads).
+	 * @param context Android {@link Context} object, may be in background.
+	 * @return {@link FaceScanner} instance.
+	 * @see #create(Context, boolean, boolean, int)
+	 */
+	@SuppressWarnings("unused")
+	public static FaceScanner create(Context context) {
+		return create(context, false, true, 4);
+	}
+
+	// Private: use the static create() factories instead.
+	private FaceScanner(AssetManager am, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
+		this.am = am;
+		this.hwAcceleration = hwAcceleration;
+		this.enhancedHwAcceleration = enhancedHwAcceleration;
+		this.numThreads = numThreads;
+		// brightnessTest() calls getClassifier(), so the TFLite model is loaded
+		// eagerly here, at construction time.
+		this.brightnessTest = new float[][] { brightnessTest(Color.WHITE),brightnessTest(Color.BLACK) };
+	}
+
+	// Lazily creates the TFLite classifier on first use and caches it.
+	// NOTE(review): not synchronized — confirm all callers use this from a single
+	// thread, otherwise two classifiers could be created concurrently.
+	private SimilarityClassifier getClassifier() throws IOException {
+		if (classifier == null) {
+			classifier = SimilarityClassifier.create(am,
+					TF_OD_API_MODEL_FILE,
+					TF_OD_API_LABELS_FILE,
+					TF_OD_API_INPUT_SIZE,
+					TF_OD_API_IS_QUANTIZED,
+					hwAcceleration,
+					enhancedHwAcceleration,
+					numThreads
+			);
+		}
+		return classifier;
+	}
+
+	/**
+	 * Scan the face inside the {@link InputImage}.
+	 * @param input The {@link InputImage} to process
+	 * @param allowPostprocessing Allow postprocessing to improve detection quality. Undesirable when registering faces.
+	 * @return {@link Face}, or {@code null} if the model could not be loaded.
+	 */
+	public Face detectFace(InputImage input, boolean allowPostprocessing) {
+		try {
+			List<SimilarityClassifier.Recognition> results = getClassifier().recognizeImage(input.getProcessedImage());
+			SimilarityClassifier.Recognition result = results.get(0);
+			float[] e = result.getExtra()[0];
+			// Compare the embedding against the white/black reference embeddings so the
+			// Face can report a brightness hint.
+			Face f = new Face(result.getId(), result.getTitle(), result.getDistance(), null, input.getUserDisplayableImage(), e, Face.compare(e, brightnessTest[0]), Face.compare(e, brightnessTest[1]));
+			if (f.getBrightnessHint() == 0 && allowPostprocessing /* try to improve light situation with postprocessing if its bad but not terrible */) {
+				// Single retry (allowPostprocessing=false) on brightened copies of both bitmaps.
+				Face f2 = detectFace(new InputImage(doBrightnessPostProc(input.getProcessedImage()), doBrightnessPostProc(input.getUserDisplayableImage())), false);
+				// NOTE(review): a failed retry discards the usable first-pass result f —
+				// confirm returning null here (rather than f) is intended.
+				if (f2 == null) // Earlier logs will have printed the cause.
+					return null;
+				if (f2.getBrightnessHint() == 1)
+					return f2; // Return if it helped.
+			}
+			return f;
+		} catch (IOException e) {
+			Log.e("FaceScanner", Log.getStackTraceString(e));
+			return null;
+		}
+	}
+
+	// Returns a brightened copy of the input; used by detectFace() when the
+	// brightness hint is suboptimal.
+	private Bitmap doBrightnessPostProc(Bitmap input) {
+		// 30, which has been obtained using manual testing, gives the best balance between brightness and trashing facial features
+		return changeBitmapContrastBrightness(input, 30f);
+	}
+
+	// https://stackoverflow.com/a/17887577
+	// Adds a constant offset to each RGB channel (alpha untouched) via a color
+	// matrix; returns a new bitmap, the input is not modified.
+	private static Bitmap changeBitmapContrastBrightness(Bitmap bmp, float brightness) {
+		ColorMatrix cm = new ColorMatrix(new float[]
+				{
+						1, 0, 0, 0, brightness,
+						0, 1, 0, 0, brightness,
+						0, 0, 1, 0, brightness,
+						0, 0, 0, 1, 0
+				});
+
+		// NOTE(review): Bitmap.getConfig() can return null (e.g. HARDWARE bitmaps),
+		// which would make createBitmap throw — confirm inputs are always ARGB_8888.
+		Bitmap ret = Bitmap.createBitmap(bmp.getWidth(), bmp.getHeight(), bmp.getConfig());
+
+		Canvas canvas = new Canvas(ret);
+
+		Paint paint = new Paint();
+		paint.setColorFilter(new ColorMatrixColorFilter(cm));
+		canvas.drawBitmap(bmp, 0, 0, paint);
+
+		return ret;
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceStorageBackend.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceStorageBackend.java
new file mode 100644
index 0000000..e5a1ca0
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceStorageBackend.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Store Faces on disk (or in memory, or anywhere else, really).
+ * This abstract class already performs error checking, caching and data type conversion for both users and implementations.
+ * Creating a new implementation only requires any key-value store that can store Base64-encoded strings.
+ * An implementation is required to use this class.
+ * @see VolatileFaceStorageBackend
+ * @see SharedPreferencesFaceStorageBackend
+ */
+public abstract class FaceStorageBackend {
+	// Names are stored Base64-url-encoded in the backend; decoded for callers.
+	private static final Base64.Encoder encoder = Base64.getUrlEncoder();
+	private static final Base64.Decoder decoder = Base64.getUrlDecoder();
+
+	// Decoded face names; null until getNames() has been called at least once.
+	/* package-private */ Set<String> cachedNames = null;
+	// Decoded face models, keyed by raw (decoded) name; never null.
+	/* package-private */ HashMap<String, float[][]> cachedData = null;
+
+	public FaceStorageBackend() {
+		flushCache();
+	}
+
+	/**
+	 * Get all known faces
+	 * @return {@link Set} of all known faces (names only)
+	 */
+	public Set<String> getNames() {
+		Set<String> result = getNamesCached();
+		if (result != null) return result;
+		// Decode the backend's Base64 names once and cache the result.
+		return (cachedNames = getNamesInternal().stream().map(v -> new String(decoder.decode(v), StandardCharsets.UTF_8)).collect(Collectors.toSet()));
+	}
+
+	/**
+	 * Register/store new face.
+	 * @param rawname Name of the face, needs to be unique.
+	 * @param alldata Face detection model data to store.
+	 * @param replace Allow replacing an already registered face (based on name). If false and it's still attempted, the method returns false and does nothing.
+	 * @return If registering was successful.
+	 * @see #register(String, float[][])
+	 * @see #register(String, float[])
+	 */
+	public boolean register(String rawname, float[][] alldata, boolean replace) {
+		String name = encoder.encodeToString(rawname.getBytes(StandardCharsets.UTF_8));
+		boolean duplicate = getNamesInternal().contains(name);
+		if (duplicate && !replace) {
+			return false;
+		}
+		// NOTE(review): the cache is updated before registerInternal() reports
+		// success, so a failed backend write leaves a stale cache entry — confirm
+		// this is acceptable for all backends.
+		if (cachedNames != null) {
+			cachedNames.add(rawname);
+			cachedData.put(rawname, alldata);
+		} else {
+			// No name cache to keep in sync; drop any stale data entries instead.
+			flushCache();
+		}
+		return registerInternal(name, FaceDataEncoder.encode(alldata), duplicate);
+	}
+
+	/**
+	 * Register/store new face. Calls {@link #register(String, float[][], boolean)} and does not allow replacements.
+	 * @param rawname Name of the face, needs to be unique.
+	 * @param alldata Face detection model data to store.
+	 * @return If registering was successful.
+	 * @see #register(String, float[][], boolean)
+	 * @see #register(String, float[])
+	 */
+	public boolean register(String rawname, float[][] alldata) {
+		return register(rawname, alldata, false);
+	}
+
+	/**
+	 * Store 1D face model by converting it to 2D and then calling {@link #register(String, float[][])}.<br>
+	 * Implementation looks like this: <code>return register(rawname, new float[][] { alldata })</code>).<br>
+	 * @param rawname Name of the face, needs to be unique.
+	 * @param alldata 1D face detection model data to store.
+	 * @return If registering was successful.
+	 * @see #register(String, float[][], boolean)
+	 * @see #register(String, float[][])
+	 */
+	public boolean register(String rawname, float[] alldata) {
+		return register(rawname, new float[][] { alldata });
+	}
+
+	/**
+	 * Adds 1D face model to existing 2D face model to improve accuracy.
+	 * @param rawname Name of the face, needs to be unique.
+	 * @param alldata 1D face detection model data to store
+	 * @param add If the face doesn't already exist, can we create it?
+	 * @return If registering was successful.
+	 */
+	public boolean extendRegistered(String rawname, float[] alldata, boolean add) {
+		if (!getNames().contains(rawname)) {
+			if (!add)
+				return false;
+			return register(rawname, alldata);
+		}
+		// Append the new model row to the existing ones and replace the stored face.
+		float[][] array1 = get(rawname);
+		float[][] combinedArray = new float[array1.length + 1][];
+		System.arraycopy(array1, 0, combinedArray, 0, array1.length);
+		combinedArray[array1.length] = alldata;
+		return register(rawname, combinedArray, true);
+	}
+
+	/**
+	 * Load 2D face model from storage.
+	 * @param name The name of the face to load.
+	 * @return The face model.
+	 */
+	public float[][] get(String name) {
+		float[][] f = getCached(name);
+		if (f != null) return f;
+		f = FaceDataEncoder.decode(getInternal(encoder.encodeToString(name.getBytes(StandardCharsets.UTF_8))));
+		cachedData.put(name, f);
+		return f;
+	}
+
+	/**
+	 * Delete all references to a face.
+	 * @param name The face to delete.
+	 * @return If deletion was successful.
+	 */
+	@SuppressWarnings("unused")
+	public boolean delete(String name) {
+		// The name cache is only populated after getNames() has been called at least
+		// once; without this guard an early delete() threw a NullPointerException.
+		if (cachedNames != null)
+			cachedNames.remove(name);
+		cachedData.remove(name);
+		return deleteInternal(encoder.encodeToString(name.getBytes(StandardCharsets.UTF_8)));
+	}
+
+	/**
+	 * Get all known faces
+	 * @return {@link Set} of all known faces (names only)
+	 */
+	protected abstract Set<String> getNamesInternal();
+	/**
+	 * Register/store new face.
+	 * @param name Name of the face, needs to be unique.
+	 * @param data Face detection model data to store.
+	 * @param duplicate Only true if we are adding a duplicate and want to replace the saved one.
+	 * @return If registering was successful.
+	 */
+	protected abstract boolean registerInternal(String name, String data, boolean duplicate);
+	/**
+	 * Load 2D face model from storage.
+	 * @param name The name of the face to load.
+	 * @return The face model.
+	 */
+	protected abstract String getInternal(String name);
+	/**
+	 * Delete all references to a face.
+	 * @param name The face to delete.
+	 * @return If deletion was successful.
+	 */
+	protected abstract boolean deleteInternal(String name);
+
+	// Decoded-name cache accessor; null means "not loaded yet".
+	/* package-private */ Set<String> getNamesCached() {
+		return cachedNames;
+	}
+	// Decoded-model cache accessor; returns null on cache miss.
+	/* package-private */ float[][] getCached(String name) {
+		return cachedData.get(name);
+	}
+	// Resets both caches: names become "unloaded", data becomes an empty map.
+	private void flushCache() {
+		cachedNames = null;
+		cachedData = new HashMap<>();
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java
new file mode 100644
index 0000000..526d058
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/ImageUtils.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.graphics.Matrix;
+
+public class ImageUtils {
+  // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
+  // are normalized to eight bits.
+  static final int kMaxChannelValue = 262143;
+
+  // Static utility class; not instantiable.
+  private ImageUtils() {}
+
+  /**
+   * Returns a transformation matrix from one reference frame into another.
+   * Handles cropping (if maintaining aspect ratio is desired) and rotation.
+   *
+   * @param srcWidth Width of source frame.
+   * @param srcHeight Height of source frame.
+   * @param dstWidth Width of destination frame.
+   * @param dstHeight Height of destination frame.
+   * @param applyRotation Amount of rotation to apply from one frame to another.
+   *  Must be a multiple of 90.
+   * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
+   * cropping the image if necessary.
+   * @return The transformation fulfilling the desired requirements.
+   */
+  public static Matrix getTransformationMatrix(
+      final int srcWidth,
+      final int srcHeight,
+      final int dstWidth,
+      final int dstHeight,
+      final int applyRotation,
+      final boolean maintainAspectRatio) {
+    final Matrix matrix = new Matrix();
+
+    if (applyRotation != 0) {
+      // Translate so center of image is at origin.
+      matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
+
+      // Rotate around origin.
+      matrix.postRotate(applyRotation);
+    }
+
+    // Account for the already applied rotation, if any, and then determine how
+    // much scaling is needed for each axis.
+    // (transpose is true for 90/270-degree rotations, where width and height swap.)
+    final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
+
+    final int inWidth = transpose ? srcHeight : srcWidth;
+    final int inHeight = transpose ? srcWidth : srcHeight;
+
+    // Apply scaling if necessary.
+    if (inWidth != dstWidth || inHeight != dstHeight) {
+      final float scaleFactorX = dstWidth / (float) inWidth;
+      final float scaleFactorY = dstHeight / (float) inHeight;
+
+      if (maintainAspectRatio) {
+        // Scale by minimum factor so that dst is filled completely while
+        // maintaining the aspect ratio. Some image may fall off the edge.
+        final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
+        matrix.postScale(scaleFactor, scaleFactor);
+      } else {
+        // Scale exactly to fill dst from src.
+        matrix.postScale(scaleFactorX, scaleFactorY);
+      }
+    }
+
+    if (applyRotation != 0) {
+      // Translate back from origin centered reference to destination frame.
+      matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
+    }
+
+    return matrix;
+  }
+
+  // Converts one YUV pixel to a packed ARGB int using fixed-point integer math.
+  private static int YUV2RGB(int y, int u, int v) {
+    // Adjust and check YUV values
+    y = Math.max((y - 16), 0);
+    u -= 128;
+    v -= 128;
+
+    // This is the floating point equivalent. We do the conversion in integer
+    // because some Android devices do not have floating point in hardware.
+    // nR = (int)(1.164 * nY + 2.018 * nU);
+    // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+    // nB = (int)(1.164 * nY + 1.596 * nV);
+    int y1192 = 1192 * y;
+    int r = (y1192 + 1634 * v);
+    int g = (y1192 - 833 * v - 400 * u);
+    int b = (y1192 + 2066 * u);
+
+    // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
+    r = r > kMaxChannelValue ? kMaxChannelValue : (Math.max(r, 0));
+    g = g > kMaxChannelValue ? kMaxChannelValue : (Math.max(g, 0));
+    b = b > kMaxChannelValue ? kMaxChannelValue : (Math.max(b, 0));
+
+    // Shift each 18-bit channel down to 8 bits and pack into 0xAARRGGBB.
+    return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
+  }
+
+  // Converts planar/semi-planar YUV420 data into an ARGB8888 int array.
+  // U/V planes are subsampled 2x in both dimensions, hence the j>>1 / i>>1 indexing.
+  public static void convertYUV420ToARGB8888(
+          byte[] yData,
+          byte[] uData,
+          byte[] vData,
+          int width,
+          int height,
+          int yRowStride,
+          int uvRowStride,
+          int uvPixelStride,
+          int[] out) {
+    int yp = 0;
+    for (int j = 0; j < height; j++) {
+      int pY = yRowStride * j;
+      int pUV = uvRowStride * (j >> 1);
+
+      for (int i = 0; i < width; i++) {
+        int uv_offset = pUV + (i >> 1) * uvPixelStride;
+
+        out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
+      }
+    }
+  }
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/SharedPreferencesFaceStorageBackend.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/SharedPreferencesFaceStorageBackend.java
new file mode 100644
index 0000000..df89724
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/SharedPreferencesFaceStorageBackend.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.SharedPreferences;
+
+import java.util.Set;
+
+/**
+ * {@link FaceStorageBackend} storing data in {@link SharedPreferences}
+ */
+public class SharedPreferencesFaceStorageBackend extends FaceStorageBackend {
+	private final SharedPreferences prefs;
+
+	/**
+	 * Create/load {@link SharedPreferencesFaceStorageBackend}
+	 * @param prefs {@link SharedPreferences} to use
+	 */
+	public SharedPreferencesFaceStorageBackend(SharedPreferences prefs) {
+		this.prefs = prefs;
+	}
+
+	// Keys are the Base64-encoded names supplied by the base class.
+	@Override
+	protected Set<String> getNamesInternal() {
+		return prefs.getAll().keySet();
+	}
+
+	// commit() is synchronous; its boolean result doubles as the success value.
+	@Override
+	protected boolean registerInternal(String name, String data, boolean replace) {
+		return prefs.edit().putString(name, data).commit();
+	}
+
+	@Override
+	protected String getInternal(String name) {
+		return prefs.getString(name, null);
+	}
+
+	@Override
+	protected boolean deleteInternal(String name) {
+		return prefs.edit().remove(name).commit();
+	}
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/SimilarityClassifier.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/SimilarityClassifier.java
new file mode 100644
index 0000000..c13513d
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/SimilarityClassifier.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2019 The TensorFlow Authors
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/** Generic interface for interacting with different recognition engines. */
+/* package-private */ abstract class SimilarityClassifier {
+
+  /**
+   * Create a recognition engine (currently always the TFLite-backed implementation).
+   *
+   * @param assetManager asset manager used to load the model and label files
+   * @param modelFilename file name of the TFLite model
+   * @param labelFilename file name of the label list
+   * @param inputSize width/height (square) of the model's image input
+   * @param isQuantized whether the model is quantized (changes input/output layout)
+   * @param hwAcceleration enable hardware acceleration (NNAPI/GPU)
+   * @param useEnhancedAcceleration see inline comment on the parameter below
+   * @param numThreads thread count when running on CPU or with XNNPACK
+   * @throws IOException if model or label file cannot be loaded
+   */
+  /* package-private */ static SimilarityClassifier create(
+          final AssetManager assetManager,
+          final String modelFilename,
+          final String labelFilename,
+          final int inputSize,
+          final boolean isQuantized,
+          final boolean hwAcceleration,
+          final boolean useEnhancedAcceleration, // if hwAcceleration==true, setting this uses NNAPI instead of GPU. if false, it toggles XNNPACK
+          final int numThreads) throws IOException {
+    return TFLiteObjectDetectionAPIModel.create(assetManager, modelFilename, labelFilename, inputSize, isQuantized, hwAcceleration, useEnhancedAcceleration, numThreads);
+  }
+
+  /**
+   * Run the model on one bitmap.
+   * @return recognitions found in the image; never null
+   */
+  /* package-private */ abstract List<Recognition> recognizeImage(Bitmap bitmap);
+
+  /** An immutable result returned by a Classifier describing what was recognized. */
+  /* package-private */ static class Recognition {
+    /**
+     * A unique identifier for what has been recognized. Specific to the class, not the instance of
+     * the object.
+     */
+    private final String id;
+
+    /** Display name for the recognition. */
+    private final String title;
+
+    /**
+     * A sortable score for how good the recognition is relative to others. Lower should be better.
+     */
+    private final Float distance;
+    // Raw model output attached after construction via setExtra(); null until then.
+    private float[][] extra;
+
+    /** Optional location within the source image for the location of the recognized object. */
+    private final RectF location;
+
+    /* package-private */ Recognition(
+            final String id, final String title, final Float distance, final RectF location) {
+      this.id = id;
+      this.title = title;
+      this.distance = distance;
+      this.location = location;
+      this.extra = null;
+    }
+
+    /** Attach raw model output (e.g. an embedding) to this result. */
+    public void setExtra(float[][] extra) {
+        this.extra = extra;
+    }
+    /** @return raw model output set via {@link #setExtra}, or null if none was attached */
+    public float[][] getExtra() {
+        return this.extra;
+    }
+
+    public String getId() {
+      return id;
+    }
+
+    public String getTitle() {
+      return title;
+    }
+
+    public Float getDistance() {
+      return distance;
+    }
+
+    /**
+     * @return a defensive copy of the location.
+     * NOTE(review): likely throws NPE if this Recognition was constructed with a null
+     * location — the in-file caller always passes one, but confirm before relying on null here.
+     */
+    public RectF getLocation() {
+      return new RectF(location);
+    }
+
+    /**
+     * Human-readable summary for logging.
+     * NOTE(review): formats {@code distance} as a percentage even though the field is
+     * documented as a lower-is-better distance — presumably fine for debug output only.
+     */
+    @Override
+    public String toString() {
+      String resultString = "";
+      if (id != null) {
+        resultString += "[" + id + "] ";
+      }
+
+      if (title != null) {
+        resultString += title + " ";
+      }
+
+      if (distance != null) {
+        resultString += String.format(Locale.US, "(%.1f%%) ", distance * 100.0f);
+      }
+
+      if (location != null) {
+        resultString += location + " ";
+      }
+
+      return resultString.trim();
+    }
+  }
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/TFLiteObjectDetectionAPIModel.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/TFLiteObjectDetectionAPIModel.java
new file mode 100644
index 0000000..c95c2e7
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/TFLiteObjectDetectionAPIModel.java
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2019 The TensorFlow Authors
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import android.content.res.AssetFileDescriptor;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+import android.os.Trace;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Vector;
+
+import org.tensorflow.lite.Interpreter;
+import org.tensorflow.lite.nnapi.NnApiDelegate;
+
+import com.libremobileos.yifan.util.GpuDelegateFactory;
+
+/**
+ * Wrapper for frozen detection models trained using the Tensorflow Object Detection API.
+ * Supports two model layouts: a quantized SSD-style detector (boxes/classes/scores outputs)
+ * and a non-quantized embedding model (single OUTPUT_SIZE-float embedding output).
+ */
+/* package-private */ class TFLiteObjectDetectionAPIModel
+        extends SimilarityClassifier {
+
+  // Length of the embedding vector produced by the non-quantized model.
+  private static final int OUTPUT_SIZE = 512;
+  //private static final int OUTPUT_SIZE = 192;
+
+  // Only return this many results.
+  private static final int NUM_DETECTIONS = 10;
+
+  // Float model normalization: pixel -> (pixel - IMAGE_MEAN) / IMAGE_STD, i.e. [-1, 1].
+  private static final float IMAGE_MEAN = 127.5f;
+  private static final float IMAGE_STD = 127.5f;
+  // Fallback location for model/label files when they are not bundled as assets.
+  private static final String SYSTEM_MODEL_DIR = "/system/etc/face";
+
+  private boolean isModelQuantized;
+  // Config values.
+  private int inputSize;
+  // Pre-allocated buffers.
+  private final Vector<String> labels = new Vector<>();
+  private int[] intValues;
+  // outputLocations: array of shape [Batch-size, NUM_DETECTIONS,4]
+  // contains the location of detected boxes
+  private float[][][] outputLocations;
+  // outputClasses: array of shape [Batch-size, NUM_DETECTIONS]
+  // contains the classes of detected boxes
+  private float[][] outputClasses;
+  // outputScores: array of shape [Batch-size, NUM_DETECTIONS]
+  // contains the scores of detected boxes
+  private float[][] outputScores;
+  // numDetections: array of shape [Batch-size]
+  // contains the number of detected boxes
+  private float[] numDetections;
+
+  // Embedding output of the non-quantized model, shape [1][OUTPUT_SIZE].
+  private float[][] embeddings;
+
+  // Direct byte buffer fed to the interpreter; layout depends on isModelQuantized.
+  private ByteBuffer imgData;
+
+  private Interpreter tfLite;
+
+  private TFLiteObjectDetectionAPIModel() {}
+
+  /**
+   * Memory-map the model file, trying Assets first and falling back to SYSTEM_MODEL_DIR.
+   * Streams/channels are closed before returning: per the FileChannel.map contract the
+   * returned MappedByteBuffer remains valid after its channel is closed, so the previous
+   * code's unclosed FileInputStream was a plain resource leak.
+   */
+  private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename) throws IOException {
+    try {
+      AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
+      try (FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor())) {
+        return inputStream.getChannel().map(FileChannel.MapMode.READ_ONLY,
+                fileDescriptor.getStartOffset(), fileDescriptor.getDeclaredLength());
+      }
+    } catch (Exception e) {
+      // Asset not available (e.g. model shipped on the system image instead).
+      File f = new File(SYSTEM_MODEL_DIR, modelFilename);
+      if (f.exists() && f.canRead()) {
+        try (FileInputStream inputStream = new FileInputStream(f)) {
+          return inputStream.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, f.length());
+        }
+      }
+      throw new IOException(modelFilename + " not found in assets or " + SYSTEM_MODEL_DIR);
+    }
+  }
+
+  /**
+   * Initializes a native TensorFlow session for classifying images.
+   *
+   * @param assetManager The asset manager to be used to load assets.
+   * @param modelFilename The filepath of the model GraphDef protocol buffer.
+   * @param labelFilename The filepath of label file for classes.
+   * @param inputSize The size of image input
+   * @param isQuantized Boolean representing model is quantized or not
+   * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
+   * @param useEnhancedAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
+   * @param numThreads How many threads to use, if running on CPU or with XNNPACK
+   */
+  public static SimilarityClassifier create(
+      final AssetManager assetManager,
+      final String modelFilename,
+      final String labelFilename,
+      final int inputSize,
+      final boolean isQuantized,
+      final boolean hwAcceleration,
+      final boolean useEnhancedAcceleration,
+      final int numThreads)
+      throws IOException {
+
+    final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel();
+
+    // Labels: assets first, then the system model dir — same lookup order as the model.
+    InputStream labelsInput;
+    try {
+      labelsInput = assetManager.open(labelFilename);
+    } catch (Exception e) {
+      File f = new File(SYSTEM_MODEL_DIR, labelFilename);
+      if (f.exists() && f.canRead()) {
+        labelsInput = new FileInputStream(f);
+      } else {
+        throw new IOException(labelFilename + " not found in assets or " + SYSTEM_MODEL_DIR);
+      }
+    }
+    // try-with-resources also closes labelsInput (wrapped stream), even on read errors.
+    try (BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput))) {
+      String line;
+      while ((line = br.readLine()) != null) {
+        d.labels.add(line);
+      }
+    }
+
+    d.inputSize = inputSize;
+
+    Interpreter.Options options = new Interpreter.Options();
+    options.setNumThreads(numThreads);
+    // NOTE(review): XNNPACK is enabled whenever hwAcceleration is set, even alongside a
+    // NNAPI/GPU delegate — presumably intentional as a fallback path; confirm if revisiting.
+    options.setUseXNNPACK(hwAcceleration || useEnhancedAcceleration);
+    if (hwAcceleration) {
+      if (useEnhancedAcceleration) {
+        options.addDelegate(new NnApiDelegate());
+      } else {
+        options.addDelegate(GpuDelegateFactory.get());
+      }
+    }
+
+    try {
+      d.tfLite = new Interpreter(loadModelFile(assetManager, modelFilename), options);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+
+    d.isModelQuantized = isQuantized;
+    // Pre-allocate buffers.
+    int numBytesPerChannel;
+    if (isQuantized) {
+      numBytesPerChannel = 1; // Quantized
+    } else {
+      numBytesPerChannel = 4; // Floating point
+    }
+    // 3 channels (RGB) per pixel.
+    d.imgData = ByteBuffer.allocateDirect(d.inputSize * d.inputSize * 3 * numBytesPerChannel);
+    d.imgData.order(ByteOrder.nativeOrder());
+    d.intValues = new int[d.inputSize * d.inputSize];
+
+    d.outputLocations = new float[1][NUM_DETECTIONS][4];
+    d.outputClasses = new float[1][NUM_DETECTIONS];
+    d.outputScores = new float[1][NUM_DETECTIONS];
+    d.numDetections = new float[1];
+    return d;
+  }
+
+  /**
+   * Run inference on one bitmap (expected to already be inputSize x inputSize).
+   * Not thread-safe: reuses the shared imgData/intValues buffers.
+   * @return for the non-quantized model a single Recognition carrying the embedding via
+   *         getExtra(); for the quantized model up to NUM_DETECTIONS detections.
+   */
+  @Override
+  public List<Recognition> recognizeImage(final Bitmap bitmap) {
+    // Log this method so that it can be analyzed with systrace.
+    Trace.beginSection("recognizeImage");
+
+    Trace.beginSection("preprocessBitmap");
+    // Preprocess the image data from 0-255 int to normalized float based
+    // on the provided parameters.
+    bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
+
+    imgData.rewind();
+    for (int i = 0; i < inputSize; i++) {
+      for (int j = 0; j < inputSize; j++) {
+        int pixelValue = intValues[i * inputSize + j];
+        if (isModelQuantized) {
+          // Quantized model: raw R, G, B bytes.
+          imgData.put((byte) ((pixelValue >> 16) & 0xFF));
+          imgData.put((byte) ((pixelValue >> 8) & 0xFF));
+          imgData.put((byte) (pixelValue & 0xFF));
+        } else { // Float model: normalize each channel to [-1, 1].
+          imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+          imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+          imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+        }
+      }
+    }
+    Trace.endSection(); // preprocessBitmap
+
+    // Copy the input data into TensorFlow.
+    Trace.beginSection("feed");
+
+    Map<Integer, Object> outputMap = new HashMap<>();
+
+    Object[] inputArray = {imgData};
+
+    Trace.endSection();
+
+    if (!isModelQuantized) {
+      // Embedding model: single output tensor holding the face embedding.
+      embeddings = new float[1][OUTPUT_SIZE];
+      outputMap.put(0, embeddings);
+    } else {
+      // SSD-style detector: four output tensors.
+      outputLocations = new float[1][NUM_DETECTIONS][4];
+      outputClasses = new float[1][NUM_DETECTIONS];
+      outputScores = new float[1][NUM_DETECTIONS];
+      numDetections = new float[1];
+      outputMap.put(0, outputLocations);
+      outputMap.put(1, outputClasses);
+      outputMap.put(2, outputScores);
+      outputMap.put(3, numDetections);
+    }
+
+    // Run the inference call.
+    Trace.beginSection("run");
+    tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+    Trace.endSection();
+
+    final ArrayList<Recognition> recognitions = new ArrayList<>(isModelQuantized ? NUM_DETECTIONS : 1);
+
+    if (!isModelQuantized) {
+      // The embedding model produces no detections; return one placeholder Recognition
+      // whose only payload is the embedding, attached via setExtra().
+      float distance = Float.MAX_VALUE;
+      String id = "0";
+      String label = "?";
+
+      Recognition rec = new Recognition(
+              id,
+              label,
+              distance,
+              new RectF());
+
+      recognitions.add(rec);
+
+      rec.setExtra(embeddings);
+    } else {
+      // Show the best detections.
+      // after scaling them back to the input size.
+      for (int i = 0; i < NUM_DETECTIONS; ++i) {
+        // Model emits [top, left, bottom, right] in [0,1]; convert to pixel RectF(l,t,r,b).
+        final RectF detection =
+                new RectF(
+                        outputLocations[0][i][1] * inputSize,
+                        outputLocations[0][i][0] * inputSize,
+                        outputLocations[0][i][3] * inputSize,
+                        outputLocations[0][i][2] * inputSize);
+        // SSD Mobilenet V1 Model assumes class 0 is background class
+        // in label file and class labels start from 1 to number_of_classes+1,
+        // while outputClasses correspond to class index from 0 to number_of_classes
+        int labelOffset = 1;
+
+        recognitions.add(
+                new Recognition(
+                        "" + i,
+                        labels.get((int) outputClasses[0][i] + labelOffset),
+                        outputScores[0][i],
+                        detection));
+      }
+    }
+
+    Trace.endSection();
+    return recognitions;
+  }
+}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/VolatileFaceStorageBackend.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/VolatileFaceStorageBackend.java
new file mode 100644
index 0000000..502ad2b
--- /dev/null
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/VolatileFaceStorageBackend.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2023 LibreMobileOS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.libremobileos.yifan.face;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+
+/** In-memory FaceStorageBackend, bypassing encoding and storage, relying on cache entirely for performance */
+@SuppressWarnings("unused")
+public class VolatileFaceStorageBackend extends FaceStorageBackend {
+
+	public VolatileFaceStorageBackend() {
+		super();
+		// Pre-populate the base class's caches so the cache-only accessors below never miss.
+		cachedNames = new HashSet<>();
+		cachedData = new HashMap<>();
+	}
+
+	// The *Internal methods are unreachable: every public entry point is overridden below
+	// to hit the cache directly, so persistence hooks are stubbed out.
+	@Override
+	protected Set<String> getNamesInternal() {
+		throw new RuntimeException("Stub!");
+	}
+
+	@Override
+	protected boolean registerInternal(String name, String data, boolean duplicate) {
+		throw new RuntimeException("Stub!");
+	}
+
+	@Override
+	protected String getInternal(String name) {
+		throw new RuntimeException("Stub!");
+	}
+
+	// Returns true (rather than throwing) — presumably the base-class delete() evicts from
+	// the cache and then calls this for persistence; TODO confirm against FaceStorageBackend.
+	@Override
+	protected boolean deleteInternal(String name) {
+		return true;
+	}
+
+	/** @return names of all registered faces, served from cache only. */
+	@Override
+	public Set<String> getNames() {
+		return getNamesCached();
+	}
+
+	/**
+	 * Store a face in memory only.
+	 * NOTE(review): {@code replace} is ignored — an existing entry is always overwritten.
+	 * @return always true
+	 */
+	@Override
+	public boolean register(String rawname, float[][] alldata, boolean replace) {
+		cachedNames.add(rawname);
+		cachedData.put(rawname, alldata);
+		return true;
+	}
+
+	/** @return the cached face data for {@code name} */
+	@Override
+	public float[][] get(String name) {
+		return getCached(name);
+	}
+}
diff --git a/FaceShared/src/withGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java b/FaceShared/src/withGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java
new file mode 100644
index 0000000..dc51443
--- /dev/null
+++ b/FaceShared/src/withGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java
@@ -0,0 +1,14 @@
+package com.libremobileos.yifan.util;
+
+import org.tensorflow.lite.Delegate;
+import org.tensorflow.lite.gpu.GpuDelegate;
+
+/** Factory for the TensorFlow Lite GPU delegate — GPU-enabled build flavor. */
+public class GpuDelegateFactory {
+	/** @return a freshly constructed {@link GpuDelegate} */
+	public static Delegate get() {
+		final GpuDelegate gpuDelegate = new GpuDelegate();
+		return gpuDelegate;
+	}
+
+	/** @return true — this flavor bundles the GPU library */
+	public static boolean isSupported() {
+		return true;
+	}
+}
diff --git a/FaceShared/src/withoutGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java b/FaceShared/src/withoutGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java
new file mode 100644
index 0000000..95cf800
--- /dev/null
+++ b/FaceShared/src/withoutGpu/java/com/libremobileos/yifan/util/GpuDelegateFactory.java
@@ -0,0 +1,13 @@
+package com.libremobileos.yifan.util;
+
+import org.tensorflow.lite.Delegate;
+
+/** Factory for the TensorFlow Lite GPU delegate — stub for the GPU-less build flavor. */
+public class GpuDelegateFactory {
+	/**
+	 * Always fails in this flavor.
+	 * @throws UnsupportedOperationException unconditionally
+	 */
+	public static Delegate get() {
+		throw new UnsupportedOperationException("compiled without GPU library, can't create GPU delegate");
+	}
+
+	/** @return false — this flavor ships without the GPU library */
+	public static boolean isSupported() {
+		return false;
+	}
+}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7d42893
--- /dev/null
+++ b/README.md
@@ -0,0 +1,13 @@
+# Yifan - LibreMobileOS ML libraries
+
+This repository is a small collection of ML models for use in LibreMobileOS; building is supported with both Soong and Gradle.
+
+All models found here are from open-source projects, and we would like to thank everyone who made this possible.
+
+## Face
+
+Usecase: Find faces and compare them with saved face data.
+
+Models: detect-class1 (TFLite Object Detection) and MobileFaceNet V2
+
+Javadoc: https://LMODroid.github.io/platform_external_yifan/FaceShared/
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 0000000..4ddf352
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,5 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+plugins {
+	id 'com.android.application' version '7.4.0' apply false
+	id 'com.android.library' version '7.4.0' apply false
+}
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
new file mode 100644
index 0000000..3e927b1
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1,21 @@
+# Project-wide Gradle settings.
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+# AndroidX package structure to make it clearer which packages are bundled with the
+# Android operating system, and which are packaged with your app's APK
+# https://developer.android.com/topic/libraries/support-library/androidx-rn
+android.useAndroidX=true
+# Enables namespacing of each library's R class so that its R class includes only the
+# resources declared in the library itself and none from the library's dependencies,
+# thereby reducing the size of the R class for that library
+android.nonTransitiveRClass=true
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..e708b1c
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..edf6433
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Tue Jan 10 17:18:06 CET 2023
+distributionBase=GRADLE_USER_HOME
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-bin.zip
+distributionPath=wrapper/dists
+zipStorePath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
diff --git a/gradlew b/gradlew
new file mode 100755
index 0000000..4f906e0
--- /dev/null
+++ b/gradlew
@@ -0,0 +1,185 @@
+#!/usr/bin/env sh
+
+#
+# Copyright 2015 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=`expr $i + 1`
+    done
+    case $i in
+        0) set -- ;;
+        1) set -- "$args0" ;;
+        2) set -- "$args0" "$args1" ;;
+        3) set -- "$args0" "$args1" "$args2" ;;
+        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=`save "$@"`
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
new file mode 100644
index 0000000..ac1b06f
--- /dev/null
+++ b/gradlew.bat
@@ -0,0 +1,89 @@
+@rem

+@rem Copyright 2015 the original author or authors.

+@rem

+@rem Licensed under the Apache License, Version 2.0 (the "License");

+@rem you may not use this file except in compliance with the License.

+@rem You may obtain a copy of the License at

+@rem

+@rem      https://www.apache.org/licenses/LICENSE-2.0

+@rem

+@rem Unless required by applicable law or agreed to in writing, software

+@rem distributed under the License is distributed on an "AS IS" BASIS,

+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+@rem See the License for the specific language governing permissions and

+@rem limitations under the License.

+@rem

+

+@if "%DEBUG%" == "" @echo off

+@rem ##########################################################################

+@rem

+@rem  Gradle startup script for Windows

+@rem

+@rem ##########################################################################

+

+@rem Set local scope for the variables with windows NT shell

+if "%OS%"=="Windows_NT" setlocal

+

+set DIRNAME=%~dp0

+if "%DIRNAME%" == "" set DIRNAME=.

+set APP_BASE_NAME=%~n0

+set APP_HOME=%DIRNAME%

+

+@rem Resolve any "." and ".." in APP_HOME to make it shorter.

+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

+

+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.

+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

+

+@rem Find java.exe

+if defined JAVA_HOME goto findJavaFromJavaHome

+

+set JAVA_EXE=java.exe

+%JAVA_EXE% -version >NUL 2>&1

+if "%ERRORLEVEL%" == "0" goto execute

+

+echo.

+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:findJavaFromJavaHome

+set JAVA_HOME=%JAVA_HOME:"=%

+set JAVA_EXE=%JAVA_HOME%/bin/java.exe

+

+if exist "%JAVA_EXE%" goto execute

+

+echo.

+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:execute

+@rem Setup the command line

+

+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

+

+

+@rem Execute Gradle

+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*

+

+:end

+@rem End local scope for the variables with windows NT shell

+if "%ERRORLEVEL%"=="0" goto mainEnd

+

+:fail

+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of

+rem the _cmd.exe /c_ return code!

+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1

+exit /b 1

+

+:mainEnd

+if "%OS%"=="Windows_NT" endlocal

+

+:omega

diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 0000000..7aa767d
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1,17 @@
+pluginManagement {
+	repositories {
+		google()
+		mavenCentral()
+		gradlePluginPortal()
+	}
+}
+dependencyResolutionManagement {
+	repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
+	repositories {
+		google()
+		mavenCentral()
+	}
+}
+rootProject.name = "Yifan"
+include ':FaceShared'
+include ':FaceExample'