Support brightness post-processing
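
Brightness estimation now lives in FaceScanner: every scanned face is compared
against reference embeddings of solid white and solid black frames, and
FaceScanner.Face exposes getBrightnessHint(). When lighting is suboptimal
(hint == 0) and post-processing is allowed, detectFace() retries on a brightened
copy of the input (additive ColorMatrix offset of 30). Post-processing is opt-in
through the new allowPostprocessing parameter: FaceRecognizer.recognize() always
enables it, while enrollment (ScanActivity) switches to FaceFinder with it
disabled.

Rough usage sketch of the resulting API (context, storage, bitmap, width and
height are placeholder names, imports omitted):

    // Enrolling: keep post-processing off so stored embeddings match the raw capture.
    FaceFinder finder = FaceFinder.create(context, 0.6f, width, height, 0);
    List<FaceScanner.Face> scans = new ArrayList<>();
    for (Pair<FaceDetector.Face, FaceScanner.Face> pair : finder.process(bitmap, false)) {
        if (pair.second.getBrightnessHint() == 1) // only keep scans taken in optimal light
            scans.add(pair.second);
    }

    // Recognizing: FaceRecognizer.recognize() enables post-processing internally.
    FaceRecognizer recognizer = FaceRecognizer.create(context, storage,
            0.6f, width, height, 0, 0.7f, 1);
    List<FaceRecognizer.Face> matches = recognizer.recognize(bitmap);
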
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java
index 4cd3d00..ee6de41 100644
--- a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceFinder.java
@@ -80,9 +80,10 @@
 	 * scanning the resulting found faces using {@link FaceScanner} after manually cropping the image.
 	 * Adds extra metadata (location) to {@link FaceScanner.Face} based on best effort basis.
 	 * @param input Bitmap to process.
+	 * @param allowPostprocessing Allow post-processing to improve recognition quality in poor lighting. Should be disabled when registering faces.
 	 * @return {@link List} of {@link Pair}s of detection results from {@link FaceDetector} and {@link FaceScanner}
 	 */
-	public List<Pair<FaceDetector.Face, FaceScanner.Face>> process(Bitmap input) {
+	public List<Pair<FaceDetector.Face, FaceScanner.Face>> process(Bitmap input, boolean allowPostprocessing) {
 		FaceDetector.InputImage inputImage = detectorInputProcessor.process(input);
 
 		final List<FaceDetector.Face> faces = faceDetector.detectFaces(inputImage);
@@ -97,7 +98,7 @@
 				FaceScanner.InputImage faceBmp = scannerInputProcessor.process(face.getLocation());
 				if (faceBmp == null) continue;
 
-				final FaceScanner.Face scanned = faceScanner.detectFace(faceBmp);
+				final FaceScanner.Face scanned = faceScanner.detectFace(faceBmp, allowPostprocessing);
 				if (scanned == null) continue;
 
 				scanned.addData(face.getId(), face.getLocation());
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java
index bd1db8a..8206678 100644
--- a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceRecognizer.java
@@ -18,8 +18,6 @@
 
 import android.content.Context;
 import android.graphics.Bitmap;
-import android.graphics.Canvas;
-import android.graphics.Color;
 import android.graphics.RectF;
 import android.util.Pair;
 
@@ -29,7 +27,8 @@
 
 /**
  * Task-specific API for detecting &amp; recognizing faces in an image.
- * Uses {@link FaceFinder} to detect and scan faces, {@link FaceStorageBackend} to store and retrieve the saved faces and returns the optimal result.
+ * Uses {@link FaceFinder} to detect and scan faces, {@link FaceStorageBackend} to store and retrieve the saved faces and returns the optimal result.<br>
+ * Avoid using this class to register faces into the recognition system: it always enables post-processing, which is undesirable during registration. Use {@link FaceFinder} with post-processing disabled instead.
  */
 public class FaceRecognizer {
 	private final FaceStorageBackend storage;
@@ -41,15 +40,12 @@
 	// Minimum count of matching detection models. (ratio)
 	private final float minModelRatio;
 
-	private final float[][] brightnessTest;
-
 	private FaceRecognizer(Context ctx, FaceStorageBackend storage, float minConfidence, int inputWidth, int inputHeight, int sensorOrientation, float maxDistance, int minMatchingModels, float minModelRatio, boolean hwAcceleration, boolean enhancedHwAcceleration, int numThreads) {
 		this.storage = storage;
 		this.detector = FaceFinder.create(ctx, minConfidence, inputWidth, inputHeight, sensorOrientation, hwAcceleration, enhancedHwAcceleration, numThreads);
 		this.maxDistance = maxDistance;
 		this.minMatchingModels = minMatchingModels;
 		this.minModelRatio = minModelRatio;
-		this.brightnessTest = new float[][] { detector.faceScanner.brightnessTest(Color.WHITE), detector.faceScanner.brightnessTest(Color.BLACK) };
 	}
 
 	/**
@@ -60,7 +56,7 @@
 	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
 	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
 	 * @param sensorOrientation rotation if the image should be rotated, or 0.
-	 * @param maxDistance Maximum distance (difference) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
 	 * @param minMatchingModels Minimum count of matching models for one face to count as recognized. If undesired, set to 1
 	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
 	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
@@ -82,7 +78,7 @@
 	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
 	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
 	 * @param sensorOrientation rotation if the image should be rotated, or 0.
-	 * @param maxDistance Maximum distance (difference) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
 	 * @param minModelRatio Minimum count of matching models for one face to count as recognized. Must be higher or equal to 0.0f and smaller or equal to 1.0f. If undesired, set to 0f
 	 * @param hwAcceleration Enable hardware acceleration (NNAPI/GPU)
 	 * @param enhancedHwAcceleration if hwAcceleration is enabled, use NNAPI instead of GPU. if not, this toggles XNNPACK
@@ -104,7 +100,7 @@
 	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
 	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
 	 * @param sensorOrientation rotation if the image should be rotated, or 0.
-	 * @param maxDistance Maximum distance (difference) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
 	 * @param minMatchingModels Minimum count of matching models for one face to count as recognized. If undesired, set to 1
 	 * @return {@link FaceRecognizer} instance.
 	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, float, boolean, boolean, int)
@@ -123,7 +119,7 @@
 	 * @param inputWidth width of the {@link Bitmap}s that are going to be processed
 	 * @param inputHeight height of the {@link Bitmap}s that are going to be processed
 	 * @param sensorOrientation rotation if the image should be rotated, or 0.
-	 * @param maxDistance Maximum distance (difference) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
+	 * @param maxDistance Maximum distance (difference, not 3D distance) to a saved face to count as recognized. Must be higher than 0.0f and smaller than 1.0f
 	 * @param minModelRatio Minimum count of matching models for one face to count as recognized. Must be higher or equal to 0.0f and smaller or equal to 1.0f. If undesired, set to 0f
 	 * @return {@link FaceRecognizer} instance.
 	 * @see #create(Context, FaceStorageBackend, float, int, int, int, float, int, boolean, boolean, int)
@@ -140,24 +136,20 @@
 		private final float confidence;
 		private final int modelCount;
 		private final float modelRatio;
-		/* package-private */ final float brightnessTest1;
-		/* package-private */ final float brightnessTest2;
 
 		/* package-private */ Face(String id, String title, Float distance, Float confidence, RectF location, Bitmap crop, float[] extra, int modelCount, float modelRatio, float brightnessTest1, float brightnessTest2) {
-			super(id, title, distance, location, crop, extra);
+			super(id, title, distance, location, crop, extra, brightnessTest1, brightnessTest2);
 			this.confidence = confidence;
 			this.modelRatio = modelRatio;
 			this.modelCount = modelCount;
-			this.brightnessTest1 = brightnessTest1;
-			this.brightnessTest2 = brightnessTest2;
 		}
 
-		/* package-private */ Face(FaceScanner.Face original, Float confidence, int modelCount, float modelRatio, float brightnessTest1, float brightnessTest2) {
-			this(original.getId(), original.getTitle(), original.getDistance(), confidence, original.getLocation(), original.getCrop(), original.getExtra(), modelCount, modelRatio, brightnessTest1, brightnessTest2);
+		/* package-private */ Face(FaceScanner.Face original, Float confidence, int modelCount, float modelRatio) {
+			this(original.getId(), original.getTitle(), original.getDistance(), confidence, original.getLocation(), original.getCrop(), original.getExtra(), modelCount, modelRatio, original.brightnessTest1, original.brightnessTest2);
 		}
 
-		/* package-private */ Face(FaceDetector.Face raw, FaceScanner.Face original, int modelCount, float modelRatio, float brightnessTest1, float brightnessTest2) {
-			this(original, raw.getConfidence(), modelCount, modelRatio, brightnessTest1, brightnessTest2);
+		/* package-private */ Face(FaceDetector.Face raw, FaceScanner.Face original, int modelCount, float modelRatio) {
+			this(original, raw.getConfidence(), modelCount, modelRatio);
 		}
 
 		/**
@@ -184,16 +176,6 @@
 		public float getModelRatio() {
 			return modelRatio;
 		}
-
-		/**
-		 * Get hints on brightness (light situation) of face.
-		 * @return -1 = really bad, 0 = suboptimal, 1 = optimal
-		 */
-		public int getBrightnessHint() {
-			return (brightnessTest1 < 0.5f || brightnessTest2 < 0.4f) ? -1 : // really bad light
-					(brightnessTest1 + brightnessTest2 < 2.0f ? 0 // suboptimal
-							: 1); // optimal
-		}
 	}
 
 	/**
@@ -203,7 +185,8 @@
 	 */
 	public List<Face> recognize(Bitmap input) {
 		final Set<String> savedFaces = storage.getNames();
-		final List<Pair<FaceDetector.Face, FaceScanner.Face>> faces = detector.process(input);
+		final List<Pair<FaceDetector.Face, FaceScanner.Face>> faces = detector.process(input,
+				true /* allow post-processing; this class should not be used for registering faces */);
 		final List<Face> results = new ArrayList<>();
 
 		for (Pair<FaceDetector.Face, FaceScanner.Face> faceFacePair : faces) {
@@ -237,7 +220,7 @@
 				}
 			}
 
-			results.add(new Face(found, scanned, matchingModelsOut, modelRatioOut, scanned.compare(brightnessTest[0]), scanned.compare(brightnessTest[1])));
+			results.add(new Face(found, scanned, matchingModelsOut, modelRatioOut));
 		}
 		return results;
 	}
diff --git a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java
index 6b164a0..0483c88 100644
--- a/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java
+++ b/FaceShared/src/main/java/com/libremobileos/yifan/face/FaceScanner.java
@@ -21,7 +21,10 @@
 import android.graphics.Bitmap;
 import android.graphics.Canvas;
 import android.graphics.Color;
+import android.graphics.ColorMatrix;
+import android.graphics.ColorMatrixColorFilter;
 import android.graphics.Matrix;
+import android.graphics.Paint;
 import android.graphics.RectF;
 import android.util.Log;
 
@@ -51,6 +54,8 @@
 	private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/mobile_face_net.txt";
 	// Maintain aspect ratio or squish image?
 	private static final boolean MAINTAIN_ASPECT = false;
+	// Reference embeddings of solid white and solid black frames, used for brightness hints
+	private final float[][] brightnessTest;
 
 	/**
 	 * Wrapper around Bitmap to avoid user passing unprocessed data
@@ -158,6 +163,20 @@
 		}
 	}
 
+	/** Computes the embedding of a solid-color frame, used as a reference point for brightness hints. */
+	private float[] brightnessTest(int color) {
+		Bitmap b = Bitmap.createBitmap(FaceScanner.TF_OD_API_INPUT_SIZE, FaceScanner.TF_OD_API_INPUT_SIZE, Bitmap.Config.ARGB_8888);
+		Canvas c = new Canvas(b);
+		c.drawColor(color);
+		List<SimilarityClassifier.Recognition> results;
+		try {
+			results = getClassifier().recognizeImage(b);
+		} catch (IOException e) {
+			throw new RuntimeException(e);
+		}
+		return results.get(0).getExtra()[0];
+	}
+
 	/** An immutable result returned by a FaceDetector describing what was recognized. */
 	public static class Face {
 		// A unique identifier for what has been recognized. Specific to the class, not the instance of
@@ -174,14 +193,18 @@
 
 		private final float[] extra;
 
+		/* package-private */ final float brightnessTest1, brightnessTest2;
+
 		/* package-private */ Face(
-				final String id, final String title, final Float distance, final RectF location, final Bitmap crop, final float[] extra) {
+				final String id, final String title, final Float distance, final RectF location, final Bitmap crop, final float[] extra, final float brightnessTest1, final float brightnessTest2) {
 			this.id = id;
 			this.title = title;
 			this.distance = distance;
 			this.location = location;
 			this.crop = crop;
 			this.extra = extra;
+			this.brightnessTest1 = brightnessTest1;
+			this.brightnessTest2 = brightnessTest2;
 		}
 
 		/* package-private */ String getId() {
@@ -198,6 +221,7 @@
 
 		/**
 		 * A score for how good the recognition is relative to others.
+		 * Not to be confused with 3D distance; this is purely a recognition score.
 		 * @return Sortable score. Lower is better.
 		 */
 		public Float getDistance() {
@@ -276,14 +300,23 @@
 			return resultString.trim();
 		}
 
+		/**
+		 * Get a hint about the brightness (lighting situation) of the face.
+		 * @return -1 = really bad, 0 = suboptimal, 1 = optimal
+		 */
+		public int getBrightnessHint() {
+			return (brightnessTest1 < 0.5f || brightnessTest2 < 0.4f) ? -1 : // really bad light
+					(brightnessTest1 + brightnessTest2 < 2.2f ? 0 // suboptimal
+							: 1); // optimal
+		}
+
 		/**
-		 * Compare two {@link Face}s
+		 * Static method to compare two {@link Face}s by their {@link #getExtra() extra} data.
+		 * In most cases, one of the instance methods is more convenient.
+		 * @param me The {@link #getExtra() extra} from one face.
 		 * @param other The {@link #getExtra() extra} from the other face.
 		 * @return The {@link #getDistance() distance}, lower is better.
 		 * @see #compare(Face)
+		 * @see #compare(float[])
 		 */
-		public float compare(float[] other) {
-			final float[] emb = normalizeFloat(extra);
+		public static float compare(float[] me, float[] other) {
+			final float[] emb = normalizeFloat(me);
 			final float[] knownEmb = normalizeFloat(other);
 			float distance = 0;
 			for (int i = 0; i < emb.length; i++) {
@@ -295,9 +328,21 @@
 
 		/**
 		 * Compare two {@link Face}s
+		 * @param other The {@link #getExtra() extra} from the other face.
+		 * @return The {@link #getDistance() distance}, lower is better.
+		 * @see #compare(Face)
+		 * @see #compare(float[], float[])
+		 */
+		public float compare(float[] other) {
+			return compare(getExtra(), other);
+		}
+
+		/**
+		 * Compare two {@link Face}s
 		 * @param other The other face.
 		 * @return The {@link #getDistance() distance}, lower is better.
 		 * @see #compare(float[])
+		 * @see #compare(float[], float[])
 		 */
 		@SuppressWarnings("unused")
 		public float compare(Face other) {
@@ -351,6 +396,7 @@
 		this.hwAcceleration = hwAcceleration;
 		this.enhancedHwAcceleration = enhancedHwAcceleration;
 		this.numThreads = numThreads;
+		this.brightnessTest = new float[][] { brightnessTest(Color.WHITE), brightnessTest(Color.BLACK) };
 	}
 
 	private SimilarityClassifier getClassifier() throws IOException {
@@ -371,29 +417,52 @@
 	/**
 	 * Scan the face inside the {@link InputImage}.
 	 * @param input The {@link InputImage} to process
+	 * @param allowPostprocessing Allow post-processing to improve recognition quality in poor lighting. Should be disabled when registering faces.
 	 * @return {@link Face}
 	 */
-	public Face detectFace(InputImage input) {
+	public Face detectFace(InputImage input, boolean allowPostprocessing) {
 		try {
 			List<SimilarityClassifier.Recognition> results = getClassifier().recognizeImage(input.getProcessedImage());
 			SimilarityClassifier.Recognition result = results.get(0);
-			return new Face(result.getId(), result.getTitle(), result.getDistance(), null, input.getUserDisplayableImage(), result.getExtra()[0]);
+			float[] e = result.getExtra()[0];
+			Face f = new Face(result.getId(), result.getTitle(), result.getDistance(), null, input.getUserDisplayableImage(), e, Face.compare(e, brightnessTest[0]), Face.compare(e, brightnessTest[1]));
+			if (f.getBrightnessHint() == 0 && allowPostprocessing /* try to improve the lighting with post-processing if it is suboptimal but not terrible */) {
+				Face f2 = detectFace(new InputImage(doBrightnessPostProc(input.getProcessedImage()), doBrightnessPostProc(input.getUserDisplayableImage())), false);
+				if (f2 == null) // Earlier logs will have printed the cause.
+					return null;
+				if (f2.getBrightnessHint() == 1)
+					return f2; // Return if it helped.
+			}
+			return f;
 		} catch (IOException e) {
 			Log.e("FaceScanner", Log.getStackTraceString(e));
 			return null;
 		}
 	}
 
-	/* package-private */ float[] brightnessTest(int color) {
-		Bitmap b = Bitmap.createBitmap(FaceScanner.TF_OD_API_INPUT_SIZE, FaceScanner.TF_OD_API_INPUT_SIZE, Bitmap.Config.ARGB_8888);
-		Canvas c = new Canvas(b);
-		c.drawColor(color);
-		List<SimilarityClassifier.Recognition> results = null;
-		try {
-			results = getClassifier().recognizeImage(b);
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		return results.get(0).getExtra()[0];
+	private Bitmap doBrightnessPostProc(Bitmap input) {
+		// An offset of 30, determined through manual testing, gives the best balance between added brightness and preserving facial features
+		return changeBitmapContrastBrightness(input, 30f);
+	}
+
+	// https://stackoverflow.com/a/17887577
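+	// Despite the name (kept from the linked answer), only brightness is adjusted here: the ColorMatrix
+	// adds a constant offset to the R, G and B channels and leaves alpha untouched.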
+	private static Bitmap changeBitmapContrastBrightness(Bitmap bmp, float brightness) {
+		ColorMatrix cm = new ColorMatrix(new float[]
+				{
+						1, 0, 0, 0, brightness,
+						0, 1, 0, 0, brightness,
+						0, 0, 1, 0, brightness,
+						0, 0, 0, 1, 0
+				});
+
+		Bitmap ret = Bitmap.createBitmap(bmp.getWidth(), bmp.getHeight(), bmp.getConfig());
+
+		Canvas canvas = new Canvas(ret);
+
+		Paint paint = new Paint();
+		paint.setColorFilter(new ColorMatrixColorFilter(cm));
+		canvas.drawBitmap(bmp, 0, 0, paint);
+
+		return ret;
 	}
 }
diff --git a/app/src/main/java/com/libremobileos/facedetect/EnrollActivity.java b/app/src/main/java/com/libremobileos/facedetect/EnrollActivity.java
index 75b8943..bb07f04 100644
--- a/app/src/main/java/com/libremobileos/facedetect/EnrollActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/EnrollActivity.java
@@ -20,7 +20,7 @@
 		getLayoutInflater().inflate(R.layout.enroll_finish, f);
 		TextView t = f.findViewById(R.id.textView2);
 		if (getIntent() == null || !getIntent().hasExtra("faces")) {
-			t.setText("Click Next to start!");
+			t.setText(R.string.welcome_text);
 			findViewById(R.id.button).setOnClickListener(v -> {
 				startActivity(new Intent(this, ScanActivity.class));
 				finish();
@@ -38,10 +38,10 @@
 		});
 		RemoteFaceServiceClient.connect(this, faced -> {
 			if (!faced.enroll(getIntent().getStringExtra("faces"))) {
-				runOnUiThread(() -> t.setText("oops something's wrong"));
+				runOnUiThread(() -> t.setText(R.string.register_failed));
 			} else {
 				runOnUiThread(() -> t.setText(
-						"Face Unlock will unlock your phone even if it's not your face. If you don't want that, stop reading and go earn some money to buy an iPhone. Thank you."));
+						R.string.finish_msg));
 			}
 		});
 	}
diff --git a/app/src/main/java/com/libremobileos/facedetect/MainActivity.java b/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
index 63ab9ea..919ea43 100644
--- a/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/MainActivity.java
@@ -17,39 +17,22 @@
 package com.libremobileos.facedetect;
 
 import android.content.Intent;
-import android.content.res.Configuration;
 import android.graphics.Matrix;
 import android.graphics.RectF;
 import android.os.Bundle;
 import android.util.Pair;
-import android.util.Size;
-import android.view.LayoutInflater;
-import android.view.View;
-import android.widget.EditText;
-import android.widget.ImageView;
-import android.widget.TextView;
-import android.widget.Toast;
 
-import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import androidx.annotation.OptIn;
-import androidx.appcompat.app.AlertDialog;
-import androidx.appcompat.app.AppCompatActivity;
-import androidx.camera.core.CameraSelector;
 import androidx.camera.core.ExperimentalGetImage;
 import androidx.camera.core.ImageAnalysis;
-import androidx.camera.core.Preview;
-import androidx.camera.lifecycle.ProcessCameraProvider;
-import androidx.camera.view.PreviewView;
 
-import com.google.common.util.concurrent.ListenableFuture;
 import com.libremobileos.yifan.face.FaceRecognizer;
 import com.libremobileos.yifan.face.FaceStorageBackend;
 import com.libremobileos.yifan.face.SharedPreferencesFaceStorageBackend;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.ExecutionException;
 
 public class MainActivity extends CameraActivity {
 	// AI-based detector
@@ -119,7 +102,7 @@
 				width, /* bitmap width */
 				height, /* bitmap height */
 				0, /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
-				0.7f, /* maximum distance to track face */
+				0.7f, /* maximum distance (to saved face model, not from camera) to track face */
 				1 /* minimum model count to track face */
 		);
 	}
diff --git a/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java b/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
index 37050b1..e946d58 100644
--- a/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
+++ b/app/src/main/java/com/libremobileos/facedetect/ScanActivity.java
@@ -18,6 +18,7 @@
 
 import android.content.Intent;
 import android.os.Bundle;
+import android.util.Pair;
 import android.view.View;
 import android.widget.FrameLayout;
 import android.widget.TextView;
@@ -28,11 +29,9 @@
 import androidx.camera.core.ImageAnalysis;
 
 import com.libremobileos.yifan.face.FaceDataEncoder;
-import com.libremobileos.yifan.face.FaceRecognizer;
+import com.libremobileos.yifan.face.FaceDetector;
+import com.libremobileos.yifan.face.FaceFinder;
 import com.libremobileos.yifan.face.FaceScanner;
-import com.libremobileos.yifan.face.FaceStorageBackend;
-import com.libremobileos.yifan.face.SharedPreferencesFaceStorageBackend;
-import com.libremobileos.yifan.face.VolatileFaceStorageBackend;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -40,12 +39,12 @@
 public class ScanActivity extends CameraActivity {
 
 	// AI-based detector
-	private FaceRecognizer faceRecognizer;
+	private FaceFinder faceRecognizer;
 	// Simple view allowing us to draw a circle over the Preview
 	private CircleOverlayView overlayView;
 	// If we are waiting for a face to be added to knownFaces
 	private long lastAdd;
-	private final List<FaceRecognizer.Face> faces = new ArrayList<>();
+	private final List<FaceScanner.Face> faces = new ArrayList<>();
 	private TextView subText;
 
 	@Override
@@ -58,7 +57,7 @@
 		connectToCam(f.findViewById(R.id.viewFinder));
 		overlayView = f.findViewById(R.id.overlay);
 		subText = f.findViewById(R.id.textView);
-		subText.setText("Scan your face now");
+		subText.setText(R.string.scan_face_now);
 		findViewById(R.id.button2).setOnClickListener(v -> {
 			startActivity(new Intent(this, SettingsActivity.class));
 			finish();
@@ -75,41 +74,42 @@
 			}
 			// Convert CameraX Image to Bitmap and process it
 			// Return list of detected faces
-			List<FaceRecognizer.Face> data = faceRecognizer.recognize(BitmapUtils.getBitmap(imageProxy));
+			List<Pair<FaceDetector.Face, FaceScanner.Face>> data = faceRecognizer.process(BitmapUtils.getBitmap(imageProxy), false /* no post-processing while registering faces */);
 
 			if (data.size() > 1) {
 				if (lastAdd == -1) { // last frame had two faces too
-					subText.setText("Almost nobody has 2 faces, and I'm pretty sure you don't :)");
+					subText.setText(R.string.found_2_faces);
 				}
 				lastAdd = -1;
 				imageProxy.close();
 				return;
 			} else if (lastAdd == -1) {
 				lastAdd = System.currentTimeMillis();
-				subText.setText("Scan your face now");
 			}
 			if (data.size() == 0) {
 				if (lastAdd == -2) { // last frame had 0 faces too
-					subText.setText("Where's your face?");
+					subText.setText(R.string.cant_find_face);
 				}
 				lastAdd = -2;
 				imageProxy.close();
 				return;
 			} else if (lastAdd == -2) {
 				lastAdd = System.currentTimeMillis();
-				subText.setText("Scan your face now");
 			}
 
-			FaceRecognizer.Face face = data.get(0);
+			Pair<FaceDetector.Face, FaceScanner.Face> face = data.get(0);
 
 			// Do we want to add a new face?
 			if (lastAdd + 1000 < System.currentTimeMillis()) {
 				lastAdd = System.currentTimeMillis();
-				if (face.getBrightnessHint() < 0) {
-					subText.setText("Can't properly see your face, maybe turn the lamp on?");
+				if (face.second.getBrightnessHint() < 1) {
+					subText.setText(R.string.cant_scan_face);
+					imageProxy.close();
 					return;
+				} else {
+					subText.setText(R.string.scan_face_now);
 				}
-				faces.add(face);
+				faces.add(face.second);
 				overlayView.setPercentage(faces.size() * 10);
 			}
 
@@ -123,18 +123,12 @@
 			imageProxy.close();
 		});
 
-		// We don't need recognition here
-		FaceStorageBackend faceStorage = new VolatileFaceStorageBackend();
-
 		// Create AI-based face detection
-		faceRecognizer = FaceRecognizer.create(this,
-				faceStorage, /* face data storage */
+		faceRecognizer = FaceFinder.create(this,
 				0.6f, /* minimum confidence to consider object as face */
 				width, /* bitmap width */
 				height, /* bitmap height */
-				0, /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
-				0.7f, /* maximum distance to track face */
-				1 /* minimum model count to track face */
+				0 /* CameraX rotates the image for us, so we chose to IGNORE sensorRotation altogether */
 		);
 	}
 }
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index 1afc513..0b1191c 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -1,14 +1,15 @@
 <resources>
 	<string name="app_name">FaceDetect</string>
-	<string name="add_face">Add Face</string>
-	<string name="input_name">Input name</string>
-	<string name="ok">OK</string>
-	<string name="tap_to_add_face">Tap anywhere to open settings</string>
-	<string name="register_failed">Registering the face failed.</string>
-	<string name="title_activity_settings">SettingsActivity</string>
 	<string name="internal_err">Internal error</string>
 	<string name="face_unlock">Face Unlock</string>
 	<string name="next">Next</string>
 	<string name="cancel">Cancel</string>
-	<string name="accuracy">We recommend 5 scans in optimal light and 5 in suboptimal. This ensures maximum accuracy. If you scan your face in different positions, there will be added accuracy.</string>
+	<string name="accuracy">You must scan your face in optimal lighting conditions; this ensures maximum accuracy. Scanning your face from different positions further improves accuracy.</string>
+	<string name="found_2_faces">2 faces detected. Please make sure only one face is visible to the camera</string>
+	<string name="cant_find_face">Unable to find any face</string>
+	<string name="cant_scan_face">Can\'t properly scan your face. Look directly into the camera and make sure your face is well-lit</string>
+	<string name="scan_face_now">Scan your face now</string>
+	<string name="register_failed">Registering your face failed. We are sorry for the inconvenience. Please try again later.</string>
+	<string name="welcome_text">Click Next to start!</string>
+	<string name="finish_msg">Your face has been registered. Keep in mind that Face Unlock may be less secure than a strong PIN or password.</string>
 </resources>
\ No newline at end of file