diff --git a/app/src/androidTest/java/com/k2_9/omrekap/aprilTag/AprilTagConfigDetectionTest.kt b/app/src/androidTest/java/com/k2_9/omrekap/aprilTag/AprilTagConfigDetectionTest.kt
index 6d80b4578042cb41dfb8f26d04836ee111d1f8f9..0c49e5ffe1f07f8c2410da615f129477315baf2c 100644
--- a/app/src/androidTest/java/com/k2_9/omrekap/aprilTag/AprilTagConfigDetectionTest.kt
+++ b/app/src/androidTest/java/com/k2_9/omrekap/aprilTag/AprilTagConfigDetectionTest.kt
@@ -32,18 +32,18 @@ class AprilTagConfigDetectionTest {
 
 		CoroutineScope(Dispatchers.Default).launch {
 			OMRConfigDetector.loadConfiguration(
-				appContext
+				appContext,
 			)
 			val result = OMRConfigDetector.detectConfiguration(grayImageMat)
 			val gson = Gson()
 			Log.d("ConfigDetectionTestx", gson.toJson(result))
 			val compare = OMRConfigRepository.loadConfigurations(appContext)
 
-//			val resultHash = result!!.first.hashCode()
-//			val compareHash = compare!!.configs["102"].hashCode()
-//			Log.d("ConfigDetectionTestx1", resultHash.toString())
-//			Log.d("ConfigDetectionTestx1", compareHash.toString())
-//			assert(resultHash == compareHash)
+// 			val resultHash = result!!.first.hashCode()
+// 			val compareHash = compare!!.omrConfigs["102"].hashCode()
+// 			Log.d("ConfigDetectionTestx1", resultHash.toString())
+// 			Log.d("ConfigDetectionTestx1", compareHash.toString())
+// 			assert(resultHash == compareHash)
 
 			val resultJSONString = gson.toJson(result!!.first)
 			val compareJSONString = gson.toJson(compare!!.omrConfigs["102"])
diff --git a/app/src/androidTest/java/com/k2_9/omrekap/omr/ContourOMRHelperTest.kt b/app/src/androidTest/java/com/k2_9/omrekap/omr/ContourOMRHelperTest.kt
index 186f40e5ef968b0a2fdb930162247038930571d7..df80cec45f2b315013c526c9af4e20d7bd2d4b7a 100644
--- a/app/src/androidTest/java/com/k2_9/omrekap/omr/ContourOMRHelperTest.kt
+++ b/app/src/androidTest/java/com/k2_9/omrekap/omr/ContourOMRHelperTest.kt
@@ -4,18 +4,19 @@ import android.content.Context
 import android.util.Log
 import androidx.test.ext.junit.runners.AndroidJUnit4
 import androidx.test.platform.app.InstrumentationRegistry
-import com.google.gson.Gson
 import com.k2_9.omrekap.R
-import com.k2_9.omrekap.data.configs.omr.ContourOMRHelperConfig
-import com.k2_9.omrekap.data.configs.omr.OMRCropper
-import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
+import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
 import com.k2_9.omrekap.data.configs.omr.OMRSection
 import com.k2_9.omrekap.utils.SaveHelper
 import com.k2_9.omrekap.utils.omr.ContourOMRHelper
+import com.k2_9.omrekap.utils.omr.OMRConfigDetector
+import kotlinx.coroutines.runBlocking
 import org.junit.Test
 import org.junit.runner.RunWith
 import org.opencv.android.OpenCVLoader
 import org.opencv.android.Utils
+import org.opencv.core.Mat
+import org.opencv.imgproc.Imgproc
 
 @RunWith(AndroidJUnit4::class)
 class ContourOMRHelperTest {
@@ -28,44 +29,55 @@ class ContourOMRHelperTest {
 		appContext = InstrumentationRegistry.getInstrumentation().targetContext
 
 		// Load the image resource as a Bitmap
-		val image = Utils.loadResource(appContext, R.raw.example)
+		val imageMat = Utils.loadResource(appContext, R.raw.test)
+		val templateLoader = CircleTemplateLoader(appContext, R.raw.circle_template)
 
-		val sectionPositions =
-			mapOf(
-				OMRSection.FIRST to Pair(780, 373),
-				OMRSection.SECOND to Pair(0, 0),
-				OMRSection.THIRD to Pair(0, 0),
-			)
+		// Convert the image to grayscale if it is not already
+		val grayscaleImageMat =
+			if (imageMat.channels() != 1) {
+				val grayImageMat = Mat()
+				Imgproc.cvtColor(imageMat, grayImageMat, Imgproc.COLOR_BGR2GRAY)
+				grayImageMat
+			} else {
+				imageMat
+			}
 
-		val cropperConfig =
-			OMRCropperConfig(
-				image,
-				Pair(140, 220),
-				sectionPositions,
-			)
+		runBlocking {
+			// Get OMR Config by AprilTag
+			OMRConfigDetector.loadConfiguration(appContext)
+			val configResult = OMRConfigDetector.detectConfiguration(grayscaleImageMat)
+			assert(configResult != null)
 
-		val cropper = OMRCropper(cropperConfig)
+			val config = configResult!!.first
 
-		val config =
-			ContourOMRHelperConfig(
-				cropper,
-				12,
-				30,
-				0.5f,
-				1.5f,
-				0.9f,
-				230,
-			)
-		Log.d("ContourOMRHelperTest", Gson().toJson(config))
-		helper = ContourOMRHelper(config)
+			config.contourOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.setTemplate(templateLoader)
+
+			helper = ContourOMRHelper(config.contourOMRHelperConfig)
+		}
 	}
 
 	@Test
-	fun test_detect() {
-		val result = helper.detect(OMRSection.FIRST)
-		val imageAnnotated = helper.annotateImage(result)
-		Log.d("ContourOMRHelperTest", result.toString())
-		assert(result == 172)
-		SaveHelper.saveImage(appContext, imageAnnotated, "test", "test_detect")
+	fun test_contour_omr() {
+		val resultFirst = helper.detect(OMRSection.FIRST)
+		val resultSecond = helper.detect(OMRSection.SECOND)
+		val resultThird = helper.detect(OMRSection.THIRD)
+
+		val imgFirst = helper.annotateImage(resultFirst)
+		val imgSecond = helper.annotateImage(resultSecond)
+		val imgThird = helper.annotateImage(resultThird)
+
+		Log.d("ContourOMRHelperTest", resultFirst.toString())
+		Log.d("ContourOMRHelperTest", resultSecond.toString())
+		Log.d("ContourOMRHelperTest", resultThird.toString())
+
+		SaveHelper.saveImage(appContext, imgFirst, "test", "test_contour_omr_first")
+		SaveHelper.saveImage(appContext, imgSecond, "test", "test_contour_omr_second")
+		SaveHelper.saveImage(appContext, imgThird, "test", "test_contour_omr_third")
+
+		assert(resultFirst == 172)
+		assert(resultSecond == 24)
+		assert(resultThird == 2)
 	}
 }
diff --git a/app/src/androidTest/java/com/k2_9/omrekap/omr/OMRCropperTest.kt b/app/src/androidTest/java/com/k2_9/omrekap/omr/OMRCropperTest.kt
index d7853f000a5f4654b9fc88ce073db32c48309c89..2e4f0b7bd9550ff7deea11042a6673cd9ee5ccbc 100644
--- a/app/src/androidTest/java/com/k2_9/omrekap/omr/OMRCropperTest.kt
+++ b/app/src/androidTest/java/com/k2_9/omrekap/omr/OMRCropperTest.kt
@@ -5,14 +5,18 @@ import android.graphics.Bitmap
 import androidx.test.ext.junit.runners.AndroidJUnit4
 import androidx.test.platform.app.InstrumentationRegistry
 import com.k2_9.omrekap.R
+import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
 import com.k2_9.omrekap.data.configs.omr.OMRCropper
-import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
 import com.k2_9.omrekap.data.configs.omr.OMRSection
 import com.k2_9.omrekap.utils.SaveHelper
+import com.k2_9.omrekap.utils.omr.OMRConfigDetector
+import kotlinx.coroutines.runBlocking
 import org.junit.Test
 import org.junit.runner.RunWith
 import org.opencv.android.OpenCVLoader
 import org.opencv.android.Utils
+import org.opencv.core.Mat
+import org.opencv.imgproc.Imgproc
 
 @RunWith(AndroidJUnit4::class)
 class OMRCropperTest {
@@ -25,33 +29,58 @@ class OMRCropperTest {
 		appContext = InstrumentationRegistry.getInstrumentation().targetContext
 
 		// Load the image resource as a Bitmap
-		val image = Utils.loadResource(appContext, R.raw.example)
-
-		val sectionPositions =
-			mapOf(
-				OMRSection.FIRST to Pair(780, 375),
-				OMRSection.SECOND to Pair(0, 0),
-				OMRSection.THIRD to Pair(0, 0),
-			)
-
-		val config =
-			OMRCropperConfig(
-				image,
-				Pair(140, 225),
-				sectionPositions,
-			)
-
-		cropper = OMRCropper(config)
+		val imageMat = Utils.loadResource(appContext, R.raw.example)
+		val templateLoader = CircleTemplateLoader(appContext, R.raw.circle_template)
+
+		// Convert the image to grayscale if it is not already
+		val grayscaleImageMat =
+			if (imageMat.channels() != 1) {
+				val grayImageMat = Mat()
+				Imgproc.cvtColor(imageMat, grayImageMat, Imgproc.COLOR_BGR2GRAY)
+				grayImageMat
+			} else {
+				imageMat
+			}
+
+		runBlocking {
+			// Get OMR Config by AprilTag
+			OMRConfigDetector.loadConfiguration(appContext)
+			val configResult = OMRConfigDetector.detectConfiguration(grayscaleImageMat)
+			assert(configResult != null)
+
+			val config = configResult!!.first
+
+			config.contourOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.setTemplate(templateLoader)
+
+			cropper = OMRCropper(config.contourOMRHelperConfig.omrCropper.config)
+		}
 	}
 
 	@Test
 	fun test_crop() {
-		val result = cropper.crop(OMRSection.FIRST)
+		val resultFirst = cropper.crop(OMRSection.FIRST)
+		val resultSecond = cropper.crop(OMRSection.SECOND)
+		val resultThird = cropper.crop(OMRSection.THIRD)
+
+		val bitmapFirst = Bitmap.createBitmap(resultFirst.cols(), resultFirst.rows(), Bitmap.Config.ARGB_8888)
+		val bitmapSecond = Bitmap.createBitmap(resultSecond.cols(), resultSecond.rows(), Bitmap.Config.ARGB_8888)
+		val bitmapThird = Bitmap.createBitmap(resultThird.cols(), resultThird.rows(), Bitmap.Config.ARGB_8888)
+
+		Utils.matToBitmap(resultFirst, bitmapFirst)
+		Utils.matToBitmap(resultSecond, bitmapSecond)
+		Utils.matToBitmap(resultThird, bitmapThird)
+
+		SaveHelper.saveImage(appContext, bitmapFirst, "test", "test_crop_first.png")
+		SaveHelper.saveImage(appContext, bitmapSecond, "test", "test_crop_second.png")
+		SaveHelper.saveImage(appContext, bitmapThird, "test", "test_crop_third.png")
 
-		val bitmap = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888)
-		Utils.matToBitmap(result, bitmap)
+		val width = cropper.config.omrSectionSize.first
+		val height = cropper.config.omrSectionSize.second
 
-		SaveHelper.saveImage(appContext, bitmap, "test", "test_crop.png")
-		assert(result.width() == 140 && result.height() == 225)
+		assert(resultFirst.width() == width && resultFirst.height() == height)
+		assert(resultSecond.width() == width && resultSecond.height() == height)
+		assert(resultThird.width() == width && resultThird.height() == height)
 	}
 }
diff --git a/app/src/androidTest/java/com/k2_9/omrekap/omr/TemplateMatchingOMRHelperTest.kt b/app/src/androidTest/java/com/k2_9/omrekap/omr/TemplateMatchingOMRHelperTest.kt
index ca8e81920f35f2859e138ba8316ed9b69d65f117..e3d45f58a1052a78a5eaef5d60f0987ed1b89d49 100644
--- a/app/src/androidTest/java/com/k2_9/omrekap/omr/TemplateMatchingOMRHelperTest.kt
+++ b/app/src/androidTest/java/com/k2_9/omrekap/omr/TemplateMatchingOMRHelperTest.kt
@@ -6,22 +6,21 @@ import androidx.test.ext.junit.runners.AndroidJUnit4
 import androidx.test.platform.app.InstrumentationRegistry
 import com.k2_9.omrekap.R
 import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
-import com.k2_9.omrekap.data.configs.omr.OMRCropper
-import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
 import com.k2_9.omrekap.data.configs.omr.OMRSection
-import com.k2_9.omrekap.data.configs.omr.TemplateMatchingOMRHelperConfig
 import com.k2_9.omrekap.utils.SaveHelper
+import com.k2_9.omrekap.utils.omr.OMRConfigDetector
 import com.k2_9.omrekap.utils.omr.TemplateMatchingOMRHelper
+import kotlinx.coroutines.runBlocking
 import org.junit.Test
 import org.junit.runner.RunWith
 import org.opencv.android.OpenCVLoader
 import org.opencv.android.Utils
 import org.opencv.core.Mat
+import org.opencv.imgproc.Imgproc
 
 @RunWith(AndroidJUnit4::class)
 class TemplateMatchingOMRHelperTest {
 	private var helper: TemplateMatchingOMRHelper
-	private val image: Mat
 	private val appContext: Context
 
 	init {
@@ -29,44 +28,56 @@ class TemplateMatchingOMRHelperTest {
 
 		appContext = InstrumentationRegistry.getInstrumentation().targetContext
 
-		// Load the image resource
-		image = Utils.loadResource(appContext, R.raw.example)
-
-		val sectionPositions =
-			mapOf(
-				OMRSection.FIRST to Pair(780, 373),
-				OMRSection.SECOND to Pair(0, 0),
-				OMRSection.THIRD to Pair(0, 0),
-			)
+		// Load the image resource as a Mat
+		val imageMat = Utils.loadResource(appContext, R.raw.test)
+		val templateLoader = CircleTemplateLoader(appContext, R.raw.circle_template)
 
-		val cropperConfig =
-			OMRCropperConfig(
-				image,
-				Pair(140, 220),
-				sectionPositions,
-			)
+		// Convert the image to grayscale if it is not already
+		val grayscaleImageMat =
+			if (imageMat.channels() != 1) {
+				val grayImageMat = Mat()
+				Imgproc.cvtColor(imageMat, grayImageMat, Imgproc.COLOR_BGR2GRAY)
+				grayImageMat
+			} else {
+				imageMat
+			}
 
-		val cropper = OMRCropper(cropperConfig)
+		runBlocking {
+			// Get OMR Config by AprilTag
+			OMRConfigDetector.loadConfiguration(appContext)
+			val configResult = OMRConfigDetector.detectConfiguration(grayscaleImageMat)
+			assert(configResult != null)
 
-		// Load the template image resource
-		val templateLoader = CircleTemplateLoader(appContext, R.raw.circle_template)
+			val config = configResult!!.first
 
-		val config =
-			TemplateMatchingOMRHelperConfig(
-				cropper,
-				templateLoader,
-				0.7f,
-			)
+			config.contourOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.omrCropper.config.setImage(grayscaleImageMat)
+			config.templateMatchingOMRHelperConfig.setTemplate(templateLoader)
 
-		helper = TemplateMatchingOMRHelper(config)
+			helper = TemplateMatchingOMRHelper(config.templateMatchingOMRHelperConfig)
+		}
 	}
 
 	@Test
-	fun test_detect() {
-		val result = helper.detect(OMRSection.FIRST)
-		val imgRes = helper.annotateImage(result)
-		Log.d("TemplateMatchingOMRHelperTest", result.toString())
-		SaveHelper.saveImage(appContext, imgRes, "test", "test_detect")
-		assert(result == 172)
+	fun test_template_matching_omr() {
+		val resultFirst = helper.detect(OMRSection.FIRST)
+		val resultSecond = helper.detect(OMRSection.SECOND)
+		val resultThird = helper.detect(OMRSection.THIRD)
+
+		val imgFirst = helper.annotateImage(resultFirst)
+		val imgSecond = helper.annotateImage(resultSecond)
+		val imgThird = helper.annotateImage(resultThird)
+
+		Log.d("TemplateMatchingOMRHelperTest", resultFirst.toString())
+		Log.d("TemplateMatchingOMRHelperTest", resultSecond.toString())
+		Log.d("TemplateMatchingOMRHelperTest", resultThird.toString())
+
+		SaveHelper.saveImage(appContext, imgFirst, "test", "test_template_matching_omr_first")
+		SaveHelper.saveImage(appContext, imgSecond, "test", "test_template_matching_omr_second")
+		SaveHelper.saveImage(appContext, imgThird, "test", "test_template_matching_omr_third")
+
+		assert(resultFirst == 172)
+		assert(resultSecond == 24)
+		assert(resultThird == 2)
 	}
 }
diff --git a/app/src/androidTest/java/com/k2_9/omrekap/preprocess/CropHelperTest.kt b/app/src/androidTest/java/com/k2_9/omrekap/preprocess/CropHelperTest.kt
index a5522c034fa3da52221a9fe5f25562bd6138de82..3b15aad8d686adffdbcbcec5e09f21235ca32aa9 100644
--- a/app/src/androidTest/java/com/k2_9/omrekap/preprocess/CropHelperTest.kt
+++ b/app/src/androidTest/java/com/k2_9/omrekap/preprocess/CropHelperTest.kt
@@ -17,6 +17,7 @@ import org.opencv.android.OpenCVLoader
 import org.opencv.android.Utils
 import org.opencv.core.CvType
 import org.opencv.core.Mat
+import java.time.Instant
 
 @RunWith(JUnit4::class)
 class CropHelperTest {
@@ -35,23 +36,24 @@ class CropHelperTest {
 		image = Utils.loadResource(appContext, R.raw.example, CvType.CV_8UC1)
 		patternImage = Utils.loadResource(appContext, R.raw.corner_pattern, CvType.CV_8UC4)
 
-		patternBitmap = Bitmap.createBitmap(
-			patternImage.width(),
-			patternImage.height(),
-			Bitmap.Config.ARGB_8888
-		)
+		patternBitmap =
+			Bitmap.createBitmap(
+				patternImage.width(),
+				patternImage.height(),
+				Bitmap.Config.ARGB_8888,
+			)
 		imageBitmap = Bitmap.createBitmap(image.width(), image.height(), Bitmap.Config.ARGB_8888)
 		Utils.matToBitmap(image, imageBitmap)
 		Utils.matToBitmap(patternImage, patternBitmap)
 
 		CropHelper.loadPattern(patternBitmap)
 
-		imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>())
+		imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>(), Instant.now())
 	}
 
 	@Before
 	fun beforeEachTest() {
-		imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>())
+		imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>(), Instant.now())
 	}
 
 	@Test
@@ -64,7 +66,7 @@ class CropHelperTest {
 			appContext,
 			imageSaveData.annotatedImage,
 			"test",
-			"test_preprocess_annotated"
+			"test_preprocess_annotated",
 		)
 	}
 
diff --git a/app/src/main/assets/omr_config.json b/app/src/main/assets/omr_config.json
index 7509badb09fb47ba40ca3f29ca4c74f98204369d..bc4515a20ec7d237055426a5704de08b28f07bd9 100644
--- a/app/src/main/assets/omr_config.json
+++ b/app/src/main/assets/omr_config.json
@@ -1,11 +1,16 @@
 {
   "omrConfigs": {
     "102": {
+      "contents": {
+        "FIRST": "Anis",
+        "SECOND": "Bowo",
+        "THIRD": "Janggar"
+      },
       "contourOMRHelperConfig": {
-        "darkIntensityThreshold": 230,
+        "darkIntensityThreshold": 200,
         "darkPercentageThreshold": 0.9,
         "maxAspectRatio": 1.5,
-        "maxRadius": 30,
+        "maxRadius": 25,
         "minAspectRatio": 0.5,
         "minRadius": 12,
         "omrCropper": {
@@ -13,21 +18,21 @@
             "image": null,
             "omrSectionPosition": {
               "FIRST": {
-                "first": 780,
-                "second": 373
+                "first": 712,
+                "second": 296
               },
               "SECOND": {
-                "first": 0,
-                "second": 0
+                "first": 712,
+                "second": 588
               },
               "THIRD": {
-                "first": 0,
-                "second": 0
+                "first": 712,
+                "second": 880
               }
             },
             "omrSectionSize": {
-              "first": 140,
-              "second": 220
+              "first": 120,
+              "second": 205
             }
           }
         }
@@ -40,21 +45,21 @@
             "image": null,
             "omrSectionPosition": {
               "FIRST": {
-                "first": 780,
-                "second": 373
+                "first": 712,
+                "second": 296
               },
               "SECOND": {
-                "first": 0,
-                "second": 0
+                "first": 712,
+                "second": 588
               },
               "THIRD": {
-                "first": 0,
-                "second": 0
+                "first": 712,
+                "second": 880
               }
             },
             "omrSectionSize": {
-              "first": 140,
-              "second": 220
+              "first": 120,
+              "second": 205
             }
           }
         }
diff --git a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropper.kt b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropper.kt
index c70a3d13f6759d9b7cb5995c6d07ff4ad5009163..7ee53ff4e727bd4262169b5e000e10cd1867b3d7 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropper.kt
@@ -12,4 +12,11 @@ class OMRCropper(val config: OMRCropperConfig) {
 
 		return Mat(config.image, roi)
 	}
+
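+	/** Returns the bounding rectangle of the given section in the source image, based on the configured position and size. */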
+	fun sectionPosition(section: OMRSection): Rect {
+		val (x, y) = config.getSectionPosition(section)
+		val (width, height) = config.omrSectionSize
+
+		return Rect(x, y, width, height)
+	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropperConfig.kt b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropperConfig.kt
index 9467e226785d67a374ecbe8cdcc7fdaf755decc4..fabe92619a12e5b9fb6d70a870eb82248b5a1cb7 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropperConfig.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/OMRCropperConfig.kt
@@ -3,13 +3,15 @@ package com.k2_9.omrekap.data.configs.omr
 import org.opencv.core.Mat
 
 class OMRCropperConfig(
-	image: Mat,
+	image: Mat?,
 	val omrSectionSize: Pair<Int, Int>,
 	omrSectionPosition: Map<OMRSection, Pair<Int, Int>>,
 ) {
-	var image: Mat
+	var image: Mat?
 		private set
-		get() = field.clone()
+		get() = field?.clone()
+
+	private val omrSectionPosition: Map<OMRSection, Pair<Int, Int>>
 
 	// Check if all the sections are present
 	init {
@@ -21,10 +23,6 @@ class OMRCropperConfig(
 			"OMR section size must be non-negative"
 		}
 
-		require(omrSectionSize.first <= image.width() && omrSectionSize.second <= image.height()) {
-			"OMR section size must be less than or equal to the image size"
-		}
-
 		require(omrSectionPosition.keys.containsAll(OMRSection.entries)) {
 			"All OMR sections must be present"
 		}
@@ -33,16 +31,27 @@ class OMRCropperConfig(
 			"OMR section position must be non-negative"
 		}
 
-		this.image = image.clone()
-	}
+		this.image = null
+		this.omrSectionPosition = omrSectionPosition.toMap()
 
-	private val omrSectionPosition: Map<OMRSection, Pair<Int, Int>> = omrSectionPosition.toMap()
+		if (image != null) {
+			setImage(image)
+		}
+	}
 
 	fun getSectionPosition(section: OMRSection): Pair<Int, Int> {
 		return omrSectionPosition[section]!!
 	}
 
 	fun setImage(image: Mat) {
-		this.image = image
+		require(omrSectionSize.first <= image.width() && omrSectionSize.second <= image.height()) {
+			"OMR section size must be less than or equal to the image size"
+		}
+
+		require(omrSectionPosition.values.all { it.first <= image.width() && it.second <= image.height() }) {
+			"OMR section position must be less than the image size"
+		}
+
+		this.image = image.clone()
 	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/TemplateMatchingOMRHelperConfig.kt b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/TemplateMatchingOMRHelperConfig.kt
index 68a481df431371f874a0fa60eb102113a000b742..f3d83c34d2ece54916ac5368670d518ffbdd704d 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/configs/omr/TemplateMatchingOMRHelperConfig.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/configs/omr/TemplateMatchingOMRHelperConfig.kt
@@ -4,12 +4,12 @@ import org.opencv.core.Mat
 
 class TemplateMatchingOMRHelperConfig(
 	omrCropper: OMRCropper,
-	templateLoader: CircleTemplateLoader,
+	templateLoader: CircleTemplateLoader?,
 	similarityThreshold: Float,
 ) : OMRHelperConfig(omrCropper) {
-	var template: Mat
+	var template: Mat?
 		private set
-		get() = field.clone()
+		get() = field?.clone()
 
 	var similarityThreshold: Float
 		private set
@@ -19,7 +19,11 @@ class TemplateMatchingOMRHelperConfig(
 			"similarity_threshold must be between 0 and 1"
 		}
 
-		this.template = templateLoader.loadTemplateImage()
+		this.template = templateLoader?.loadTemplateImage()
 		this.similarityThreshold = similarityThreshold
 	}
+
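+	/** Sets the template image after construction, e.g. when the config was deserialized without a template loader. */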
+	fun setTemplate(templateLoader: CircleTemplateLoader) {
+		this.template = templateLoader.loadTemplateImage()
+	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/data/models/ImageSaveData.kt b/app/src/main/java/com/k2_9/omrekap/data/models/ImageSaveData.kt
index adef620e1e5f1f49b207db68cb1d2ba0770c41c4..e8186b427965953e6d3a871588ebb36ae17977b8 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/models/ImageSaveData.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/models/ImageSaveData.kt
@@ -1,9 +1,11 @@
 package com.k2_9.omrekap.data.models
 
 import android.graphics.Bitmap
+import java.time.Instant
 
 data class ImageSaveData(
 	val rawImage: Bitmap,
 	var annotatedImage: Bitmap,
-	var data: Map<String, Int>,
+	var data: Map<String, Int?>,
+	var timestamp: Instant,
 )
diff --git a/app/src/main/java/com/k2_9/omrekap/data/models/OMRBaseConfiguration.kt b/app/src/main/java/com/k2_9/omrekap/data/models/OMRBaseConfiguration.kt
index cc9cfa44f2d84e7c50a204dd4ac8c58d64ff553a..f5cc6f3d16a24b0c378d4888bfa920251d6e2284 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/models/OMRBaseConfiguration.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/models/OMRBaseConfiguration.kt
@@ -1,16 +1,18 @@
 package com.k2_9.omrekap.data.models
 
 import com.k2_9.omrekap.data.configs.omr.ContourOMRHelperConfig
+import com.k2_9.omrekap.data.configs.omr.OMRSection
 import com.k2_9.omrekap.data.configs.omr.TemplateMatchingOMRHelperConfig
 
 /**
  * Scanned image's OMR detection template
  */
 data class OMRBaseConfiguration(
-	val omrConfigs: Map<String, OMRConfigurationParameter>
+	val omrConfigs: Map<String, OMRConfigurationParameter>,
 )
 
 data class OMRConfigurationParameter(
+	val contents: Map<OMRSection, String>,
 	val contourOMRHelperConfig: ContourOMRHelperConfig,
-	val templateMatchingOMRHelperConfig: TemplateMatchingOMRHelperConfig
+	val templateMatchingOMRHelperConfig: TemplateMatchingOMRHelperConfig,
 )
diff --git a/app/src/main/java/com/k2_9/omrekap/data/repository/OMRConfigRepository.kt b/app/src/main/java/com/k2_9/omrekap/data/repository/OMRConfigRepository.kt
index fcbfe5a078cc889c20de1554bdbc76bc2c4adb58..05dbebd85b7a681ee9c63483e9cca47cd8dd8cc1 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/repository/OMRConfigRepository.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/repository/OMRConfigRepository.kt
@@ -10,9 +10,10 @@ import java.io.IOException
 
 object OMRConfigRepository {
 	suspend fun loadConfigurations(context: Context): OMRBaseConfiguration? {
-		val jsonString = withContext(Dispatchers.IO) {
-			readConfigString(context)
-		}
+		val jsonString =
+			withContext(Dispatchers.IO) {
+				readConfigString(context)
+			}
 		return if (jsonString == null) {
 			Toast.makeText(context, "Error! Unable to read configuration", Toast.LENGTH_SHORT)
 				.show()
@@ -33,7 +34,7 @@ object OMRConfigRepository {
 		return try {
 			val buffer = ByteArray(inputStream.available())
 			inputStream.read(buffer)
-//			Log.d("OMRConfigLoader", String(buffer))
+// 			Log.d("OMRConfigLoader", String(buffer))
 
 			String(buffer)
 		} catch (e: IOException) {
diff --git a/app/src/main/java/com/k2_9/omrekap/data/repository/OMRJsonConfigLoader.kt b/app/src/main/java/com/k2_9/omrekap/data/repository/OMRJsonConfigLoader.kt
index 793c0520e1f4b3e931ef9c9b3994e8af3d11897b..9cf3d98d63238614c21d4ec83682d2d6ea66ef62 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/repository/OMRJsonConfigLoader.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/repository/OMRJsonConfigLoader.kt
@@ -3,7 +3,6 @@ package com.k2_9.omrekap.data.repository
 import com.google.gson.Gson
 import com.k2_9.omrekap.data.models.OMRBaseConfiguration
 
-
 object OMRJsonConfigLoader {
 	private val gson = Gson()
 
diff --git a/app/src/main/java/com/k2_9/omrekap/data/view_models/ImageDataViewModel.kt b/app/src/main/java/com/k2_9/omrekap/data/view_models/ImageDataViewModel.kt
index 83a090511274daa7e968e7207ade9a6474ee076d..3f3696f69d6b899efbc3b27931cff9f98d1561d0 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/view_models/ImageDataViewModel.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/view_models/ImageDataViewModel.kt
@@ -1,26 +1,38 @@
 package com.k2_9.omrekap.data.view_models
 
+import android.graphics.Bitmap
+import android.util.Log
 import androidx.lifecycle.LiveData
 import androidx.lifecycle.MutableLiveData
 import androidx.lifecycle.ViewModel
 import androidx.lifecycle.viewModelScope
+import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
+import com.k2_9.omrekap.data.configs.omr.OMRSection
 import com.k2_9.omrekap.data.models.ImageSaveData
 import com.k2_9.omrekap.utils.AprilTagHelper
+import com.k2_9.omrekap.utils.ImageAnnotationHelper
+import com.k2_9.omrekap.utils.omr.ContourOMRHelper
 import com.k2_9.omrekap.utils.omr.OMRConfigDetector
+import com.k2_9.omrekap.utils.omr.OMRHelper
+import com.k2_9.omrekap.utils.omr.TemplateMatchingOMRHelper
 import kotlinx.coroutines.launch
 import org.opencv.android.Utils
 import org.opencv.core.Mat
 import org.opencv.imgproc.Imgproc
+import java.time.Instant
 
 class ImageDataViewModel : ViewModel() {
-	private val _data = MutableLiveData<ImageSaveData>()
-	val data = _data as LiveData<ImageSaveData>
+	private val _data = MutableLiveData<ImageSaveData?>()
+	val data = _data as LiveData<ImageSaveData?>
 
-	fun processImage(data: ImageSaveData) {
+	fun processImage(
+		data: ImageSaveData,
+		circleTemplateLoader: CircleTemplateLoader,
+	) {
 		viewModelScope.launch {
 			val rawImage = data.rawImage
 			val imageMat = Mat()
-//			val annotatedImageMat = Mat()
+// 			val annotatedImageMat = Mat()
 			Utils.bitmapToMat(rawImage, imageMat)
 
 			// convert image to gray
@@ -28,14 +40,78 @@ class ImageDataViewModel : ViewModel() {
 			Imgproc.cvtColor(imageMat, grayImageMat, Imgproc.COLOR_BGR2GRAY)
 
 			// load configuration
-			val (loadedConfig, id, corners) = OMRConfigDetector.detectConfiguration(grayImageMat)!!
+			val configurationResult = OMRConfigDetector.detectConfiguration(grayImageMat)
+
+			if (configurationResult == null) {
+				_data.value = data
+				return@launch
+			}
+
+			val (loadedConfig, _, _) = configurationResult
 
 			// annotate the detected AprilTag
-			val annotatedImage = AprilTagHelper.annotateImage(rawImage)
+			var annotatedImage = AprilTagHelper.annotateImage(rawImage)
+
+			// process OMR
+			val matImage = Mat()
+
+			Utils.bitmapToMat(data.rawImage, matImage)
+
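+			// Inject the scanned image and the circle template into the detected configuration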
+			loadedConfig.contourOMRHelperConfig.omrCropper.config.setImage(matImage)
+			loadedConfig.templateMatchingOMRHelperConfig.omrCropper.config.setImage(matImage)
+			loadedConfig.templateMatchingOMRHelperConfig.setTemplate(circleTemplateLoader)
+
+			val contourOMRHelper = ContourOMRHelper(loadedConfig.contourOMRHelperConfig)
+			val templateMatchingOMRHelper =
+				TemplateMatchingOMRHelper(loadedConfig.templateMatchingOMRHelperConfig)
+
+			val result: MutableMap<OMRSection, Int?> = mutableMapOf()
 
-			// TODO: Process the raw image using OMRHelper
-			data.annotatedImage = annotatedImage
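+			// Detect each section with the contour-based helper, falling back to template matching when detection fails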
+			for (section in OMRSection.entries) {
+				try {
+					result[section] = contourOMRHelper.detect(section)
+				} catch (e: OMRHelper.DetectionError) {
+					try {
+						result[section] = templateMatchingOMRHelper.detect(section)
+					} catch (e: OMRHelper.DetectionError) {
+						result[section] = null
+					}
+				}
+			}
+
+			val pageContent = loadedConfig.contents
+			val stringKeyResult = mutableMapOf<String, Int?>()
+
+			for ((section, value) in result) {
+				stringKeyResult[pageContent[section]!!] = value
+
+				annotatedImage =
+					ImageAnnotationHelper.annotateOMR(
+						annotatedImage,
+						contourOMRHelper.getSectionPosition(section),
+						value,
+					)
+				Log.d("Result", "${pageContent[section]}: $value")
+			}
+
+			data.data = stringKeyResult
+			data.timestamp = Instant.now()
+
+			// TODO: annotate omr result
+			val annotatedImageBitmap =
+				Bitmap.createBitmap(
+					annotatedImage.width(),
+					annotatedImage.height(),
+					Bitmap.Config.ARGB_8888,
+				)
+			Utils.matToBitmap(annotatedImage, annotatedImageBitmap)
+			data.annotatedImage = annotatedImageBitmap
 			_data.value = data
 		}
 	}
+
+	fun resetState() {
+		_data.value = null
+	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/data/view_models/PreviewViewModel.kt b/app/src/main/java/com/k2_9/omrekap/data/view_models/PreviewViewModel.kt
index ec0e7a301887f280044a1e1f9f6c5a234b9b1cbc..89451f9810cb009b0616a64bfe160e24281fcf3e 100644
--- a/app/src/main/java/com/k2_9/omrekap/data/view_models/PreviewViewModel.kt
+++ b/app/src/main/java/com/k2_9/omrekap/data/view_models/PreviewViewModel.kt
@@ -8,6 +8,7 @@ import androidx.lifecycle.viewModelScope
 import com.k2_9.omrekap.data.models.ImageSaveData
 import com.k2_9.omrekap.utils.PreprocessHelper
 import kotlinx.coroutines.launch
+import java.time.Instant
 
 class PreviewViewModel : ViewModel() {
 	private val _data = MutableLiveData<ImageSaveData>()
@@ -15,7 +16,7 @@ class PreviewViewModel : ViewModel() {
 
 	fun preprocessImage(img: Bitmap) {
 		viewModelScope.launch {
-			val data = ImageSaveData(img, img, mapOf())
+			val data = ImageSaveData(img, img, mapOf(), Instant.now())
 			_data.value = PreprocessHelper.preprocessImage(data)
 		}
 	}
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/AprilTagHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/AprilTagHelper.kt
index 4f1e9f8fc661413d40f00f86ebd4c9e2fa774bbd..ecd7aac2e6f4ad0d645ecfe2496faae0f671195f 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/AprilTagHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/AprilTagHelper.kt
@@ -66,7 +66,7 @@ object AprilTagHelper {
 				"with corners at: (${cornerPoints[0, 0][0]},${cornerPoints[0, 0][1]}), " +
 					"(${cornerPoints[0, 1][0]},${cornerPoints[0, 1][1]}) " +
 					"(${cornerPoints[0, 2][0]},${cornerPoints[0, 2][1]}) " +
-					"(${cornerPoints[0, 3][0]},${cornerPoints[0, 3][1]})"
+					"(${cornerPoints[0, 3][0]},${cornerPoints[0, 3][1]})",
 			)
 			idList.add(id)
 		}
@@ -74,19 +74,11 @@ object AprilTagHelper {
 		return (idList to corners)
 	}
 
-	fun annotateImage(imageBitmap: Bitmap): Bitmap {
+	fun annotateImage(imageBitmap: Bitmap): Mat {
 		val res = getAprilTagId(imageBitmap)
 		val cornerPoints = res.second
 		val ids = (res.first)[0]
-		val annotatedImageMat =
-			ImageAnnotationHelper.annotateAprilTag(prepareImage(imageBitmap), cornerPoints, ids)
-		val annotatedImageBitmap = Bitmap.createBitmap(
-			annotatedImageMat.width(),
-			annotatedImageMat.height(),
-			Bitmap.Config.ARGB_8888
-		)
-		Utils.matToBitmap(annotatedImageMat, annotatedImageBitmap)
-		return annotatedImageBitmap
+		return ImageAnnotationHelper.annotateAprilTag(prepareImage(imageBitmap), cornerPoints, ids)
 	}
 
 	private fun prepareDetector(detectorDictionary: Dictionary): ArucoDetector {
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/CropHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/CropHelper.kt
index b48ee52ec2e6d6f6f8825d511bace2516a6cf13d..1ff85ab9f1d9b408bcd941ce7794f09f61c50aed 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/CropHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/CropHelper.kt
@@ -33,7 +33,7 @@ object CropHelper {
 		Utils.bitmapToMat(patternBitmap, cv8uc4pattern)
 		cvtColor(cv8uc4pattern, this.pattern, COLOR_BGR2GRAY)
 
-		PreprocessHelper.preprocessPattern(this.pattern)
+		this.pattern = PreprocessHelper.preprocessPattern(this.pattern)
 	}
 
 	fun detectCorner(img: Mat): CornerPoints {
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/ImageAnnotationHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/ImageAnnotationHelper.kt
index bed44fbe62849938b690da790792980cc049e30a..7ae0919b1ac5c1853ee617ee699afc7463c54d2e 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/ImageAnnotationHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/ImageAnnotationHelper.kt
@@ -27,6 +27,8 @@ object ImageAnnotationHelper {
 		id: String,
 	): Mat {
 		val imgWithAnnotations = img.clone()
+		// Convert the grayscale image to BGR so annotations can be drawn in color
+		Imgproc.cvtColor(img, imgWithAnnotations, Imgproc.COLOR_GRAY2BGR)
 		if (id.isNotEmpty()) {
 			// points -> list<Point*s*>, inside list of points are corners of the detector
 			val points =
@@ -39,34 +41,36 @@ object ImageAnnotationHelper {
 					}
 					points
 				}
-
 			// Draw ID and bounding box
+			val originalPoint = points[0][0]
+			val drawnPoint = Point(originalPoint.x - 30.0, originalPoint.y - 40.0)
+
 			Imgproc.putText(
 				imgWithAnnotations,
 				id,
-				points[0][0],
+				drawnPoint,
 				Imgproc.FONT_HERSHEY_SIMPLEX,
-				1.0,
+				0.5,
 				Scalar(0.0, 255.0, 0.0),
-				5
+				1,
 			)
 			Imgproc.polylines(
 				imgWithAnnotations,
 				listOf(MatOfPoint(*points[0].toTypedArray())),
 				true,
 				Scalar(0.0, 255.0, 0.0),
-				5
+				1,
 			)
 		} else {
 			val topLeft = Point(cornerPoints[0].get(0, 0)[0], cornerPoints[0].get(1, 0)[0])
 			Imgproc.putText(
 				imgWithAnnotations,
-				"April Tag Not Detected",
+				"Not Detected",
 				topLeft,
 				Imgproc.FONT_HERSHEY_SIMPLEX,
 				1.0,
 				Scalar(0.0, 255.0, 0.0),
-				5
+				5,
 			)
 		}
 		return imgWithAnnotations
@@ -78,6 +82,8 @@ object ImageAnnotationHelper {
 		contourNumber: Int,
 	): Mat {
 		val imgWithAnnotations = img.clone()
+		// Convert the grayscale image to BGR so annotations can be drawn in color
+		Imgproc.cvtColor(img, imgWithAnnotations, Imgproc.COLOR_GRAY2BGR)
 		for (rect in cornerPoints) {
 			Imgproc.rectangle(imgWithAnnotations, rect.tl(), rect.br(), Scalar(0.0, 255.0, 0.0), 1)
 		}
@@ -114,4 +120,43 @@ object ImageAnnotationHelper {
 		)
 		return imgWithAnnotations
 	}
+
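+	/** Draws the detected number (or "Not Detected") and a bounding box for the given OMR section. */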
+	fun annotateOMR(
+		img: Mat,
+		section: Rect,
+		result: Int?,
+	): Mat {
+		val imgWithAnnotations = img.clone()
+
+		// Draw text on the image
+		if (result == null) {
+			Imgproc.putText(
+				imgWithAnnotations,
+				"Not Detected",
+				Point(section.x.toDouble() - 13.0, section.y.toDouble() - 20.0),
+				Imgproc.FONT_HERSHEY_SIMPLEX,
+				0.5,
+				Scalar(0.0, 255.0, 0.0),
+				1,
+			)
+		} else {
+			Imgproc.putText(
+				imgWithAnnotations,
+				"$result",
+				Point(section.x.toDouble() + 50.0, section.y.toDouble() - 10.0),
+				Imgproc.FONT_HERSHEY_SIMPLEX,
+				1.0,
+				Scalar(0.0, 255.0, 0.0),
+				2,
+			)
+		}
+		Imgproc.rectangle(
+			imgWithAnnotations,
+			section.tl(),
+			section.br(),
+			Scalar(0.0, 255.0, 0.0),
+			2,
+		)
+		return imgWithAnnotations
+	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/PreprocessHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/PreprocessHelper.kt
index 827b27a7e3d25adda0cdf6a5c4ac716c88159405..6858fea2f2763f3614b355552d129cb58e959607 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/PreprocessHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/PreprocessHelper.kt
@@ -7,10 +7,11 @@ import org.opencv.core.Core
 import org.opencv.core.Mat
 import org.opencv.core.Size
 import org.opencv.imgproc.Imgproc
+import java.time.Instant
 
 object PreprocessHelper {
-	private const val FINAL_WIDTH = 540.0
-	private const val FINAL_HEIGHT = 960.0
+	private const val FINAL_WIDTH = 900.0
+	private const val FINAL_HEIGHT = 1600.0
 
 	fun preprocessImage(data: ImageSaveData): ImageSaveData {
 		// Initialize Mats
@@ -28,7 +29,6 @@ object PreprocessHelper {
 		val cornerPoints = CropHelper.detectCorner(mainImageResult)
 
 		// Annotate annotated image
-		// TODO: Call function to annotate image
 		annotatedImageResult = ImageAnnotationHelper.annotateCorner(annotatedImageResult, cornerPoints)
 
 		// Crop both images
@@ -46,31 +46,26 @@ object PreprocessHelper {
 		Utils.matToBitmap(mainImageResult, mainImageBitmap)
 		Utils.matToBitmap(annotatedImageResult, annotatedImageBitmap)
 
-		return ImageSaveData(mainImageBitmap, annotatedImageBitmap, data.data)
+		return ImageSaveData(mainImageBitmap, annotatedImageBitmap, data.data, Instant.now())
 	}
 
 	private fun preprocessMat(img: Mat): Mat {
-		return img.apply {
-			resizeMat(this)
-			normalize(this)
-		}
+		return resizeMat(img)
 	}
 
 	fun preprocessPattern(img: Mat): Mat {
-		return img.apply {
-			normalize(this)
-		}
+		return normalize(img)
 	}
 
 	private fun resizeMat(img: Mat): Mat {
-		val resizedImg = Mat()
-		Imgproc.resize(img, resizedImg, Size(FINAL_WIDTH, FINAL_HEIGHT))
+		val resizedImg = Mat(Size(FINAL_WIDTH, FINAL_HEIGHT), img.type())
+		Imgproc.resize(img, resizedImg, Size(FINAL_WIDTH, FINAL_HEIGHT), 0.0, 0.0, Imgproc.INTER_CUBIC)
 		return resizedImg
 	}
 
 	private fun normalize(img: Mat): Mat {
 		val normalizedImg = Mat()
-		Core.normalize(img, normalizedImg)
+		Core.normalize(img, normalizedImg, 0.0, 255.0, Core.NORM_MINMAX)
 		return normalizedImg
 	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/SaveHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/SaveHelper.kt
index 7d22fc16b18f8713c1caee7dc6d2cb0e228149d2..793eec738fbf85cd17276948700c49df932fc3c7 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/SaveHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/SaveHelper.kt
@@ -26,10 +26,10 @@ object SaveHelper {
 	) {
 		val folderName: String = generateFolderName()
 
-//		TODO: Uncomment after implemented
-//		if (data.data.isEmpty()) {
-//			throw RuntimeException("Image has not been processed yet")
-//		}
+// 		TODO: Uncomment after implemented
+// 		if (data.data.isEmpty()) {
+// 			throw RuntimeException("Image has not been processed yet")
+// 		}
 
 		if (data.rawImage.width <= 0 || data.rawImage.height <= 0) {
 			throw RuntimeException("The raw image bitmap is empty")
@@ -42,6 +42,8 @@ object SaveHelper {
 		withContext(Dispatchers.IO) {
 			saveImage(context, data.rawImage, folderName, "raw_image.jpg")
 			saveImage(context, data.annotatedImage, folderName, "annotated_image.jpg")
+
+			// TODO: handle null values in data, i.e. sections where OMR detection failed
 			saveJSON(context, data.data, folderName, "data.json")
 		}
 	}
@@ -79,7 +81,7 @@ object SaveHelper {
 
 	private fun saveJSON(
 		context: Context,
-		data: Map<String, Int>,
+		data: Map<String, Int?>,
 		folderName: String,
 		fileName: String,
 	) {
@@ -148,7 +150,7 @@ object SaveHelper {
 	}
 
 	private fun saveJSONBeforeAndroidQ(
-		data: Map<String, Int>,
+		data: Map<String, Int?>,
 		folderName: String,
 		fileName: String,
 	) {
@@ -174,7 +176,7 @@ object SaveHelper {
 	@RequiresApi(Build.VERSION_CODES.Q)
 	private fun saveJSONAndroidQandAbove(
 		context: Context,
-		data: Map<String, Int>,
+		data: Map<String, Int?>,
 		folderName: String,
 		fileName: String,
 	) {
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/omr/ContourOMRHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/omr/ContourOMRHelper.kt
index d155d1785fb7a25355f42e90f6d87e1f528ce140..b5a4d9c47de76e4a1c7c9d19bdbafd2175a0e077 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/omr/ContourOMRHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/omr/ContourOMRHelper.kt
@@ -10,6 +10,7 @@ import org.opencv.core.Core
 import org.opencv.core.CvType
 import org.opencv.core.Mat
 import org.opencv.core.MatOfPoint
+import org.opencv.core.Rect
 import org.opencv.core.Scalar
 import org.opencv.imgproc.Imgproc
 
@@ -24,21 +25,35 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 		return ContourInfo(Pair(centerX, centerY), Pair(rect.width, rect.height))
 	}
 
-	private fun getContourInfo(filledContours: List<Mat>): List<ContourInfo?> {
-		val contourInfos = mutableListOf<ContourInfo?>()
-		val sortedContours = filledContours.sortedBy { Imgproc.boundingRect(it).x }
+	private fun getContourInfo(
+		filledContours: List<Mat>,
+		filledIntensities: List<Int>,
+	): List<ContourInfo?> {
+		val contourInfos = mutableListOf<ContourInfo>()
 
-		// Sort the filled contours from left to right and get the center and size of each contour
+		// Zip filledContours with filledIntensities
+		val contoursWithIntensities = filledContours.zip(filledIntensities)
+
+		// Sort contours and intensities based on the x-coordinate of bounding rectangles
+		val sortedContoursWithIntensities = contoursWithIntensities.sortedBy { (contour, _) -> Imgproc.boundingRect(contour).x }
+
+		// Unzip sorted contours and intensities
+		val (sortedContours, sortedIntensities) = sortedContoursWithIntensities.unzip()
+
+		// Get contour info for each sorted contour
 		for (contour in sortedContours) {
 			contourInfos.add(createContourInfo(contour))
 		}
-		return filterContourInfos(contourInfos)
+
+		// Filter contour infos with sorted intensities
+		return filterContourInfos(contourInfos, sortedIntensities.map { it.toDouble() })
 	}
 
 	private fun predictForFilledCircle(contours: List<MatOfPoint>): Int {
 		// Predict the number based on the filled circle contours
 
 		val filledContours = mutableListOf<Mat>()
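+		// Track each filled contour's total intensity so overlapping detections can be ranked later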
+		val filledIntensities = mutableListOf<Int>()
 
 		for (contour in contours) {
 			val mask = Mat.zeros(currentSectionBinary!!.size(), CvType.CV_8UC1)
@@ -61,10 +76,16 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 				percentageDarkPixels >= config.darkPercentageThreshold
 			) {
 				filledContours.add(contour)
+				filledIntensities.add(totalIntensity)
 			}
 		}
 
-		val contourInfos = getContourInfo(filledContours)
+		val contourInfos = getContourInfo(filledContours, filledIntensities)
+
+		if (contourInfos.size != 3) {
+			throw DetectionError("Failed to detect 3 filled circles")
+		}
+
 		return contourInfosToNumbers(contourInfos)
 	}
 
@@ -118,10 +139,13 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 		// Loop through each column
 		for (col in 0 until 3) {
 			// Get contours for the current column and sort by rows
-			val colContours = contoursSorted.subList(col * 10, (col + 1) * 10)
-				.sortedBy { Imgproc.boundingRect(it).y }
+			val colContours =
+				contoursSorted.subList(col * 10, (col + 1) * 10)
+					.sortedBy { Imgproc.boundingRect(it).y }
 
-			val darkestRow = getDarkestRow(colContours)
+			val darkestRow = getDarkestRow(colContours) ?: 0
 
 			// Append the darkest row for the current column to the list
 			darkestRows.add(darkestRow)
@@ -144,7 +168,7 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 			contours,
 			hierarchy,
 			Imgproc.RETR_EXTERNAL,
-			Imgproc.CHAIN_APPROX_SIMPLE
+			Imgproc.CHAIN_APPROX_SIMPLE,
 		)
 
 		// Initialize a list to store filtered contours
@@ -164,7 +188,7 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 			} else {
 				Log.d(
 					"ContourOMRHelper",
-					"Contour with aspect ratio $ar and size ${rect.width} x ${rect.height} filtered out"
+					"Contour with aspect ratio $ar and size ${rect.width} x ${rect.height} filtered out",
 				)
 			}
 		}
@@ -172,27 +196,18 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 		return filteredContours
 	}
 
-	fun annotateImage(contourNumber: Int): Bitmap {
-		var annotatedImg = currentSectionGray!!.clone()
-		val contours = getAllContours()
-		annotatedImg =
-			ImageAnnotationHelper.annotateContourOMR(annotatedImg, contours, contourNumber)
-
-		val annotatedImageBitmap = Bitmap.createBitmap(
-			annotatedImg.width(),
-			annotatedImg.height(),
-			Bitmap.Config.ARGB_8888
-		)
-		Utils.matToBitmap(annotatedImg, annotatedImageBitmap)
-		return annotatedImageBitmap
-	}
-
 	override fun detect(section: OMRSection): Int {
 		val omrSectionImage = config.omrCropper.crop(section)
 
-		// Convert image to grayscale
-		val gray = Mat()
-		Imgproc.cvtColor(omrSectionImage, gray, Imgproc.COLOR_BGR2GRAY)
+		// Convert the image to grayscale if it is not already
+		val gray =
+			if (omrSectionImage.channels() != 1) {
+				val grayImageMat = Mat()
+				Imgproc.cvtColor(omrSectionImage, grayImageMat, Imgproc.COLOR_BGR2GRAY)
+				grayImageMat
+			} else {
+				omrSectionImage
+			}
 
 		// Apply binary thresholding
 		val binary = Mat()
@@ -201,7 +216,7 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 			binary,
 			0.0,
 			255.0,
-			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE
+			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE,
 		)
 
 		// Update states
@@ -213,7 +228,7 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 		return if (contours.size != 30) {
 			Log.d(
 				"ContourOMRHelper",
-				"Some circles are not detected, considering only filled circles"
+				"Some circles are not detected, considering only filled circles",
 			)
 			predictForFilledCircle(contours)
 		} else {
@@ -221,4 +236,26 @@ class ContourOMRHelper(private val config: ContourOMRHelperConfig) : OMRHelper(c
 			compareAll(contours)
 		}
 	}
+
+	// Get Section Position For Annotating Purpose
+	fun getSectionPosition(section: OMRSection): Rect {
+		return config.omrCropper.sectionPosition(section)
+	}
+
+	// Annotating Image For Testing Purpose
+	fun annotateImage(contourNumber: Int): Bitmap {
+		var annotatedImg = currentSectionGray!!.clone()
+		val contours = getAllContours()
+		annotatedImg =
+			ImageAnnotationHelper.annotateContourOMR(annotatedImg, contours, contourNumber)
+
+		val annotatedImageBitmap =
+			Bitmap.createBitmap(
+				annotatedImg.width(),
+				annotatedImg.height(),
+				Bitmap.Config.ARGB_8888,
+			)
+		Utils.matToBitmap(annotatedImg, annotatedImageBitmap)
+		return annotatedImageBitmap
+	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRConfigDetector.kt b/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRConfigDetector.kt
index 8f542416cee9391810d19ce1dee6b497e509bae2..81b7e87d87cc409b0b7ba48873deb4f9c25c5aad 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRConfigDetector.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRConfigDetector.kt
@@ -23,10 +23,11 @@ object OMRConfigDetector {
 	 */
 	fun loadConfiguration(context: Context) {
 		if (!this::loadedConfig.isInitialized) {
-			job = CoroutineScope(Dispatchers.IO).launch {
-				loadedConfig = OMRConfigRepository.loadConfigurations(context)
-					?: throw Exception("Failed to load OMR Configuration!")
-			}
+			job =
+				CoroutineScope(Dispatchers.IO).launch {
+					loadedConfig = OMRConfigRepository.loadConfigurations(context)
+						?: throw Exception("Failed to load OMR Configuration!")
+				}
 		}
 	}
 
@@ -36,8 +37,7 @@ object OMRConfigDetector {
 	 * @return Triple of OMR configuration, the ID of the detected AprilTag,
 	 * and the image's tag corners that was used for configuration detector
 	 */
-	suspend fun detectConfiguration(imageMat: Mat):
-		Triple<OMRConfigurationParameter, String, Mat>? {
+	suspend fun detectConfiguration(imageMat: Mat): Triple<OMRConfigurationParameter, String, Mat>? {
 		job?.join().also { job = null }
 		val configs = loadedConfig.omrConfigs
 
@@ -54,7 +54,7 @@ object OMRConfigDetector {
 					} else {
 						Log.e(
 							"OMRConfigurationDetector",
-							"Multiple tags detected, unable to determine configuration"
+							"Multiple tags detected, unable to determine configuration",
 						)
 						result = null
 						break
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRHelper.kt
index fd34cc0a7f39512a6666b5a43c9ac333d1c02ff7..efdbb5eff0a190c90484b056296a60a0b4dc5840 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/omr/OMRHelper.kt
@@ -2,10 +2,35 @@ package com.k2_9.omrekap.utils.omr
 
 import com.k2_9.omrekap.data.configs.omr.OMRHelperConfig
 import com.k2_9.omrekap.data.configs.omr.OMRSection
+import kotlin.math.abs
 import kotlin.math.floor
 
 abstract class OMRHelper(private val config: OMRHelperConfig) {
-	data class ContourInfo(val center: Pair<Int, Int>, val size: Pair<Int, Int>)
+	class ContourInfo(val center: Pair<Int, Int>, val size: Pair<Int, Int>) {
+		fun isOverlapping(other: ContourInfo): Boolean {
+			return isColumnOverlapping(other) && isRowOverlapping(other)
+		}
+
+		fun isColumnOverlapping(other: ContourInfo): Boolean {
+			val x1 = center.first
+			val x2 = other.center.first
+			val w1 = size.first
+			val w2 = other.size.first
+
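+			// Two contours overlap horizontally when their centers are closer than the sum of their half-widths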
+			return abs(x1 - x2) * 2 < w1 + w2
+		}
+
+		fun isRowOverlapping(other: ContourInfo): Boolean {
+			val y1 = center.second
+			val y2 = other.center.second
+			val h1 = size.second
+			val h2 = other.size.second
+
+			return abs(y1 - y2) * 2 < h1 + h2
+		}
+	}
+
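+	/** Thrown when the filled circles of an OMR section cannot be detected reliably. */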
+	class DetectionError(message: String) : Exception(message)
 
 	protected fun getCombinedNumbers(numbers: List<Int>): Int {
 		// Combine the detected numbers into a single integer
@@ -13,9 +38,10 @@ abstract class OMRHelper(private val config: OMRHelperConfig) {
 	}
 
 	protected fun contourInfosToNumbers(contourInfos: List<ContourInfo?>): Int {
-		// TODO: consider gap height between circles
 		// Return the detected numbers based on the vertical position of the filled circles for each column
-		require(contourInfos.size == 3)
+		if (contourInfos.size != 3) {
+			throw DetectionError("Filled circles were not detected correctly")
+		}
 
 		val columnHeight = config.omrCropper.config.omrSectionSize.second // Define the column height based on your image
 
@@ -28,9 +54,7 @@ abstract class OMRHelper(private val config: OMRHelperConfig) {
 			} else {
 				// Detect number based on vertical position of the contour
 				val centerY = contourInfo.center.second
-				val h = contourInfo.size.second
-
-				val columnIndex = floor(((centerY.toDouble() - h.toDouble() / 2.0) / columnHeight.toDouble()) * 10).toInt()
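+				// Each of the 10 digit rows spans 1/10 of the section height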
+				val columnIndex = floor((centerY.toDouble() / columnHeight.toDouble()) * 10).toInt()
 
 				result.add(columnIndex)
 			}
@@ -38,11 +62,43 @@ abstract class OMRHelper(private val config: OMRHelperConfig) {
 		return getCombinedNumbers(result)
 	}
 
-	protected fun filterContourInfos(contourInfos: List<ContourInfo?>): List<ContourInfo?> {
-		// TODO: Handle when 1 column has more than 1 filled circle
-		// TODO: Handle when no filled circle for each column (assume that the number is 0, with null as representation of the ContourInfo)
+	protected fun filterContourInfos(
+		contourInfos: List<ContourInfo>,
+		filledIntensities: List<Double>,
+	): List<ContourInfo?> {
+		val mutableContourInfos = contourInfos.toMutableList()
+		val uniqueContourInfos = mutableListOf<ContourInfo?>()
+
+		// Group by overlapping contour infos and choose the one with the highest intensity
+		for (i in 0 until mutableContourInfos.size - 1) {
+			if (mutableContourInfos[i].isColumnOverlapping(mutableContourInfos[i + 1])) {
+				if (filledIntensities[i] > filledIntensities[i + 1]) {
+					mutableContourInfos[i + 1] = mutableContourInfos[i]
+				}
+				continue
+			} else {
+				uniqueContourInfos.add(mutableContourInfos[i])
+			}
+		}
+
+		if (mutableContourInfos.isNotEmpty()) {
+			uniqueContourInfos.add(mutableContourInfos.last())
+		}
+
+		assert(uniqueContourInfos.size <= 3)
+
+		val sectionWidth = config.omrCropper.config.omrSectionSize.first
+		val finalContourInfos = arrayOfNulls<ContourInfo>(3)
+
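+		// Assign each remaining contour to its digit column by horizontal position; columns with no filled circle stay null (treated as 0)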
+		uniqueContourInfos.forEach { contourInfo ->
+			if (contourInfo != null) {
+				val centerX = contourInfo.center.first
+				val columnIndex = floor((centerX.toDouble() / sectionWidth) * 3).toInt()
+				finalContourInfos[columnIndex] = contourInfo
+			}
+		}
 
-		return contourInfos
+		return finalContourInfos.toList()
 	}
 
 	abstract fun detect(section: OMRSection): Int
diff --git a/app/src/main/java/com/k2_9/omrekap/utils/omr/TemplateMatchingOMRHelper.kt b/app/src/main/java/com/k2_9/omrekap/utils/omr/TemplateMatchingOMRHelper.kt
index d36e7ca5064e84aea973fc44323fcbe461350799..d6a481447dd3c0196607dc6daf28611fca841452 100644
--- a/app/src/main/java/com/k2_9/omrekap/utils/omr/TemplateMatchingOMRHelper.kt
+++ b/app/src/main/java/com/k2_9/omrekap/utils/omr/TemplateMatchingOMRHelper.kt
@@ -15,9 +15,7 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 	private var currentSectionGray: Mat? = null
 	private var currentSectionBinary: Mat? = null
 
-	private fun getMatchRectangles(): List<Rect> {
-		// TODO: fix algorithm bug
-
+	private fun getMatchRectangles(): List<Pair<Rect, Double>> {
 		// Load the template image
 		val template = config.template
 
@@ -28,7 +26,7 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 			templateBinary,
 			0.0,
 			255.0,
-			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE
+			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE,
 		)
 
 		// Perform template matching
@@ -37,7 +35,7 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 			currentSectionBinary,
 			templateBinary,
 			result,
-			Imgproc.TM_CCOEFF_NORMED
+			Imgproc.TM_CCOEFF_NORMED,
 		)
 
 		// Set a threshold for template matching result
@@ -58,29 +56,30 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 		}
 
 		// Get the bounding rectangles for the matched locations
-		val matchedRectangles = ArrayList<Rect>()
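+		// Keep each match's similarity score alongside its rectangle so overlapping matches can be ranked later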
+		val matchedRectangles = ArrayList<Pair<Rect, Double>>()
 		for (point in locations) {
 			val locX = point.x.toInt()
 			val locY = point.y.toInt()
-			val rect = Rect(locX, locY, template.width(), template.height())
-			matchedRectangles.add(rect)
+			val rect = Rect(locX, locY, template!!.width(), template.height())
+			matchedRectangles.add(Pair(rect, result.get(locY, locX)[0]))
 		}
 
 		return matchedRectangles
 	}
 
-	private fun getContourInfos(matchedRectangles: List<Rect>): List<ContourInfo?> {
+	private fun getContourInfos(matchedRectangles: List<Pair<Rect, Double>>): Pair<List<ContourInfo>, List<Double>> {
 		// Initialize a set to keep track of added rectangles
 		val addedRectangles = mutableSetOf<Rect>()
 
 		val contourInfos = mutableListOf<ContourInfo>()
+		val similarities = matchedRectangles.map { it.second }
 
 		// Iterate through the rectangles
 		for (rect in matchedRectangles) {
-			val x = rect.x
-			val y = rect.y
-			val w = rect.width
-			val h = rect.height
+			val x = rect.first.x
+			val y = rect.first.y
+			val w = rect.first.width
+			val h = rect.first.height
 
 			// Calculate the center of the rectangle
 			val centerX = x + w / 2
@@ -104,20 +103,27 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 			}
 		}
 
-		// short by center_x
-		contourInfos.sortBy { it.center.first }
+		// Pair each contour with its similarity score and sort both by center_x,
+		// so every score stays aligned with its own contour
+		val (sortedContours, sortedSimilarities) =
+			contourInfos.zip(similarities)
+				.sortedBy { it.first.center.first }
+				.unzip()
 
-		return contourInfos.toList()
+		return Pair(sortedContours, sortedSimilarities)
 	}
 
 	fun annotateImage(contourNumber: Int): Bitmap {
 		val annotatedImg = currentSectionGray!!.clone()
 		val matchedRectangles = getMatchRectangles()
-		val res = ImageAnnotationHelper.annotateTemplateMatchingOMR(
-			annotatedImg,
-			matchedRectangles,
-			contourNumber
-		)
+		val res =
+			ImageAnnotationHelper.annotateTemplateMatchingOMR(
+				annotatedImg,
+				matchedRectangles.map { it.first },
+				contourNumber,
+			)
 
 		// Convert the annotated Mat to Bitmap
 		val annotatedImageBitmap =
@@ -133,9 +139,15 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 	override fun detect(section: OMRSection): Int {
 		val omrSectionImage = config.omrCropper.crop(section)
 
-		// Convert image to grayscale
-		val gray = Mat()
-		Imgproc.cvtColor(omrSectionImage, gray, Imgproc.COLOR_BGR2GRAY)
+		// Convert the image to grayscale if it is not already
+		val gray =
+			if (omrSectionImage.channels() != 1) {
+				val grayImageMat = Mat()
+				Imgproc.cvtColor(omrSectionImage, grayImageMat, Imgproc.COLOR_BGR2GRAY)
+				grayImageMat
+			} else {
+				omrSectionImage
+			}
 
 		// Apply binary thresholding
 		val binary = Mat()
@@ -144,7 +156,7 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 			binary,
 			0.0,
 			255.0,
-			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE
+			Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_TRIANGLE,
 		)
 
 		// Update states
@@ -154,7 +166,11 @@ class TemplateMatchingOMRHelper(private val config: TemplateMatchingOMRHelperCon
 		val matchedRectangles = getMatchRectangles()
 
 		val contourInfos = getContourInfos(matchedRectangles)
-		val filteredContourInfos = filterContourInfos(contourInfos.toList())
+		val filteredContourInfos = filterContourInfos(contourInfos.first, contourInfos.second)
+
+		if (filteredContourInfos.count { it != null } != 3) {
+			throw DetectionError("Failed to detect 3 filled circles")
+		}
 
 		return contourInfosToNumbers(filteredContourInfos.toList())
 	}
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/CameraActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/CameraActivity.kt
index a022ed336ee429da56682151d2a03b84293d7b52..82bf49afe7b51737f941b17a4d4d0cca4fce8390 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/CameraActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/CameraActivity.kt
@@ -32,11 +32,11 @@ import java.util.concurrent.Executors
 
 class CameraActivity : AppCompatActivity() {
 	companion object {
-		const val EXTRA_NAME_IMAGE_URI_STRING = "IMAGE_URI_STRING"
+		const val EXTRA_NAME_IS_FROM_RESULT = "IS_FROM_RESULT"
 		const val EXTRA_NAME_IS_FROM_CAMERA_RESULT = "IS_FROM_CAMERA_RESULT"
 	}
 
-	private var imageUriString: String? = null
+	private var isFromResult: Boolean = false
 	private var isFromCameraResult: Boolean = false
 
 	private lateinit var previewView: PreviewView
@@ -54,16 +54,15 @@ class CameraActivity : AppCompatActivity() {
 
 		val intent = Intent(this, newIntentClass)
 
-		intent.putExtra(ResultActivity.EXTRA_NAME_IMAGE_URI_STRING, imageUriString)
 		intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT or Intent.FLAG_ACTIVITY_SINGLE_TOP)
 		startActivity(intent)
 	}
 
 	private fun handleBackNavigation() {
-		if (imageUriString == null) {
-			onBackHome()
-		} else {
+		if (isFromResult) {
 			onBackResult()
+		} else {
+			onBackHome()
 		}
 	}
 
@@ -71,7 +70,7 @@ class CameraActivity : AppCompatActivity() {
 		super.onNewIntent(intent)
 
 		if (intent != null) {
-			imageUriString = intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
+			isFromResult = intent.getBooleanExtra(EXTRA_NAME_IS_FROM_RESULT, false)
 		}
 	}
 
@@ -79,7 +78,7 @@ class CameraActivity : AppCompatActivity() {
 		super.onCreate(savedInstanceState)
 		setContentView(R.layout.activity_camera)
 
-		imageUriString = intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
+		isFromResult = intent.getBooleanExtra(EXTRA_NAME_IS_FROM_RESULT, false)
 		isFromCameraResult = intent.getBooleanExtra(EXTRA_NAME_IS_FROM_CAMERA_RESULT, false)
 
 		// back navigation
@@ -182,7 +181,6 @@ class CameraActivity : AppCompatActivity() {
 				override fun onCaptureSuccess(image: ImageProxy) {
 					super.onCaptureSuccess(image)
 					playShutterSound()
-					freezeImage(image)
 					runOnUiThread {
 						Toast.makeText(this@CameraActivity, "Photo taken", Toast.LENGTH_SHORT).show()
 					}
@@ -191,6 +189,7 @@ class CameraActivity : AppCompatActivity() {
 							saveImageOnCache(image)
 						}
 					}
+					freezeImage(image)
 				}
 			},
 		)
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/ExpandImageActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/ExpandImageActivity.kt
index a83ac5d4062c19ba9f5b60d2e5a9ca5a7bf37408..9440a4bf20bc9bcd10db1a2bb47a40da5e317e5a 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/ExpandImageActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/ExpandImageActivity.kt
@@ -1,6 +1,6 @@
 package com.k2_9.omrekap.views.activities
 
-import android.net.Uri
+import android.graphics.BitmapFactory
 import android.os.Bundle
 import androidx.appcompat.app.AppCompatActivity
 import com.github.chrisbanes.photoview.PhotoView
@@ -21,7 +21,13 @@ class ExpandImageActivity : AppCompatActivity() {
 		val imageResource = intent.getStringExtra(EXTRA_NAME_IMAGE_RESOURCE)
 
 		if (imageResource != null) {
-			photoView.setImageURI(Uri.parse(imageResource))
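+			// The extra holds the name of a temporary file written by ResultPageFragment;
+			// read the bitmap from internal storage and delete the file afterwards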
+			val stream = openFileInput(imageResource)
+			val bitmap = BitmapFactory.decodeStream(stream)
+			stream.close()
+
+			photoView.setImageBitmap(bitmap)
+
+			deleteFile(imageResource)
 		} else {
 			// Retrieve the image resource ID from the intent
 			val drawableResource = intent.getIntExtra(EXTRA_NAME_DRAWABLE_RESOURCE, 0)
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/MainActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/MainActivity.kt
index 19e43edd1877166e248a1f943ea2f5052ebebab0..4301977c8b44690ea3cf8ff47e9b72f4c37d5484 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/MainActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/MainActivity.kt
@@ -55,7 +55,6 @@ abstract class MainActivity : AppCompatActivity() {
 
 		// Create an instance of your fragment
 		val fragment = getFragment(intent)
-
 		fragmentTransaction.replace(R.id.fragment_container_view, fragment)
 		fragmentTransaction.commit()
 	}
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/PreviewActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/PreviewActivity.kt
index 7b813002e490e17f39645210df990fac0cf0c467..240353e7fb36bdba05d31a7b1bf4440eb645ca60 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/PreviewActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/PreviewActivity.kt
@@ -21,7 +21,7 @@ import org.opencv.android.OpenCVLoader
 
 class PreviewActivity : AppCompatActivity() {
 	companion object {
-		const val EXTRA_NAME_IMAGE_URI_STRING = ResultActivity.EXTRA_NAME_IMAGE_URI_STRING
+		const val EXTRA_NAME_IMAGE_URI_STRING = "IMAGE_URI_STRING"
 		const val EXTRA_NAME_IS_RESET = ResultActivity.EXTRA_NAME_IS_RESET
 		const val EXTRA_NAME_IS_FROM_CAMERA = "IS_FROM_CAMERA"
 	}
@@ -48,7 +48,8 @@ class PreviewActivity : AppCompatActivity() {
 		}
 
 		val bitmapOptions = BitmapFactory.Options()
-		bitmapOptions.inPreferredConfig = Bitmap.Config.ARGB_8888
+		bitmapOptions.inPreferredConfig = Bitmap.Config.ALPHA_8
+		bitmapOptions.inScaled = false
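+		// Decode the corner pattern as a single-channel (ALPHA_8), unscaled bitmap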
 		val cornerPatternBitmap: Bitmap = BitmapFactory.decodeResource(resources, R.raw.corner_pattern, bitmapOptions)
 
 		CropHelper.loadPattern(cornerPatternBitmap)
@@ -78,6 +79,11 @@ class PreviewActivity : AppCompatActivity() {
 				newIntent.putExtras(extras)
 			}
 
+			newIntent.putExtra(
+				EXTRA_NAME_IS_RESET,
+				intent.extras?.getBoolean(EXTRA_NAME_IS_RESET, false) ?: false,
+			)
+
 			newIntent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT or Intent.FLAG_ACTIVITY_SINGLE_TOP)
 			finish()
 
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultActivity.kt
index fcdca172937c92f358445c6176915fe38f66a75b..d7433f255c0fcb91e307169ab2c9249d76b01d6c 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultActivity.kt
@@ -2,7 +2,6 @@ package com.k2_9.omrekap.views.activities
 
 import android.Manifest
 import android.content.Intent
-import android.graphics.Bitmap
 import android.net.Uri
 import android.os.Build
 import android.os.Bundle
@@ -13,6 +12,8 @@ import androidx.activity.viewModels
 import androidx.fragment.app.Fragment
 import androidx.lifecycle.Observer
 import androidx.lifecycle.lifecycleScope
+import com.k2_9.omrekap.R
+import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
 import com.k2_9.omrekap.data.models.ImageSaveData
 import com.k2_9.omrekap.data.view_models.ImageDataViewModel
 import com.k2_9.omrekap.utils.ImageSaveDataHolder
@@ -28,7 +29,6 @@ import org.opencv.android.OpenCVLoader
 
 abstract class ResultActivity : MainActivity() {
 	companion object {
-		const val EXTRA_NAME_IMAGE_URI_STRING = "IMAGE_URI_STRING"
 		const val EXTRA_NAME_IS_RESET = "IS_RESET"
 	}
 
@@ -36,42 +36,33 @@ abstract class ResultActivity : MainActivity() {
 	private var saveFileJob: Job? = null
 	private var startSaveJob: Boolean = false
 	private val omrHelperObserver =
-		Observer<ImageSaveData> { newValue ->
-			saveFile()
+		Observer<ImageSaveData?> { data ->
+			if (data == null) {
+				Log.e("MainActivity", "Data is null")
+				return@Observer
+			}
 
-//			TODO: save file when data is not empty after implemented
-//			if (newValue.data.isNotEmpty()) {
-//				saveFile()
-//			}
+			saveFile()
 		}
 
-	private lateinit var imageUriString: String
-	private lateinit var imageBitmap: Bitmap
 	private var isReset: Boolean = false // reset ViewModel for new OMR process
 	private var isCreated = false
 
 	private fun updateStates(intent: Intent) {
 		isReset = intent.getBooleanExtra(EXTRA_NAME_IS_RESET, false)
-		Log.d("RESET GA YA", isReset.toString())
-		val uriString =
-			intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
-				?: throw IllegalArgumentException("Image URI string is null")
-
-		imageUriString = uriString
 
 		if (isReset) {
-			// TODO: reset view model (perlu diskusi dulu tentang stop proses kalau ganti page)
-			Log.d("CREATED GA YA", isCreated.toString())
+			viewModel.resetState()
+
 			if (isCreated) {
 				setFragment(intent)
 			}
 		}
 
-		imageBitmap = SaveHelper.uriToBitmap(applicationContext, Uri.parse(imageUriString))
-
 		if (viewModel.data.value == null) {
-			viewModel.processImage(ImageSaveDataHolder.get())
 			viewModel.data.observe(this, omrHelperObserver)
+			val templateLoader = CircleTemplateLoader(applicationContext, R.raw.circle_template)
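+			// Start processing only after the observer is registered so the first result is not missed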
+			viewModel.processImage(ImageSaveDataHolder.get(), templateLoader)
 		}
 	}
 
@@ -95,13 +86,8 @@ abstract class ResultActivity : MainActivity() {
 	override fun getFragment(intent: Intent): Fragment {
 		val fragment = ResultPageFragment()
 
-		val uriString =
-			intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
-				?: throw IllegalArgumentException("Image URI string is null")
-
 		val arguments =
 			Bundle().apply {
-				putString(ResultPageFragment.ARG_NAME_IMAGE_URI_STRING, uriString)
 			}
 
 		// Set the arguments for the fragment
@@ -137,7 +123,7 @@ abstract class ResultActivity : MainActivity() {
 			PermissionHelper.requirePermission(
 				this,
 				Manifest.permission.WRITE_EXTERNAL_STORAGE,
-				false
+				false,
 			) {}
 		}
 
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromCameraActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromCameraActivity.kt
index 4213381b9b1bd9af2a8a2e0551456bf6a1386f36..92adac23095f6174c64902b4d42563d71529be98 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromCameraActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromCameraActivity.kt
@@ -1,13 +1,11 @@
 package com.k2_9.omrekap.views.activities
 
 import android.content.Intent
-import android.util.Log
 
 class ResultFromCameraActivity : ResultActivity() {
 	private fun onBackCamera() {
 		val intent = Intent(this, CameraActivity::class.java)
 		intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT or Intent.FLAG_ACTIVITY_SINGLE_TOP)
-		Log.d("wtf is this", "onBackCamera: ")
 		finish()
 		startActivity(intent)
 	}
@@ -17,12 +15,8 @@ class ResultFromCameraActivity : ResultActivity() {
 	}
 
 	override fun getCameraIntent(): Intent {
-		val uriString =
-			intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
-				?: throw IllegalArgumentException("Image URI string is null")
-
 		val intent = Intent(this, CameraActivity::class.java)
-		intent.putExtra(CameraActivity.EXTRA_NAME_IMAGE_URI_STRING, uriString)
+		intent.putExtra(CameraActivity.EXTRA_NAME_IS_FROM_RESULT, true)
 		intent.putExtra(CameraActivity.EXTRA_NAME_IS_FROM_CAMERA_RESULT, true)
 		intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT or Intent.FLAG_ACTIVITY_SINGLE_TOP)
 		return intent
diff --git a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromGalleryActivity.kt b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromGalleryActivity.kt
index 29baa0896d016fd98041b8ff410eaae30e6251cb..44030aad57470f9c49e6a6b7f91ef95fbe5d7321 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromGalleryActivity.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/activities/ResultFromGalleryActivity.kt
@@ -16,12 +16,8 @@ class ResultFromGalleryActivity : ResultActivity() {
 	}
 
 	override fun getCameraIntent(): Intent {
-		val uriString =
-			intent.getStringExtra(EXTRA_NAME_IMAGE_URI_STRING)
-				?: throw IllegalArgumentException("Image URI string is null")
-
 		val intent = Intent(this, CameraActivity::class.java)
-		intent.putExtra(CameraActivity.EXTRA_NAME_IMAGE_URI_STRING, uriString)
+		intent.putExtra(CameraActivity.EXTRA_NAME_IS_FROM_RESULT, true)
 		intent.putExtra(CameraActivity.EXTRA_NAME_IS_FROM_CAMERA_RESULT, false)
 		intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT or Intent.FLAG_ACTIVITY_SINGLE_TOP)
 		return intent
diff --git a/app/src/main/java/com/k2_9/omrekap/views/adapters/ResultAdapter.kt b/app/src/main/java/com/k2_9/omrekap/views/adapters/ResultAdapter.kt
index 44c70928f9c6ca6c59e6a39b695c21f388b09b43..1a55f56e1374778354f30123b35af8f5d785050d 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/adapters/ResultAdapter.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/adapters/ResultAdapter.kt
@@ -3,16 +3,33 @@ import android.view.LayoutInflater
 import android.view.View
 import android.view.ViewGroup
 import android.widget.TextView
+import androidx.recyclerview.widget.DiffUtil
+import androidx.recyclerview.widget.ListAdapter
 import androidx.recyclerview.widget.RecyclerView
 import com.k2_9.omrekap.R
 
-class ResultAdapter(private val dataList: List<Pair<String, String>>) :
-	RecyclerView.Adapter<ResultAdapter.ResultViewHolder>() {
+class ResultAdapter : ListAdapter<Pair<String, String>, ResultAdapter.ResultViewHolder>(ResultDiffCallback()) {
 	inner class ResultViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) {
 		val keyTextView: TextView = itemView.findViewById(R.id.result_candidate_text)
 		val valueTextView: TextView = itemView.findViewById(R.id.result_count_text)
 	}
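+	// DiffUtil callback so the ListAdapter only rebinds rows that changed between submitted lists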
 
+	class ResultDiffCallback : DiffUtil.ItemCallback<Pair<String, String>>() {
+		override fun areItemsTheSame(
+			oldItem: Pair<String, String>,
+			newItem: Pair<String, String>,
+		): Boolean {
+			// The candidate name identifies the row; its vote count may change between lists
+			return oldItem.first == newItem.first
+		}
+
+		override fun areContentsTheSame(
+			oldItem: Pair<String, String>,
+			newItem: Pair<String, String>,
+		): Boolean {
+			return oldItem == newItem
+		}
+	}
+
 	override fun onCreateViewHolder(
 		parent: ViewGroup,
 		viewType: Int,
@@ -27,12 +44,8 @@ class ResultAdapter(private val dataList: List<Pair<String, String>>) :
 		holder: ResultViewHolder,
 		position: Int,
 	) {
-		val item = dataList[position]
+		val item = getItem(position)
 		holder.keyTextView.text = item.first
 		holder.valueTextView.text = item.second
 	}
-
-	override fun getItemCount(): Int {
-		return dataList.size
-	}
 }
diff --git a/app/src/main/java/com/k2_9/omrekap/views/fragments/ResultPageFragment.kt b/app/src/main/java/com/k2_9/omrekap/views/fragments/ResultPageFragment.kt
index 03fcfb83762e7499f3b0177583edbaa3ba4c902c..ebe8804b525a40f1d4b0fc65cf82c886a3ce2a7b 100644
--- a/app/src/main/java/com/k2_9/omrekap/views/fragments/ResultPageFragment.kt
+++ b/app/src/main/java/com/k2_9/omrekap/views/fragments/ResultPageFragment.kt
@@ -1,7 +1,8 @@
 package com.k2_9.omrekap.views.fragments
 
+import android.content.Context
 import android.content.Intent
-import android.net.Uri
+import android.graphics.Bitmap
 import android.os.Build
 import android.os.Bundle
 import android.view.LayoutInflater
@@ -10,14 +11,22 @@ import android.view.ViewGroup
 import android.widget.ImageButton
 import android.widget.ImageView
 import android.widget.ProgressBar
+import android.widget.TextView
 import androidx.cardview.widget.CardView
 import androidx.fragment.app.Fragment
+import androidx.lifecycle.ViewModelProvider
 import androidx.recyclerview.widget.LinearLayoutManager
 import androidx.recyclerview.widget.RecyclerView
 import com.k2_9.omrekap.R
+import com.k2_9.omrekap.data.view_models.ImageDataViewModel
+import com.k2_9.omrekap.utils.ImageSaveDataHolder
 import com.k2_9.omrekap.views.activities.ExpandImageActivity
 import com.k2_9.omrekap.views.activities.HomeActivity
 import com.k2_9.omrekap.views.adapters.ResultAdapter
+import java.io.FileOutputStream
+import java.time.Instant
+import java.time.ZoneId
+import java.time.format.DateTimeFormatter
 
 /**
  * A simple [Fragment] subclass.
@@ -26,14 +35,18 @@ import com.k2_9.omrekap.views.adapters.ResultAdapter
  */
 class ResultPageFragment : Fragment() {
 	companion object {
-		const val ARG_NAME_IS_FROM_CAMERA = "IS_FROM_CAMERA"
-		const val ARG_NAME_IMAGE_URI_STRING = "IMAGE_URI_STRING"
+		const val BITMAP_FILE_NAME = "temp.png"
 	}
 
-	private var imageUriString: String? = null
+	private var imageBitmap: Bitmap? = null
 
 	private lateinit var recyclerView: RecyclerView
 	private lateinit var resultAdapter: ResultAdapter
+	private lateinit var documentImageView: ImageView
+	private lateinit var timestampTextView: TextView
+	private lateinit var failureTextView: TextView
+
+	private lateinit var viewModel: ImageDataViewModel
 
 	private fun onHomeButtonClick() {
 		val intent = Intent(context, HomeActivity::class.java)
@@ -43,18 +56,58 @@ class ResultPageFragment : Fragment() {
 		startActivity(intent)
 	}
 
+	private fun timestampToString(timestamp: Instant): String {
+		val formatter = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm")
+		return formatter.format(timestamp.atZone(ZoneId.systemDefault()))
+	}
+
+	private fun showFailureText() {
+		failureTextView.visibility = View.VISIBLE
+		recyclerView.visibility = View.GONE
+	}
+
+	private fun hideFailureText() {
+		failureTextView.visibility = View.GONE
+		recyclerView.visibility = View.VISIBLE
+	}
+
 	override fun onCreate(savedInstanceState: Bundle?) {
 		super.onCreate(savedInstanceState)
 
-		// Retrieve the arguments
-		val args = arguments
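+		// Use the activity-scoped view model so this fragment observes the same OMR result as ResultActivity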
+		viewModel = ViewModelProvider(requireActivity())[ImageDataViewModel::class.java]
+
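+		// Show the held image until the observer below swaps in the annotated result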
+		imageBitmap = ImageSaveDataHolder.get().annotatedImage
 
-		// Check if arguments are not null and retrieve values
-		if (args != null) {
-			imageUriString = args.getString(ARG_NAME_IMAGE_URI_STRING)
-			if (imageUriString == null) {
-				throw IllegalArgumentException("Image URI string is null")
+		viewModel.data.observe(this) {
+			if (it == null) {
+				return@observe
 			}
+
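+			// Convert the candidate-to-count map into the list shown by the adapter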
+			val dataList = it.data.toList()
+
+			if (dataList.isEmpty()) {
+				showFailureText()
+			} else {
+				hideFailureText()
+
+				val result =
+					dataList.map { (key, value) ->
+						key to (value?.toString() ?: "undetected")
+					}
+
+				resultAdapter.submitList(result)
+			}
+
+			val annotatedImage = it.annotatedImage
+
+			// change expand image
+			imageBitmap = annotatedImage
+
+			// change result image
+			documentImageView.setImageBitmap(annotatedImage)
+
+			// change timestamp
+			timestampTextView.text = timestampToString(it.timestamp)
 		}
 	}
 
@@ -68,13 +121,7 @@ class ResultPageFragment : Fragment() {
 		// Initialize your data (list of key-value pairs)
 		val resultData =
 			listOf(
-				Pair("Prabowo Subianto Djojohadikusumo", "270,20 juta"),
-				Pair("Key2", "Value2"),
-				Pair("Key3", "Value3"),
-				Pair("Key4", "Value4"),
-				Pair("Key5", "Value5"),
-				Pair("Key6", "Value6"),
-				// ... add more key-value pairs as needed
+				Pair("Candidate", "Vote count"),
 			)
 
 		// remove shadow
@@ -89,13 +136,22 @@ class ResultPageFragment : Fragment() {
 		// Set up RecyclerView
 		recyclerView = view.findViewById(R.id.result_recycler_view)
 		recyclerView.layoutManager = LinearLayoutManager(requireContext())
-		resultAdapter = ResultAdapter(resultData)
+		resultAdapter = ResultAdapter()
 		recyclerView.adapter = resultAdapter
 
+		resultAdapter.submitList(resultData)
+
 		// link image URI with view
-		val documentImageView: ImageView = view.findViewById(R.id.document_image)
-		documentImageView.tag = imageUriString
-		documentImageView.setImageURI(Uri.parse(imageUriString))
+		documentImageView = view.findViewById(R.id.document_image)
+		documentImageView.setImageBitmap(imageBitmap)
+
+		// timestamp text
+		timestampTextView = view.findViewById(R.id.result_timestamp)
+		timestampTextView.text = timestampToString(ImageSaveDataHolder.get().timestamp)
+
+		// failure text
+		failureTextView = view.findViewById(R.id.failure_text)
+		hideFailureText()
 
 		// remove progress bar
 		val progressLoader: ProgressBar = view.findViewById(R.id.progress_loader)
@@ -105,7 +161,7 @@ class ResultPageFragment : Fragment() {
 		val expandButton: ImageButton = view.findViewById(R.id.expand_button)
 		expandButton.setOnClickListener {
 			// Pass the image resource ID to ExpandImageActivity
-			val imageResource = documentImageView.tag ?: R.drawable.ic_image
+			val imageResource = imageBitmap ?: R.drawable.ic_image
 			val intent = Intent(requireContext(), ExpandImageActivity::class.java)
 
 			// Choose the appropriate constant based on the type of resource
@@ -113,8 +169,13 @@ class ResultPageFragment : Fragment() {
 				is Int -> {
 					intent.putExtra(ExpandImageActivity.EXTRA_NAME_DRAWABLE_RESOURCE, imageResource)
 				}
-				is String -> {
-					intent.putExtra(ExpandImageActivity.EXTRA_NAME_IMAGE_RESOURCE, imageResource.toString())
+				is Bitmap -> {
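+					// Write the bitmap to internal storage and pass only the file name;
+					// ExpandImageActivity reads the file and deletes it after displaying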
+					val stream: FileOutputStream =
+						requireActivity().openFileOutput(BITMAP_FILE_NAME, Context.MODE_PRIVATE)
+					imageResource.compress(Bitmap.CompressFormat.PNG, 100, stream)
+					stream.close()
+
+					intent.putExtra(ExpandImageActivity.EXTRA_NAME_IMAGE_RESOURCE, BITMAP_FILE_NAME)
 				}
 				else -> {
 					throw IllegalArgumentException("Unsupported resource type")
diff --git a/app/src/main/res/layout/fragment_result_page.xml b/app/src/main/res/layout/fragment_result_page.xml
index ff672cee005d00f30e58a21d61cb113f453da858..74543dff4827c49cc8d66f484a30e814a4071b01 100644
--- a/app/src/main/res/layout/fragment_result_page.xml
+++ b/app/src/main/res/layout/fragment_result_page.xml
@@ -148,6 +148,12 @@
                 android:layout_width="0dp"
                 android:layout_height="0dp"
                 android:layout_weight="2" />
+            <TextView
+                android:id="@+id/failure_text"
+                android:theme="@style/TextAppearance.ResultText"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/text_detection_failed"/>
             <androidx.recyclerview.widget.RecyclerView
                 android:id="@+id/result_recycler_view"
                 android:theme="@style/TextAppearance.ResultText"
diff --git a/app/src/main/res/raw/test.jpg b/app/src/main/res/raw/test.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..93fe43ab4ff4a1174eb96c1b521de7fba616654f
Binary files /dev/null and b/app/src/main/res/raw/test.jpg differ
diff --git a/app/src/main/res/raw/test2.jpg b/app/src/main/res/raw/test2.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6e7d18b21bba7d261b33bb9b01a12feb8ca27533
Binary files /dev/null and b/app/src/main/res/raw/test2.jpg differ
diff --git a/app/src/main/res/raw/test3.jpg b/app/src/main/res/raw/test3.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..420a4235b13e33b19ace1cf9e44fbd824bd16a8c
Binary files /dev/null and b/app/src/main/res/raw/test3.jpg differ
diff --git a/app/src/main/res/raw/test4.jpg b/app/src/main/res/raw/test4.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..15a95bdd4bf2f4a22799d8cbb41044ca91638935
Binary files /dev/null and b/app/src/main/res/raw/test4.jpg differ
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index e93284ccafd1838b645c8ecb336de1c24bff911f..f8bf14cc1a4521f1acbf3b2223c0216edd2e635e 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -13,4 +13,5 @@
     <string name="title_activity_preview">PreviewActivity</string>
     <string name="desc_accept_preview">accept preview</string>
     <string name="desc_reject_preview">reject preview</string>
+    <string name="text_detection_failed">Detection failed</string>
 </resources>