Commit 9e69eb85 authored by Michael Utama

Merge branch 'dev' into 'main'

sprint-3 release

See merge request !42
parents e68125a2 42e1b981
Tags: 3.0
Part of 4 merge requests: !62 fix: json writer, !61 fix: json writer, !59 Fix/context contour, !42 sprint-3 release

Showing changed files with 595 additions and 15 deletions
@@ -53,5 +53,8 @@ jobs:
      - name: Run unit tests
        run: ./gradlew test
+     - name: Run integration tests
+       run: ./gradlew connectedAndroidTest
      - name: Code formatting
        run: ./gradlew spotlessCheck
@@ -11,7 +11,8 @@
.DS_Store
/build
/captures
-/opencv
+/opencv/*
+!/opencv/build.gradle
.externalNativeBuild
.cxx
local.properties
@@ -27,11 +27,11 @@ android {
        }
    }
    compileOptions {
-       sourceCompatibility = JavaVersion.VERSION_17
-       targetCompatibility = JavaVersion.VERSION_17
+       sourceCompatibility = JavaVersion.VERSION_1_8
+       targetCompatibility = JavaVersion.VERSION_1_8
    }
    kotlinOptions {
-       jvmTarget = "17"
+       jvmTarget = "1.8"
    }
}
@@ -53,10 +53,16 @@ dependencies {
    implementation("androidx.camera:camera-view:${cameraxVersion}")
    implementation("androidx.camera:camera-extensions:${cameraxVersion}")
+   // JSON
+   // implementation("com.squareup.moshi:moshi:1.15.1")
+   // implementation("com.squareup.moshi:moshi-adapters:1.15.1")
+   // implementation("com.squareup.moshi:moshi-kotlin:1.15.1")
+   implementation("com.google.code.gson:gson:2.10.1")
    // OpenCV
-   implementation(project(":opencv"))
+   implementation("org.opencv:opencv:4.9.0")
    // Test
    testImplementation("junit:junit:4.13.2")
    androidTestImplementation("androidx.test.ext:junit:1.1.5")
    androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1")
...
package com.k2_9.omrekap.aprilTag

import android.util.Log
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import com.google.gson.Gson
import com.k2_9.omrekap.R
import com.k2_9.omrekap.data.repository.OMRConfigRepository
import com.k2_9.omrekap.utils.omr.OMRConfigurationDetector
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import org.junit.Test
import org.junit.runner.RunWith
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils
import org.opencv.core.Mat
import org.opencv.imgproc.Imgproc

@RunWith(AndroidJUnit4::class)
class AprilTagConfigDetectionTest {
    @Test
    fun test_detect() {
        val appContext = InstrumentationRegistry.getInstrumentation().targetContext
        OpenCVLoader.initLocal()
        val imageMat = Utils.loadResource(appContext, R.raw.example)
        val grayImageMat = Mat()
        // transform to grayscale for ArucoDetector
        Imgproc.cvtColor(imageMat, grayImageMat, Imgproc.COLOR_BGR2GRAY)
        CoroutineScope(Dispatchers.Default).launch {
            OMRConfigurationDetector.loadConfiguration(
                appContext
            )
            val result = OMRConfigurationDetector.detectConfiguration(grayImageMat)
            val gson = Gson()
            Log.d("ConfigDetectionTestx", gson.toJson(result))
            val compare = OMRConfigRepository.loadConfigurations(appContext)
            // val resultHash = result!!.first.hashCode()
            // val compareHash = compare!!.configs["102"].hashCode()
            // Log.d("ConfigDetectionTestx1", resultHash.toString())
            // Log.d("ConfigDetectionTestx1", compareHash.toString())
            // assert(resultHash == compareHash)
            val resultJSONString = gson.toJson(result!!.first)
            val compareJSONString = gson.toJson(compare!!.omrConfigs["102"])
            Log.d("ConfigDetectionTestx2", resultJSONString)
            Log.d("ConfigDetectionTestx2", compareJSONString)
            assert(resultJSONString == compareJSONString)
        }
    }
}
package com.k2_9.omrekap.aprilTag

import android.util.Log
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import com.k2_9.omrekap.R
import com.k2_9.omrekap.utils.AprilTagHelper
import org.junit.Test
import org.junit.runner.RunWith
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils

@RunWith(AndroidJUnit4::class)
class AprilTagHelperTest {
    private val helper: AprilTagHelper = AprilTagHelper

    @Test
    fun testAprilTagDetection() {
        OpenCVLoader.initLocal()
        val appContext = InstrumentationRegistry.getInstrumentation().targetContext
        // Load the image resource as a Bitmap
        val image = Utils.loadResource(appContext, R.raw.example)
        // Call the method to detect AprilTag
        val result = helper.getAprilTagId(image)
        Log.d("ContourOMRHelperTest", result.toString())
    }
}
package com.k2_9.omrekap.omr

import android.content.Context
import android.util.Log
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import com.google.gson.Gson
import com.k2_9.omrekap.R
import com.k2_9.omrekap.data.configs.omr.ContourOMRHelperConfig
import com.k2_9.omrekap.data.configs.omr.OMRCropper
import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
import com.k2_9.omrekap.data.configs.omr.OMRSection
import com.k2_9.omrekap.utils.SaveHelper
import com.k2_9.omrekap.utils.omr.ContourOMRHelper
import org.junit.Test
import org.junit.runner.RunWith
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils

@RunWith(AndroidJUnit4::class)
class ContourOMRHelperTest {
    private var helper: ContourOMRHelper
    private val appContext: Context

    init {
        OpenCVLoader.initLocal()
        appContext = InstrumentationRegistry.getInstrumentation().targetContext
        // Load the image resource as a Bitmap
        val image = Utils.loadResource(appContext, R.raw.example)
        val sectionPositions =
            mapOf(
                OMRSection.FIRST to Pair(780, 373),
                OMRSection.SECOND to Pair(0, 0),
                OMRSection.THIRD to Pair(0, 0),
            )
        val cropperConfig =
            OMRCropperConfig(
                image,
                Pair(140, 220),
                sectionPositions,
            )
        val cropper = OMRCropper(cropperConfig)
        val config =
            ContourOMRHelperConfig(
                cropper,
                12,
                30,
                0.5f,
                1.5f,
                0.9f,
                230,
            )
        Log.d("ContourOMRHelperTest", Gson().toJson(config))
        helper = ContourOMRHelper(config)
    }

    @Test
    fun test_detect() {
        val result = helper.detect(OMRSection.FIRST)
        val imageAnnotated = helper.annotateImage(result)
        Log.d("ContourOMRHelperTest", result.toString())
        assert(result == 172)
        SaveHelper.saveImage(appContext, imageAnnotated, "test", "test_detect")
    }
}
package com.k2_9.omrekap.omr

import android.content.Context
import android.graphics.Bitmap
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import com.k2_9.omrekap.R
import com.k2_9.omrekap.data.configs.omr.OMRCropper
import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
import com.k2_9.omrekap.data.configs.omr.OMRSection
import com.k2_9.omrekap.utils.SaveHelper
import org.junit.Test
import org.junit.runner.RunWith
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils

@RunWith(AndroidJUnit4::class)
class OMRCropperTest {
    private var cropper: OMRCropper
    private var appContext: Context

    init {
        OpenCVLoader.initLocal()
        appContext = InstrumentationRegistry.getInstrumentation().targetContext
        // Load the image resource as a Bitmap
        val image = Utils.loadResource(appContext, R.raw.example)
        val sectionPositions =
            mapOf(
                OMRSection.FIRST to Pair(780, 375),
                OMRSection.SECOND to Pair(0, 0),
                OMRSection.THIRD to Pair(0, 0),
            )
        val config =
            OMRCropperConfig(
                image,
                Pair(140, 225),
                sectionPositions,
            )
        cropper = OMRCropper(config)
    }

    @Test
    fun test_crop() {
        val result = cropper.crop(OMRSection.FIRST)
        val bitmap = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888)
        Utils.matToBitmap(result, bitmap)
        SaveHelper.saveImage(appContext, bitmap, "test", "test_crop.png")
        assert(result.width() == 140 && result.height() == 225)
    }
}
package com.k2_9.omrekap.omr

import android.content.Context
import android.util.Log
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import com.k2_9.omrekap.R
import com.k2_9.omrekap.data.configs.omr.CircleTemplateLoader
import com.k2_9.omrekap.data.configs.omr.OMRCropper
import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
import com.k2_9.omrekap.data.configs.omr.OMRSection
import com.k2_9.omrekap.data.configs.omr.TemplateMatchingOMRHelperConfig
import com.k2_9.omrekap.utils.SaveHelper
import com.k2_9.omrekap.utils.omr.TemplateMatchingOMRHelper
import org.junit.Test
import org.junit.runner.RunWith
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils
import org.opencv.core.Mat

@RunWith(AndroidJUnit4::class)
class TemplateMatchingOMRHelperTest {
    private var helper: TemplateMatchingOMRHelper
    private val image: Mat
    private val appContext: Context

    init {
        OpenCVLoader.initLocal()
        appContext = InstrumentationRegistry.getInstrumentation().targetContext
        // Load the image resource
        image = Utils.loadResource(appContext, R.raw.example)
        val sectionPositions =
            mapOf(
                OMRSection.FIRST to Pair(780, 373),
                OMRSection.SECOND to Pair(0, 0),
                OMRSection.THIRD to Pair(0, 0),
            )
        val cropperConfig =
            OMRCropperConfig(
                image,
                Pair(140, 220),
                sectionPositions,
            )
        val cropper = OMRCropper(cropperConfig)
        // Load the template image resource
        val templateLoader = CircleTemplateLoader(appContext, R.raw.circle_template)
        val config =
            TemplateMatchingOMRHelperConfig(
                cropper,
                templateLoader,
                0.7f,
            )
        helper = TemplateMatchingOMRHelper(config)
    }

    @Test
    fun test_detect() {
        val result = helper.detect(OMRSection.FIRST)
        val imgRes = helper.annotateImage(result)
        Log.d("TemplateMatchingOMRHelperTest", result.toString())
        SaveHelper.saveImage(appContext, imgRes, "test", "test_detect")
        assert(result == 172)
    }
}
package com.k2_9.omrekap.preprocess

import android.content.Context
import android.graphics.Bitmap
import androidx.test.platform.app.InstrumentationRegistry
import com.k2_9.omrekap.R
import com.k2_9.omrekap.data.models.ImageSaveData
import com.k2_9.omrekap.utils.CropHelper
import com.k2_9.omrekap.utils.PreprocessHelper
import com.k2_9.omrekap.utils.SaveHelper
import org.junit.After
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.opencv.android.OpenCVLoader
import org.opencv.android.Utils
import org.opencv.core.CvType
import org.opencv.core.Mat

@RunWith(JUnit4::class)
class CropHelperTest {
    private val image: Mat
    private val patternImage: Mat
    private val imageBitmap: Bitmap
    private val patternBitmap: Bitmap
    private val appContext: Context
    private var imageSaveData: ImageSaveData

    init {
        OpenCVLoader.initLocal()
        appContext = InstrumentationRegistry.getInstrumentation().targetContext
        image = Utils.loadResource(appContext, R.raw.example, CvType.CV_8UC1)
        patternImage = Utils.loadResource(appContext, R.raw.corner_pattern, CvType.CV_8UC4)
        patternBitmap = Bitmap.createBitmap(patternImage.width(), patternImage.height(), Bitmap.Config.ARGB_8888)
        imageBitmap = Bitmap.createBitmap(image.width(), image.height(), Bitmap.Config.ARGB_8888)
        Utils.matToBitmap(image, imageBitmap)
        Utils.matToBitmap(patternImage, patternBitmap)
        CropHelper.loadPattern(patternBitmap)
        imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>())
    }

    @Before
    fun beforeEachTest() {
        imageSaveData = ImageSaveData(imageBitmap, imageBitmap, mutableMapOf<String, Int>())
    }

    @Test
    fun test_preprocess_and_crop() {
        CropHelper.loadPattern(patternBitmap)
        imageSaveData = PreprocessHelper.preprocessImage(imageSaveData)
        SaveHelper.saveImage(appContext, imageSaveData.rawImage, "test", "test_preprocess_raw")
        SaveHelper.saveImage(appContext, imageSaveData.annotatedImage, "test", "test_preprocess_annotated")
    }

    @After
    fun clear() {
    }
}
@@ -17,7 +17,7 @@
        android:theme="@style/Theme.Omrekap"
        tools:targetApi="31">
        <activity
-           android:name=".activities.HomeActivity"
+           android:name=".views.activities.HomeActivity"
            android:exported="true">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
@@ -25,11 +25,11 @@
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
-       <activity android:name=".activities.ExpandImageActivity" />
-       <activity android:name=".activities.CameraActivity" />
-       <activity android:name=".activities.PreviewActivity" />
-       <activity android:name=".activities.ResultFromCameraActivity" />
-       <activity android:name=".activities.ResultFromGalleryActivity" />
+       <activity android:name=".views.activities.ExpandImageActivity" />
+       <activity android:name=".views.activities.CameraActivity" />
+       <activity android:name=".views.activities.PreviewActivity" />
+       <activity android:name=".views.activities.ResultFromCameraActivity" />
+       <activity android:name=".views.activities.ResultFromGalleryActivity" />
        <meta-data
            android:name="preloaded_fonts"
            android:resource="@array/preloaded_fonts" />
...
{
  "omrConfigs": {
    "102": {
      "contourOMRHelperConfig": {
        "darkIntensityThreshold": 230,
        "darkPercentageThreshold": 0.9,
        "maxAspectRatio": 1.5,
        "maxRadius": 30,
        "minAspectRatio": 0.5,
        "minRadius": 12,
        "omrCropper": {
          "config": {
            "image": null,
            "omrSectionPosition": {
              "FIRST": {
                "first": 780,
                "second": 373
              },
              "SECOND": {
                "first": 0,
                "second": 0
              },
              "THIRD": {
                "first": 0,
                "second": 0
              }
            },
            "omrSectionSize": {
              "first": 140,
              "second": 220
            }
          }
        }
      },
      "templateMatchingOMRHelperConfig": {
        "similarityThreshold": 0.7,
        "templateLoader": null,
        "omrCropper": {
          "config": {
            "image": null,
            "omrSectionPosition": {
              "FIRST": {
                "first": 780,
                "second": 373
              },
              "SECOND": {
                "first": 0,
                "second": 0
              },
              "THIRD": {
                "first": 0,
                "second": 0
              }
            },
            "omrSectionSize": {
              "first": 140,
              "second": 220
            }
          }
        }
      }
    }
  }
}
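The JSON above is the OMR configuration asset keyed by AprilTag id "102"; AprilTagConfigDetectionTest serializes the detected configuration and compares it against this entry through OMRConfigRepository. As a minimal sketch of reading such a file with the Gson dependency added in this merge request — the DTO names (OMRConfigFile, OMRConfigEntry, ContourThresholds, readContourThresholds) are illustrative stand-ins, not the project's actual models:

import com.google.gson.Gson

// Hypothetical DTOs mirroring a subset of the JSON above; not the project's real models.
data class ContourThresholds(
    val minRadius: Int,
    val maxRadius: Int,
    val minAspectRatio: Float,
    val maxAspectRatio: Float,
    val darkPercentageThreshold: Float,
    val darkIntensityThreshold: Int,
)

data class OMRConfigEntry(val contourOMRHelperConfig: ContourThresholds?)

data class OMRConfigFile(val omrConfigs: Map<String, OMRConfigEntry>)

fun readContourThresholds(json: String): ContourThresholds? {
    // Gson ignores fields that the DTOs do not declare (omrCropper, template matching, ...).
    val file = Gson().fromJson(json, OMRConfigFile::class.java)
    return file.omrConfigs["102"]?.contourOMRHelperConfig
}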
package com.k2_9.omrekap.data.configs.omr

import android.content.Context
import org.opencv.core.Mat
import org.opencv.core.MatOfByte
import org.opencv.imgcodecs.Imgcodecs
import java.io.InputStream

class CircleTemplateLoader(private val appContext: Context, private val resId: Int) {
    fun loadTemplateImage(): Mat {
        val inputStream: InputStream = appContext.resources.openRawResource(resId)
        val byteArray = inputStream.readBytes()
        val imgBuffer = MatOfByte(*byteArray)
        return Imgcodecs.imdecode(imgBuffer, Imgcodecs.IMREAD_GRAYSCALE)
    }
}
package com.k2_9.omrekap.data.configs.omr

class ContourOMRHelperConfig(
    omrCropper: OMRCropper,
    minRadius: Int,
    maxRadius: Int,
    minAspectRatio: Float,
    maxAspectRatio: Float,
    darkPercentageThreshold: Float,
    darkIntensityThreshold: Int,
) : OMRHelperConfig(omrCropper) {
    var minRadius: Int
        private set
    var maxRadius: Int
        private set
    var minAspectRatio: Float
        private set
    var maxAspectRatio: Float
        private set
    var darkPercentageThreshold: Float
        private set
    var darkIntensityThreshold: Int
        private set

    init {
        require(minRadius >= 0) { "minRadius must be non-negative" }
        require(maxRadius >= minRadius) { "maxRadius must be greater than or equal to minRadius" }
        require(minAspectRatio >= 0.0f) { "minAspectRatio must be non-negative" }
        require(maxAspectRatio >= minAspectRatio) { "maxAspectRatio must be greater than or equal to minAspectRatio" }
        require(darkPercentageThreshold in 0.0f..1.0f) { "darkPercentageThreshold must be between 0 and 1" }
        require(darkIntensityThreshold >= 0) { "darkIntensityThreshold must be non-negative" }
        this.minRadius = minRadius
        this.maxRadius = maxRadius
        this.minAspectRatio = minAspectRatio
        this.maxAspectRatio = maxAspectRatio
        this.darkPercentageThreshold = darkPercentageThreshold
        this.darkIntensityThreshold = darkIntensityThreshold
    }
}
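For orientation, the sketch below shows one common way such thresholds are applied with OpenCV contour analysis: candidates are filtered by bounding-box radius and aspect ratio, and a bubble counts as filled when its dark-pixel fraction reaches darkPercentageThreshold. This is an illustrative assumption, not the project's ContourOMRHelper implementation:

import com.k2_9.omrekap.data.configs.omr.ContourOMRHelperConfig
import org.opencv.core.Core
import org.opencv.core.Mat
import org.opencv.core.MatOfPoint
import org.opencv.imgproc.Imgproc

// Illustrative filter using the ContourOMRHelperConfig thresholds; `gray` is assumed to be
// a grayscale section crop. Sketch only, not the project's actual detection code.
fun countFilledCandidates(gray: Mat, config: ContourOMRHelperConfig): Int {
    val binary = Mat()
    // Pixels darker than darkIntensityThreshold become foreground (255).
    Imgproc.threshold(
        gray, binary,
        config.darkIntensityThreshold.toDouble(), 255.0,
        Imgproc.THRESH_BINARY_INV,
    )

    val contours = mutableListOf<MatOfPoint>()
    Imgproc.findContours(binary, contours, Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE)

    return contours.count { contour ->
        val box = Imgproc.boundingRect(contour)
        val radius = maxOf(box.width, box.height) / 2
        val aspect = box.width.toFloat() / box.height
        if (radius !in config.minRadius..config.maxRadius) return@count false
        if (aspect !in config.minAspectRatio..config.maxAspectRatio) return@count false
        // Fraction of dark (foreground) pixels inside the bounding box.
        val roi = binary.submat(box)
        val darkFraction = Core.countNonZero(roi).toFloat() / (box.width * box.height)
        darkFraction >= config.darkPercentageThreshold
    }
}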
package com.k2_9.omrekap.data.configs.omr

import org.opencv.core.Mat
import org.opencv.core.Rect

class OMRCropper(val config: OMRCropperConfig) {
    fun crop(section: OMRSection): Mat {
        val (x, y) = config.getSectionPosition(section)
        val (width, height) = config.omrSectionSize
        val roi = Rect(x, y, width, height)
        return Mat(config.image, roi)
    }
}
package com.k2_9.omrekap.data.configs.omr

import org.opencv.core.Mat

class OMRCropperConfig(
    image: Mat,
    val omrSectionSize: Pair<Int, Int>,
    omrSectionPosition: Map<OMRSection, Pair<Int, Int>>,
) {
    var image: Mat
        private set
        get() = field.clone()

    // Check that all the sections are present and the sizes are valid
    init {
        // Note: the top-left corner and height must be chosen so that the section is cropped
        // with additional top padding and no bottom padding.
        // The top padding must have the same size as the gap between circles inside the section.
        require(omrSectionSize.first >= 0 && omrSectionSize.second >= 0) {
            "OMR section size must be non-negative"
        }
        require(omrSectionSize.first <= image.width() && omrSectionSize.second <= image.height()) {
            "OMR section size must be less than or equal to the image size"
        }
        require(omrSectionPosition.keys.containsAll(OMRSection.entries)) {
            "All OMR sections must be present"
        }
        require(omrSectionPosition.values.all { it.first >= 0 && it.second >= 0 }) {
            "OMR section position must be non-negative"
        }
        this.image = image.clone()
    }

    private val omrSectionPosition: Map<OMRSection, Pair<Int, Int>> = omrSectionPosition.toMap()

    fun getSectionPosition(section: OMRSection): Pair<Int, Int> {
        return omrSectionPosition[section]!!
    }

    fun setImage(image: Mat) {
        this.image = image
    }
}
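A small usage sketch of OMRCropper and OMRCropperConfig as defined above, with the section position and size taken from the tests and a synthetic Mat standing in for R.raw.example (OpenCV is assumed to be initialised already): the section position is the crop's top-left corner, so FIRST at (780, 373) with a 140x220 section size yields the ROI Rect(780, 373, 140, 220), and the extra top padding mentioned in the note is simply part of that region.

import com.k2_9.omrekap.data.configs.omr.OMRCropper
import com.k2_9.omrekap.data.configs.omr.OMRCropperConfig
import com.k2_9.omrekap.data.configs.omr.OMRSection
import org.opencv.core.CvType
import org.opencv.core.Mat

fun cropExample(): Mat {
    // Synthetic 1280x2000 grayscale page used instead of the test resource (illustration only).
    val page = Mat.zeros(2000, 1280, CvType.CV_8UC1)
    val config =
        OMRCropperConfig(
            page,
            Pair(140, 220),
            mapOf(
                OMRSection.FIRST to Pair(780, 373),
                OMRSection.SECOND to Pair(0, 0),
                OMRSection.THIRD to Pair(0, 0),
            ),
        )
    // Crops Rect(780, 373, 140, 220) -> a 140x220 Mat view of the page clone.
    return OMRCropper(config).crop(OMRSection.FIRST)
}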
package com.k2_9.omrekap.data.configs.omr

open class OMRHelperConfig(
    val omrCropper: OMRCropper,
)
package com.k2_9.omrekap.data.configs.omr

enum class OMRSection {
    FIRST,
    SECOND,
    THIRD,
}
package com.k2_9.omrekap.data.configs.omr

import org.opencv.core.Mat

class TemplateMatchingOMRHelperConfig(
    omrCropper: OMRCropper,
    templateLoader: CircleTemplateLoader,
    similarityThreshold: Float,
) : OMRHelperConfig(omrCropper) {
    var template: Mat
        private set
        get() = field.clone()
    var similarityThreshold: Float
        private set

    init {
        require(similarityThreshold in 0.0..1.0) {
            "similarity_threshold must be between 0 and 1"
        }
        this.template = templateLoader.loadTemplateImage()
        this.similarityThreshold = similarityThreshold
    }
}
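To illustrate what similarityThreshold gates (a sketch only, not the project's TemplateMatchingOMRHelper): with OpenCV's normalized cross-correlation, a location in the section crop is accepted as a match only when its score reaches the threshold. The image and template are assumed to be single-channel Mats of the same type, with the template smaller than the crop.

import org.opencv.core.Core
import org.opencv.core.Mat
import org.opencv.imgproc.Imgproc

// Sketch: returns true when the circle template matches somewhere in the section crop
// with a normalized correlation score of at least `similarityThreshold`.
fun templateMatches(sectionCrop: Mat, template: Mat, similarityThreshold: Float): Boolean {
    val scores = Mat()
    Imgproc.matchTemplate(sectionCrop, template, scores, Imgproc.TM_CCOEFF_NORMED)
    val best = Core.minMaxLoc(scores)
    return best.maxVal >= similarityThreshold
}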
-package com.k2_9.omrekap.models
+package com.k2_9.omrekap.data.models
import org.opencv.core.Point
...
-package com.k2_9.omrekap.models
+package com.k2_9.omrekap.data.models
import android.graphics.Bitmap
data class ImageSaveData(
    val rawImage: Bitmap,
    var annotatedImage: Bitmap,
-   var data: Map<String, Int>?,
+   var data: Map<String, Int>,
)