diff --git a/app/build.gradle b/app/build.gradle
index 4459163..b9b9b7d 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -60,12 +60,14 @@ dependencies {
implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$lifecycle_version"
implementation "androidx.lifecycle:lifecycle-livedata-ktx:$lifecycle_version"
+ implementation "androidx.lifecycle:lifecycle-runtime-ktx:$lifecycle_version"
implementation "androidx.lifecycle:lifecycle-common-java8:$lifecycle_version"
- implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core-common:1.3.6'
- implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.6'
- implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.3.6'
- implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.3.1'
+ implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core-common:1.3.7'
+ implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.7'
+ implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.3.7'
+ implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.3.7'
+ implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-play-services:1.3.7'
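+    // the play-services coroutines artifact above provides Task.await(), which TextAnalyzer
+    // uses to consume ML Kit's Task-based recognition API from a coroutine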
implementation 'com.google.code.gson:gson:2.8.6'
implementation 'com.squareup.retrofit2:retrofit:2.9.0'
@@ -79,6 +81,15 @@ dependencies {
implementation 'org.openscience.cdk:cdk-io:2.3'
implementation 'org.openscience.cdk:cdk-builder3d:2.3'
+ def camerax_version = "1.0.0-beta06"
+ implementation "androidx.camera:camera-core:${camerax_version}"
+ implementation "androidx.camera:camera-camera2:${camerax_version}"
+ implementation "androidx.camera:camera-lifecycle:${camerax_version}"
+ implementation "androidx.camera:camera-view:1.0.0-alpha13"
+
+ implementation 'com.google.android.gms:play-services-mlkit-text-recognition:16.0.0'
+ implementation 'com.google.guava:guava:27.0.1-android'
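+    // Guava is most likely here for the ListenableFuture returned by
+    // ProcessCameraProvider.getInstance() (an assumption; the change itself doesn't say)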
+
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index 1a35edf..510a083 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -23,7 +23,14 @@
-
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/MainActivity.kt b/app/src/main/java/com/plweegie/magmolecular/MainActivity.kt
index 56ac361..d0ab0c9 100644
--- a/app/src/main/java/com/plweegie/magmolecular/MainActivity.kt
+++ b/app/src/main/java/com/plweegie/magmolecular/MainActivity.kt
@@ -1,10 +1,12 @@
package com.plweegie.magmolecular
+import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import androidx.activity.viewModels
import androidx.lifecycle.Observer
import com.plweegie.magmolecular.ar.MagMolActivity
+import com.plweegie.magmolecular.ocr.TextRecognitionActivity
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.android.synthetic.main.activity_main.*
@@ -21,6 +23,10 @@ class MainActivity : AppCompatActivity() {
viewModel.getSmilesForName(smiles_field?.text.toString())
}
+ get_atoms_from_camera_btn?.setOnClickListener {
+ Intent(this, TextRecognitionActivity::class.java).also { startActivity(it) }
+ }
+
viewModel.smiles.observe(this, Observer { smiles ->
if (smiles.isNotEmpty()) {
val intent = MagMolActivity.newIntent(this, smiles)
diff --git a/app/src/main/java/com/plweegie/magmolecular/ocr/TextAnalyzer.kt b/app/src/main/java/com/plweegie/magmolecular/ocr/TextAnalyzer.kt
new file mode 100644
index 0000000..db22ff0
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/ocr/TextAnalyzer.kt
@@ -0,0 +1,115 @@
+package com.plweegie.magmolecular.ocr
+
+import android.content.Context
+import android.graphics.Rect
+import android.util.Log
+import android.widget.Toast
+import androidx.camera.core.ImageAnalysis
+import androidx.camera.core.ImageProxy
+import androidx.lifecycle.Lifecycle
+import androidx.lifecycle.MutableLiveData
+import androidx.lifecycle.coroutineScope
+import com.google.mlkit.common.MlKitException
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.text.Text
+import com.google.mlkit.vision.text.TextRecognition
+import com.plweegie.magmolecular.utils.ImageUtils
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.plus
+import kotlinx.coroutines.tasks.await
+import kotlinx.coroutines.withContext
+
+
+class TextAnalyzer(
+ private val context: Context,
+    private val result: MutableLiveData<String>,
+    private val imageCropPercentages: MutableLiveData<Pair<Int, Int>>,
+ lifecycle: Lifecycle
+) : ImageAnalysis.Analyzer {
+
+ private companion object {
+ const val TAG = "TextAnalyzer"
+ }
+
+ private val detector = TextRecognition.getClient()
+ private val textAnalyzerScope = lifecycle.coroutineScope + Dispatchers.Default
+
+ init {
+ lifecycle.addObserver(detector)
+ }
+
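+    // Processing flow: each frame is cropped to the region of interest, converted to a
+    // Bitmap, and handed to ML Kit; the ImageProxy is closed only after recognition
+    // finishes, which together with STRATEGY_KEEP_ONLY_LATEST keeps a single frame
+    // in flight at a time.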
+ @androidx.camera.core.ExperimentalGetImage
+ override fun analyze(imageProxy: ImageProxy) {
+ val mediaImage = imageProxy.image ?: return
+
+ val rotationDegrees = imageProxy.imageInfo.rotationDegrees
+
+        // We requested a target aspect ratio via setTargetAspectRatio, but it's not guaranteed
+        // that the camera stack can honor it, so we calculate the actual ratio from the first
+        // frame to know how to appropriately crop the image we want to analyze.
+ val imageHeight = mediaImage.height
+ val imageWidth = mediaImage.width
+
+ val actualAspectRatio = imageWidth / imageHeight
+
+ val convertImageToBitmap = ImageUtils.convertYuv420888ImageToBitmap(mediaImage)
+ val cropRect = Rect(0, 0, imageWidth, imageHeight)
+
+ // If the image has a way wider aspect ratio than expected, crop less of the height so we
+ // don't end up cropping too much of the image. If the image has a way taller aspect ratio
+ // than expected, we don't have to make any changes to our cropping so we don't handle it
+ // here.
+ val currentCropPercentages = imageCropPercentages.value ?: return
+ if (actualAspectRatio > 3) {
+ val originalHeightCropPercentage = currentCropPercentages.first
+ val originalWidthCropPercentage = currentCropPercentages.second
+ imageCropPercentages.value =
+ Pair(originalHeightCropPercentage / 2, originalWidthCropPercentage)
+ }
+
+ // If the image is rotated by 90 (or 270) degrees, swap height and width when calculating
+ // the crop.
+ val cropPercentages = imageCropPercentages.value ?: return
+ val heightCropPercent = cropPercentages.first
+ val widthCropPercent = cropPercentages.second
+ val (widthCrop, heightCrop) = when (rotationDegrees) {
+ 90, 270 -> Pair(heightCropPercent / 100f, widthCropPercent / 100f)
+ else -> Pair(widthCropPercent / 100f, heightCropPercent / 100f)
+ }
+
+ cropRect.inset(
+ (imageWidth * widthCrop / 2).toInt(),
+ (imageHeight * heightCrop / 2).toInt()
+ )
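+        // Worked example (assumed numbers): a 1280x720 buffer at rotationDegrees = 90 with the
+        // default 74%/8% crop gives widthCrop = 0.74 and heightCrop = 0.08, so the rect is inset
+        // by 473 px horizontally and 28 px vertically, leaving a 334x664 region that becomes a
+        // wide 664x334 strip once rotated upright.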
+ val croppedBitmap =
+ ImageUtils.rotateAndCrop(convertImageToBitmap, rotationDegrees, cropRect)
+
+ textAnalyzerScope.launch {
+ recognizeText(InputImage.fromBitmap(croppedBitmap, 0))
+ imageProxy.close()
+ }
+ }
+
+ private suspend fun recognizeText(image: InputImage): Text? =
+ try {
+ detector.process(image).await().also {
+ result.postValue(it.text)
+ }
+ } catch (e: Exception) {
+ Log.e(TAG, "Text recognition error", e)
+ getErrorMessage(e)?.let {
+ withContext(Dispatchers.Main) {
+ Toast.makeText(context, it, Toast.LENGTH_SHORT).show()
+ }
+ }
+ null
+ }
+
+ private fun getErrorMessage(exception: Exception): String? {
+ val mlKitException = exception as? MlKitException ?: return exception.message
+ return if (mlKitException.errorCode == MlKitException.UNAVAILABLE) {
+ "Waiting for text recognition model to be downloaded"
+ } else exception.message
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionActivity.kt b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionActivity.kt
new file mode 100644
index 0000000..9086224
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionActivity.kt
@@ -0,0 +1,22 @@
+package com.plweegie.magmolecular.ocr
+
+import android.os.Bundle
+import androidx.appcompat.app.AppCompatActivity
+import com.plweegie.magmolecular.R
+import dagger.hilt.android.AndroidEntryPoint
+
+
+@AndroidEntryPoint
+class TextRecognitionActivity : AppCompatActivity() {
+
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+ setContentView(R.layout.activity_text_recognition)
+
+ if (savedInstanceState == null) {
+ supportFragmentManager.beginTransaction()
+ .replace(R.id.container, TextRecognitionFragment.newInstance())
+ .commitNow()
+ }
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionFragment.kt b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionFragment.kt
new file mode 100644
index 0000000..e222796
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionFragment.kt
@@ -0,0 +1,187 @@
+package com.plweegie.magmolecular.ocr
+
+import android.Manifest
+import android.content.pm.PackageManager
+import android.os.Bundle
+import android.util.DisplayMetrics
+import android.util.Log
+import android.view.LayoutInflater
+import android.view.View
+import android.view.ViewGroup
+import androidx.camera.core.*
+import androidx.camera.lifecycle.ProcessCameraProvider
+import androidx.core.content.ContextCompat
+import androidx.fragment.app.Fragment
+import androidx.fragment.app.viewModels
+import androidx.lifecycle.Observer
+import com.plweegie.magmolecular.R
+import com.plweegie.magmolecular.utils.ScopedExecutor
+import dagger.hilt.android.AndroidEntryPoint
+import kotlinx.android.synthetic.main.fragment_text_recognition.*
+import java.util.concurrent.ExecutorService
+import java.util.concurrent.Executors
+import kotlin.math.abs
+import kotlin.math.ln
+import kotlin.math.max
+import kotlin.math.min
+
+
+@AndroidEntryPoint
+class TextRecognitionFragment : Fragment() {
+
+ companion object {
+ fun newInstance() = TextRecognitionFragment()
+
+        // We only need to analyze the part of the image that has text, so we set crop percentages
+        // to avoid analyzing the entire image from the live camera feed. The crop is split evenly
+        // between both sides, leaving a centered strip of roughly 92% of the width and 26% of the
+        // height in display orientation.
+ const val DESIRED_WIDTH_CROP_PERCENT = 8
+ const val DESIRED_HEIGHT_CROP_PERCENT = 74
+
+ private const val REQUEST_CODE_PERMISSIONS = 10
+
+ private val REQUIRED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
+ private const val RATIO_4_3_VALUE = 4.0 / 3.0
+ private const val RATIO_16_9_VALUE = 16.0 / 9.0
+ private const val TAG = "TextRecognitionFragment"
+ }
+
+ private var displayId: Int = -1
+ private var cameraProvider: ProcessCameraProvider? = null
+ private var camera: Camera? = null
+ private var imageAnalyzer: ImageAnalysis? = null
+
+ /** Blocking camera operations are performed using this executor */
+ private lateinit var cameraExecutor: ExecutorService
+
+ private lateinit var scopedExecutor: ScopedExecutor
+
+ private val viewModel: TextRecognitionViewModel by viewModels()
+
+ override fun onCreateView(
+ inflater: LayoutInflater, container: ViewGroup?,
+ savedInstanceState: Bundle?
+ ): View {
+ return inflater.inflate(R.layout.fragment_text_recognition, container, false)
+ }
+
+ override fun onDestroyView() {
+ super.onDestroyView()
+
+ // Shut down our background executor
+ cameraExecutor.shutdown()
+ scopedExecutor.shutdown()
+ }
+
+ override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
+ super.onViewCreated(view, savedInstanceState)
+
+ // Initialize our background executor
+ cameraExecutor = Executors.newSingleThreadExecutor()
+ scopedExecutor = ScopedExecutor(cameraExecutor)
+
+ // Request camera permissions
+ if (allPermissionsGranted()) {
+ // Wait for the views to be properly laid out
+ view_finder?.post {
+ // Keep track of the display in which this view is attached
+ displayId = view_finder.display.displayId
+
+ // Set up the camera and its use cases
+ setUpCamera()
+ }
+ } else {
+ requestPermissions(REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS)
+ }
+
+ }
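+
+    // Note: nothing re-runs setUpCamera() once the user grants the permission from the dialog
+    // above. A minimal sketch of how that could be wired (an assumption, not part of the
+    // original change):
+    override fun onRequestPermissionsResult(
+        requestCode: Int,
+        permissions: Array<out String>,
+        grantResults: IntArray
+    ) {
+        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
+        if (requestCode == REQUEST_CODE_PERMISSIONS && allPermissionsGranted()) {
+            view_finder?.post { setUpCamera() }
+        }
+    }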
+
+ /** Initialize CameraX, and prepare to bind the camera use cases */
+ private fun setUpCamera() {
+ val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext())
+ cameraProviderFuture.addListener(Runnable {
+
+ // CameraProvider
+ cameraProvider = cameraProviderFuture.get()
+ bindCameraUseCases()
+ }, ContextCompat.getMainExecutor(requireContext()))
+ }
+
+ private fun bindCameraUseCases() {
+ val localCameraProvider = cameraProvider
+ ?: throw IllegalStateException("Camera initialization failed.")
+
+ // Get screen metrics used to setup camera for full screen resolution
+ val metrics = DisplayMetrics().also { view_finder?.display?.getRealMetrics(it) }
+ Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
+
+ val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
+ Log.d(TAG, "Preview aspect ratio: $screenAspectRatio")
+
+ val rotation = view_finder?.display?.rotation ?: 0
+
+ val preview = Preview.Builder()
+ .setTargetAspectRatio(screenAspectRatio)
+ .setTargetRotation(rotation)
+ .build()
+
+ imageAnalyzer = ImageAnalysis.Builder()
+ .setTargetAspectRatio(screenAspectRatio)
+ .setTargetRotation(rotation)
+ .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+ .build()
+ .also {
+ it.setAnalyzer(cameraExecutor,
+ TextAnalyzer(requireContext(), viewModel.sourceText, viewModel.imageCropPercentages, lifecycle))
+ }
+
+ viewModel.sourceText.observe(viewLifecycleOwner, Observer { src_text?.text = it })
+
+ // Select back camera since text detection does not work with front camera
+ val cameraSelector =
+ CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build()
+
+ try {
+ // Unbind use cases before rebinding
+ localCameraProvider.unbindAll()
+
+ // Bind use cases to camera
+ camera = localCameraProvider.bindToLifecycle(
+ this, cameraSelector, preview, imageAnalyzer
+ )
+ preview.setSurfaceProvider(view_finder.createSurfaceProvider())
+ } catch (exc: IllegalStateException) {
+ Log.e(TAG, "Use case binding failed. This must be running on main thread.", exc)
+ }
+ }
+
+ /**
+ * [androidx.camera.core.ImageAnalysisConfig] requires enum value of
+ * [androidx.camera.core.AspectRatio]. Currently it has values of 4:3 & 16:9.
+ *
+     * Detects the most suitable ratio for the dimensions provided by comparing the absolute
+     * difference between the log of the preview ratio and the log of each candidate value.
+ *
+ * @param width - preview width
+ * @param height - preview height
+ * @return suitable aspect ratio
+ */
+ private fun aspectRatio(width: Int, height: Int): Int {
+ val previewRatio = ln(max(width, height).toDouble() / min(width, height))
+ if (abs(previewRatio - ln(RATIO_4_3_VALUE))
+ <= abs(previewRatio - ln(RATIO_16_9_VALUE))
+ ) {
+ return AspectRatio.RATIO_4_3
+ }
+ return AspectRatio.RATIO_16_9
+ }
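+
+    // Worked example for aspectRatio() (hypothetical input): 1920x1080 gives
+    // previewRatio = ln(1920/1080) ≈ 0.575, which equals ln(16/9), so the 4:3 check
+    // |0.575 - 0.288| <= |0.575 - 0.575| is false and RATIO_16_9 is returned.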
+
+
+ /**
+ * Check if all permission specified in the manifest have been granted
+ */
+ private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
+ ContextCompat.checkSelfPermission(
+ requireContext(), it
+ ) == PackageManager.PERMISSION_GRANTED
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionViewModel.kt b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionViewModel.kt
new file mode 100644
index 0000000..408042d
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/ocr/TextRecognitionViewModel.kt
@@ -0,0 +1,30 @@
+package com.plweegie.magmolecular.ocr
+
+import androidx.hilt.Assisted
+import androidx.hilt.lifecycle.ViewModelInject
+import androidx.lifecycle.MutableLiveData
+import androidx.lifecycle.SavedStateHandle
+import androidx.lifecycle.ViewModel
+import com.plweegie.magmolecular.ocr.TextRecognitionFragment.Companion.DESIRED_HEIGHT_CROP_PERCENT
+import com.plweegie.magmolecular.ocr.TextRecognitionFragment.Companion.DESIRED_WIDTH_CROP_PERCENT
+import com.plweegie.magmolecular.utils.SmoothedMutableLiveData
+
+
+class TextRecognitionViewModel @ViewModelInject constructor(
+ @Assisted private val savedStateHandle: SavedStateHandle
+) : ViewModel() {
+
+ private companion object {
+ // Amount of time (in milliseconds) to wait for detected text to settle
+ const val SMOOTHING_DURATION = 50L
+ }
+
+ // We set desired crop percentages to avoid having to analyze the whole image from the live
+ // camera feed. However, we are not guaranteed what aspect ratio we will get from the camera, so
+ // we use the first frame we get back from the camera to update these crop percentages based on
+ // the actual aspect ratio of images.
+    val imageCropPercentages = MutableLiveData<Pair<Int, Int>>()
+ .apply { value = Pair(DESIRED_HEIGHT_CROP_PERCENT, DESIRED_WIDTH_CROP_PERCENT) }
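+        // Note: the Pair is ordered (heightCropPercent, widthCropPercent), matching how
+        // TextAnalyzer reads it back via first/second.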
+
+    val sourceText = SmoothedMutableLiveData<String>(SMOOTHING_DURATION)
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/utils/ImageUtils.kt b/app/src/main/java/com/plweegie/magmolecular/utils/ImageUtils.kt
new file mode 100644
index 0000000..6632361
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/utils/ImageUtils.kt
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2020 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.plweegie.magmolecular.utils
+
+import android.graphics.Bitmap
+import android.graphics.ImageFormat
+import android.graphics.Matrix
+import android.graphics.Rect
+import android.media.Image
+import androidx.annotation.ColorInt
+
+/**
+ * Utility class for manipulating images.
+ */
+object ImageUtils {
+ private val CHANNEL_RANGE = 0 until (1 shl 18)
+
+ fun convertYuv420888ImageToBitmap(image: Image): Bitmap {
+ require(image.format == ImageFormat.YUV_420_888) {
+ "Unsupported image format $(image.format)"
+ }
+
+ val planes = image.planes
+
+ // Because of the variable row stride it's not possible to know in
+ // advance the actual necessary dimensions of the yuv planes.
+ val yuvBytes = planes.map { plane ->
+ val buffer = plane.buffer
+ val yuvBytes = ByteArray(buffer.capacity())
+ buffer[yuvBytes]
+ buffer.rewind() // Be kind…
+ yuvBytes
+ }
+
+ val yRowStride = planes[0].rowStride
+ val uvRowStride = planes[1].rowStride
+ val uvPixelStride = planes[1].pixelStride
+ val width = image.width
+ val height = image.height
+ @ColorInt val argb8888 = IntArray(width * height)
+ var i = 0
+ for (y in 0 until height) {
+ val pY = yRowStride * y
+ val uvRowStart = uvRowStride * (y shr 1)
+ for (x in 0 until width) {
+ val uvOffset = (x shr 1) * uvPixelStride
+ argb8888[i++] =
+ yuvToRgb(
+ yuvBytes[0][pY + x].toIntUnsigned(),
+ yuvBytes[1][uvRowStart + uvOffset].toIntUnsigned(),
+ yuvBytes[2][uvRowStart + uvOffset].toIntUnsigned()
+ )
+ }
+ }
+ val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
+ bitmap.setPixels(argb8888, 0, width, 0, 0, width, height)
+ return bitmap
+ }
+
+ fun rotateAndCrop(
+ bitmap: Bitmap,
+ imageRotationDegrees: Int,
+ cropRect: Rect
+ ): Bitmap {
+ val matrix = Matrix()
+ matrix.preRotate(imageRotationDegrees.toFloat())
+ return Bitmap.createBitmap(
+ bitmap,
+ cropRect.left,
+ cropRect.top,
+ cropRect.width(),
+ cropRect.height(),
+ matrix,
+ true
+ )
+ }
+
+ @ColorInt
+ private fun yuvToRgb(nY: Int, nU: Int, nV: Int): Int {
+ var nY = nY
+ var nU = nU
+ var nV = nV
+ nY -= 16
+ nU -= 128
+ nV -= 128
+ nY = nY.coerceAtLeast(0)
+
+ // This is the floating point equivalent. We do the conversion in integer
+ // because some Android devices do not have floating point in hardware.
+ // nR = (int)(1.164 * nY + 2.018 * nU);
+ // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+ // nB = (int)(1.164 * nY + 1.596 * nV);
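+        // Sanity check (hypothetical input): nY = 235, nU = nV = 128 maps to roughly
+        // (254, 254, 254), i.e. near-white, after the shifts and clamping below.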
+ var nR = 1192 * nY + 1634 * nV
+ var nG = 1192 * nY - 833 * nV - 400 * nU
+ var nB = 1192 * nY + 2066 * nU
+
+ // Clamp the values before normalizing them to 8 bits.
+ nR = nR.coerceIn(CHANNEL_RANGE) shr 10 and 0xff
+ nG = nG.coerceIn(CHANNEL_RANGE) shr 10 and 0xff
+ nB = nB.coerceIn(CHANNEL_RANGE) shr 10 and 0xff
+ return -0x1000000 or (nR shl 16) or (nG shl 8) or nB
+ }
+}
+
+private fun Byte.toIntUnsigned(): Int {
+ return toInt() and 0xFF
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/utils/ScopedExecutor.kt b/app/src/main/java/com/plweegie/magmolecular/utils/ScopedExecutor.kt
new file mode 100644
index 0000000..187bb82
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/utils/ScopedExecutor.kt
@@ -0,0 +1,20 @@
+package com.plweegie.magmolecular.utils
+
+import java.util.concurrent.Executor
+import java.util.concurrent.atomic.AtomicBoolean
+
+
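+/**
+ * An [Executor] wrapper that silently drops any command submitted after [shutdown]
+ * has been called, so late callbacks cannot run against torn-down state.
+ */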
+class ScopedExecutor(private val executor: Executor) : Executor {
+
+ private val isShutdown = AtomicBoolean()
+
+ fun shutdown() {
+ isShutdown.set(true)
+ }
+
+ override fun execute(command: Runnable) {
+ executor.execute {
+ if (!isShutdown.get()) command.run()
+ }
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/plweegie/magmolecular/utils/SmoothedMutableLiveData.kt b/app/src/main/java/com/plweegie/magmolecular/utils/SmoothedMutableLiveData.kt
new file mode 100644
index 0000000..9ad89c3
--- /dev/null
+++ b/app/src/main/java/com/plweegie/magmolecular/utils/SmoothedMutableLiveData.kt
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2019 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.plweegie.magmolecular.utils
+
+import android.os.Handler
+import android.os.Looper
+import androidx.lifecycle.MutableLiveData
+
+/**
+ * A [MutableLiveData] that only emits change events when the underlying data has been stable
+ * for the configured amount of time.
+ *
+ * @param duration time delay to wait in milliseconds
+ */
+class SmoothedMutableLiveData<T>(private val duration: Long) : MutableLiveData<T>() {
+    private val handler = Handler(Looper.getMainLooper())
+    private var pendingValue: T? = null
+ private val runnable = Runnable {
+ super.setValue(pendingValue)
+ }
+
+ override fun setValue(value: T) {
+ if (value != pendingValue) {
+ pendingValue = value
+            handler.removeCallbacks(runnable)
+            handler.postDelayed(runnable, duration)
+ }
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/res/layout/activity_main.xml b/app/src/main/res/layout/activity_main.xml
index 02fcd70..7e5a200 100644
--- a/app/src/main/res/layout/activity_main.xml
+++ b/app/src/main/res/layout/activity_main.xml
@@ -37,6 +37,16 @@
android:layout_marginTop="32dp"
app:layout_constraintTop_toBottomOf="@+id/smiles_layout"
app:layout_constraintStart_toStartOf="parent"
- app:layout_constraintEnd_toEndOf="parent"/>
+ app:layout_constraintEnd_toStartOf="@+id/get_atoms_from_camera_btn" />
+
+
\ No newline at end of file
diff --git a/app/src/main/res/layout/activity_text_recognition.xml b/app/src/main/res/layout/activity_text_recognition.xml
new file mode 100644
index 0000000..54b71a1
--- /dev/null
+++ b/app/src/main/res/layout/activity_text_recognition.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:id="@+id/container"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent" />
\ No newline at end of file
diff --git a/app/src/main/res/layout/fragment_text_recognition.xml b/app/src/main/res/layout/fragment_text_recognition.xml
new file mode 100644
index 0000000..12a5e0a
--- /dev/null
+++ b/app/src/main/res/layout/fragment_text_recognition.xml
@@ -0,0 +1,49 @@
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index e058d3f..4f6b16a 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -1,4 +1,5 @@
MagMoleculArGet
+ Camera