
I am using the CameraX ImageAnalysis (Analyzer) use case to detect circles in an image with OpenCV's Hough Circles, but even though I believe the analysis runs on a separate thread, the camera preview sometimes gets really slow when many circles are detected.

I understand that my detector implementation isn't the most efficient, but shouldn't that processing time show up only in the analysis rate, and not in the preview?

Much of the code is based on crysxd's CameraX-Object-Tracking.

This is my MainActivity:

class MainActivity : AppCompatActivity() {
    private lateinit var overlayTextureView: DetectionOverlayView

    private val camera
        get() = supportFragmentManager.findFragmentById(R.id.cameraFragment) as CameraFragment

    override fun onCreate(savedInstanceState: Bundle?) {
        OpenCVLoader.initDebug()

        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        if (Timber.treeCount() == 0) {
            Timber.plant(Timber.DebugTree())
        }

        overlayTextureView = findViewById(R.id.detectionOverlayView)
        camera.imageAnalyzer = ViewModelProviders.of(this).get(HoughPupilDetector::class.java)
    }
}

This is my CameraFragment initialization:

open class CameraFragment : Fragment() {
    var cameraRunning = false
        private set
    var imageAnalyzer: ThreadedImageAnalyzer? = null
        set(value) {
            field = value
            if (cameraRunning) {
                startCamera()
            }
        }

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View =
        inflater.inflate(R.layout.fragment_camera, container, false)

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)

        CameraPermissionHelper().requestCameraPermission(childFragmentManager) {
            if (it) {
                startCamera()
            } else {
                activity?.finish()
            }
        }
    }

    override fun onDestroyView() {
        super.onDestroyView()

        if (cameraRunning) {
            CameraX.unbindAll()
            cameraRunning = false
            Timber.i("Stopping camera")
        }
    }

    private fun startCamera() {
        preview.post {
            try {
                val usesCases = mutableListOf<UseCase>()

                // Make sure that there are no other use cases bound to CameraX
                CameraX.unbindAll()

                // Create configuration object for the viewfinder use case
                val previewConfig = onCreatePreviewConfigBuilder().build()
                usesCases.add(AutoFitPreviewBuilder.build(previewConfig, preview))

                // Setup image analysis pipeline that computes average pixel luminance in real time
                if (imageAnalyzer != null) {
                    val analyzerConfig = onCreateAnalyzerConfigBuilder().build()
                    usesCases.add(ImageAnalysis(analyzerConfig).apply {
                        analyzer = imageAnalyzer
                    })
                }

                // Bind use cases to lifecycle
                CameraX.bindToLifecycle(this, *usesCases.toTypedArray())
                cameraRunning = true
                Timber.i("Started camera with useCases=$usesCases")
            } catch (e: Exception) {
                Timber.e(e)
                AlertDialog.Builder(context)
                    .setMessage(getString(R.string.camera_error))
                    .setPositiveButton(android.R.string.ok) { _, _ ->
                        activity?.finish()
                    }
                    .show()
            }
        }
    }

    @Suppress("MemberVisibilityCanBePrivate")
    protected open fun onCreateAnalyzerConfigBuilder() = ImageAnalysisConfig.Builder().apply {

        // Use a worker thread for image analysis to prevent preview glitches
        setCallbackHandler(imageAnalyzer!!.getHandler())
        // In our analysis, we care more about the latest image than analyzing *every* image
        setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
        setTargetAspectRatio(Rational(1, 1))
        setTargetResolution(Size(preview.width, preview.height))
    }

    @Suppress("MemberVisibilityCanBePrivate")
    protected open fun onCreatePreviewConfigBuilder() = PreviewConfig.Builder().apply {
        setTargetAspectRatio(Rational(1, 1))
        setTargetResolution(Size(preview.width, preview.height))
    }
}

This is my base analyzer class (it implements the ThreadedImageAnalyzer interface):

abstract class PupilDetector(listener: PupilDetectionListener? = null) : ViewModel(), ThreadedImageAnalyzer {
    private val listeners = ArrayList<PupilDetectionListener>().apply { listener?.let { add(it) } }
    private val isBusy = AtomicBoolean(false)
    private val handlerThread = HandlerThread("PupilDetector").apply { start() }

    fun addListener(listener: PupilDetectionListener) = listeners.add(listener)

    override fun analyze(image: ImageProxy, rotationDegrees: Int) {
        if (isBusy.compareAndSet(false, true)) {
            Timber.d("Running analysis...")
            val pupil = detect(image, rotationDegrees)
            Timber.d("Analysis done.")
            isBusy.set(false)

            // listeners.forEach { it(pupil) }
        }
    }

    override fun getHandler() = Handler(handlerThread.looper)

    abstract fun detect(image: ImageProxy, rotationDegrees: Int): Pupil?
}

And this is my Hough Circles analyzer:

class HoughPupilDetector(listener: PupilDetectionListener? = null): PupilDetector(listener) {
    val maxCircles = 5

    override fun detect(image: ImageProxy, rotationDegrees: Int): Pupil? {
        val bitmap = image.toBitmap(rotationDegrees)
        val circles = detectCircles(bitmap)
        return if (circles.isNotEmpty()) {
            Pupil(circles[0].point, circles[0].r)
        } else {
            null
        }
    }

    private fun detectCircles(bitmap: Bitmap): List<Circle> {
        // Generate Mat object
        val img = Mat()
        Utils.bitmapToMat(bitmap, img)

        // Detect circles
        val cannyUpperThreshold = 100.0
        val minRadius = 10
        val maxRadius = 400
        val accumulator = 100.0
        val circles = Mat()
        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY)
        Imgproc.GaussianBlur(img, img, org.opencv.core.Size(3.0, 3.0), 1.0)
        Imgproc.HoughCircles(img, circles, Imgproc.CV_HOUGH_GRADIENT,
            2.0, 2.0 / 8.0, cannyUpperThreshold, accumulator,
            minRadius, maxRadius)
        Imgproc.cvtColor(img, img, Imgproc.COLOR_GRAY2BGR)

        // Convert Mat to list of circles
        val result = toCircles(circles)

        // Return detection
        return result
    }

    private fun toCircles(circles: Mat): List<Circle> {
        return if (circles.cols() > 0) {
            (0 until circles.cols().coerceAtMost(maxCircles)).map {
                val vCircle = circles.get(0, it)
                val pt = Point(vCircle[0].toInt(), vCircle[1].toInt())
                val radius = Math.round(vCircle[2]).toInt()
                Circle(pt, radius)
            }
        } else {
            emptyList()
        }
    }
}
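
The toBitmap(rotationDegrees) call in detect() is an extension function from my project, not a CameraX API, so I haven't pasted the real thing here. As a rough idea, a minimal sketch of that kind of conversion (assuming YUV_420_888 frames and ignoring row/pixel stride padding, and not necessarily my exact implementation) could look like this:

import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.ImageFormat
import android.graphics.Matrix
import android.graphics.Rect
import android.graphics.YuvImage
import androidx.camera.core.ImageProxy
import java.io.ByteArrayOutputStream

// Sketch only: packs the planes into NV21, round-trips through JPEG, then rotates.
// Assumes tightly packed YUV_420_888 planes (no row/pixel stride padding).
fun ImageProxy.toBitmap(rotationDegrees: Int): Bitmap {
    val yBuffer = planes[0].buffer
    val uBuffer = planes[1].buffer
    val vBuffer = planes[2].buffer
    val ySize = yBuffer.remaining()
    val uSize = uBuffer.remaining()
    val vSize = vBuffer.remaining()

    // NV21 layout is the full Y plane followed by interleaved V/U samples
    val nv21 = ByteArray(ySize + uSize + vSize)
    yBuffer.get(nv21, 0, ySize)
    vBuffer.get(nv21, ySize, vSize)
    uBuffer.get(nv21, ySize + vSize, uSize)

    // Encode to JPEG and decode back to a Bitmap (simple, but not fast)
    val yuvImage = YuvImage(nv21, ImageFormat.NV21, width, height, null)
    val jpegStream = ByteArrayOutputStream()
    yuvImage.compressToJpeg(Rect(0, 0, width, height), 90, jpegStream)
    val jpegBytes = jpegStream.toByteArray()
    val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)

    // Apply the rotation reported to the analyzer so the image is upright
    val matrix = Matrix().apply { postRotate(rotationDegrees.toFloat()) }
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}

The JPEG round trip is obviously not the cheapest path (feeding the Y plane straight into a Mat would avoid it), but that cost should only slow down the analysis, which is exactly what my question is about.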
herculanodavi
  • The CameraX library is in alpha stage, as its API surfaces aren't yet finalized. We do not recommend using Alpha libraries in production. Libraries should strictly avoid depending on Alpha libraries in production, as their API surfaces may change in source- and binary-incompatible ways. – Daniel Sep 12 '19 at 14:22
  • Yes, I get that. I'm just experimenting with CameraX and wanted to understand what I got wrong. – herculanodavi Sep 12 '19 at 14:27

1 Answer


I updated the CameraX dependencies from alpha01 to alpha05 and the glitches stopped happening.
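
For reference, the change is just the version bump in the app module's Gradle file, roughly like this (I'm assuming only camera-core and camera-camera2 here; adjust the artifact list to whatever your project actually uses):

// build.gradle.kts (app module) – artifact list is an assumption, adjust to your setup
dependencies {
    val cameraxVersion = "1.0.0-alpha05"   // was "1.0.0-alpha01"
    implementation("androidx.camera:camera-core:$cameraxVersion")
    implementation("androidx.camera:camera-camera2:$cameraxVersion")
}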

herculanodavi