-
Notifications
You must be signed in to change notification settings - Fork 0
WIP queueEvent #4
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
hannesa2
wants to merge
4
commits into
master
Choose a base branch
from
queueEvent
base: master
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,27 +17,31 @@ | |
| package com.android.example.camerax.tflite | ||
|
|
||
| import android.Manifest | ||
| import android.annotation.SuppressLint | ||
| import android.content.Context | ||
| import android.content.pm.PackageManager | ||
| import android.graphics.Bitmap | ||
| import android.graphics.Matrix | ||
| import android.graphics.RectF | ||
| import android.media.Image | ||
| import android.opengl.GLES20 | ||
| import android.opengl.GLSurfaceView | ||
| import android.os.Bundle | ||
| import android.util.Log | ||
| import android.util.Size | ||
| import android.view.View | ||
| import android.view.ViewGroup | ||
| import androidx.appcompat.app.AppCompatActivity | ||
| import androidx.camera.core.* | ||
| import androidx.camera.core.AspectRatio | ||
| import androidx.camera.core.CameraSelector | ||
| import androidx.camera.core.Preview | ||
| import androidx.camera.lifecycle.ProcessCameraProvider | ||
| import androidx.constraintlayout.widget.ConstraintLayout | ||
| import androidx.core.app.ActivityCompat | ||
| import androidx.core.content.ContextCompat | ||
| import androidx.lifecycle.LifecycleOwner | ||
| import com.google.ar.core.* | ||
| import com.google.ar.core.Config | ||
| import com.google.ar.core.Frame | ||
| import com.google.ar.core.InstantPlacementPoint | ||
| import com.google.ar.core.Session | ||
| import com.google.ar.core.exceptions.NotYetAvailableException | ||
| import kotlinx.android.synthetic.main.activity_camera.* | ||
| import org.tensorflow.lite.DataType | ||
|
|
@@ -60,7 +64,7 @@ import kotlin.random.Random | |
| class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | ||
|
|
||
| private lateinit var container: ConstraintLayout | ||
| private lateinit var bitmapBuffer: Bitmap | ||
| private var bitmapBuffer: Bitmap? = null | ||
| private lateinit var surfaceView: GLSurfaceView | ||
|
|
||
| private val permissions = listOf(Manifest.permission.CAMERA) | ||
|
|
@@ -79,17 +83,19 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
|
|
||
|
|
||
| private val tfImageProcessor by lazy { | ||
| val cropSize = minOf(bitmapBuffer.width, bitmapBuffer.height) | ||
| ImageProcessor.Builder() | ||
| .add(ResizeWithCropOrPadOp(cropSize, cropSize)) | ||
| .add( | ||
| ResizeOp( | ||
| tfInputSize.height, tfInputSize.width, ResizeOp.ResizeMethod.NEAREST_NEIGHBOR | ||
| bitmapBuffer?.let { | ||
| val cropSize = minOf(it.width, it.height) | ||
| ImageProcessor.Builder() | ||
| .add(ResizeWithCropOrPadOp(cropSize, cropSize)) | ||
| .add( | ||
| ResizeOp( | ||
| tfInputSize.height, tfInputSize.width, ResizeOp.ResizeMethod.NEAREST_NEIGHBOR | ||
| ) | ||
| ) | ||
| ) | ||
| .add(Rot90Op(imageRotationDegrees / 90)) | ||
| .add(NormalizeOp(0f, 1f)) | ||
| .build() | ||
| .add(Rot90Op(imageRotationDegrees / 90)) | ||
| .add(NormalizeOp(0f, 1f)) | ||
| .build() | ||
| } | ||
| } | ||
|
|
||
| private val tflite by lazy { | ||
|
|
@@ -142,10 +148,10 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| postRotate(imageRotationDegrees.toFloat()) | ||
| if (isFrontFacing) postScale(-1f, 1f) | ||
| } | ||
| val uprightImage = Bitmap.createBitmap( | ||
| bitmapBuffer, 0, 0, bitmapBuffer.width, bitmapBuffer.height, matrix, true | ||
| ) | ||
| image_predicted.setImageBitmap(uprightImage) | ||
| bitmapBuffer?.let { | ||
| val uprightImage = Bitmap.createBitmap(it, 0, 0, it.width, it.height, matrix, true) | ||
| image_predicted.setImageBitmap(uprightImage) | ||
| } | ||
| image_predicted.visibility = View.VISIBLE | ||
| } | ||
|
|
||
|
|
@@ -156,8 +162,7 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| } | ||
|
|
||
| /** Declare and bind preview and analysis use cases */ | ||
| @SuppressLint("UnsafeExperimentalUsageError") | ||
| private fun bindCameraUseCases() = view_finder.post { | ||
| private fun bindCameraUseCases() { | ||
|
|
||
| val cameraProviderFuture = ProcessCameraProvider.getInstance(this) | ||
| cameraProviderFuture.addListener({ | ||
|
|
@@ -174,25 +179,25 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| // Obtain the current frame from ARSession. When the configuration is set to | ||
| // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the | ||
| // camera framerate. | ||
| val frame: Frame = session.update() | ||
| val image: Image | ||
| try { | ||
| frame.acquireCameraImage().use { image -> | ||
| if (!::bitmapBuffer.isInitialized) { | ||
| Log.w("sessionUpdate", Thread.currentThread().name) | ||
|
|
||
| surfaceView.queueEvent { | ||
| //session.setCameraTextureName(0) | ||
| session!!.setCameraTextureNames(intArrayOf(0)) | ||
| val frame: Frame = session.update() | ||
| try { | ||
| frame.acquireCameraImage().use { image -> | ||
| // The image rotation and RGB image buffer are initialized only once | ||
| // the analyzer has started running | ||
| imageRotationDegrees = 0 | ||
| bitmapBuffer = Bitmap.createBitmap( | ||
| image.width, image.height, Bitmap.Config.ARGB_8888 | ||
| ) | ||
| bitmapBuffer = Bitmap.createBitmap(image.width, image.height, Bitmap.Config.ARGB_8888) | ||
| } | ||
| } catch (e: NotYetAvailableException) { | ||
| // This normally means that depth data is not available yet. This is normal so we will not | ||
| // spam the logcat with this. | ||
| } | ||
| } catch (e: NotYetAvailableException) { | ||
| // This normally means that depth data is not available yet. This is normal so we will not | ||
| // spam the logcat with this. | ||
| } | ||
|
|
||
|
|
||
| // Set up the image analysis use case which will process frames in real time | ||
| /*val imageAnalysis = ImageAnalysis.Builder() | ||
| .setTargetAspectRatio(AspectRatio.RATIO_4_3) | ||
|
|
@@ -207,55 +212,59 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| //image.use { converter.yuvToRgb(image, bitmapBuffer) } HOW to convert from yuv to rgb | ||
|
|
||
| // Process the image in Tensorflow | ||
| val tfImage = tfImageProcessor.process(tfImageBuffer.apply { load(bitmapBuffer) }) | ||
| bitmapBuffer?.let { bitmap -> | ||
| val tfImage = tfImageProcessor?.process(tfImageBuffer.apply { load(bitmap) }) | ||
|
|
||
| // Perform the object detection for the current frame | ||
| val predictions = detector.predict(tfImage) | ||
|
|
||
| // Report only the top prediction | ||
| reportPrediction(predictions.maxBy { it.score }) | ||
| tfImage?.let { | ||
| // Perform the object detection for the current frame | ||
| val predictions = detector.predict(it) | ||
|
|
||
| /*imageAnalysis.setAnalyzer(executor, ImageAnalysis.Analyzer { image -> | ||
| if (!::bitmapBuffer.isInitialized) { | ||
| // The image rotation and RGB image buffer are initialized only once | ||
| // the analyzer has started running | ||
| imageRotationDegrees = image.imageInfo.rotationDegrees | ||
| bitmapBuffer = Bitmap.createBitmap( | ||
| image.width, image.height, Bitmap.Config.ARGB_8888 | ||
| ) | ||
| } | ||
| // Report only the top prediction | ||
| surfaceView.queueEvent { reportPrediction(predictions.maxByOrNull { it.score }) } | ||
|
|
||
| // Early exit: image analysis is in paused state | ||
| if (pauseAnalysis) { | ||
| image.close() | ||
| return@Analyzer | ||
| } | ||
| /*imageAnalysis.setAnalyzer(executor, ImageAnalysis.Analyzer { image -> | ||
| if (!::bitmapBuffer.isInitialized) { | ||
| // The image rotation and RGB image buffer are initialized only once | ||
| // the analyzer has started running | ||
| imageRotationDegrees = image.imageInfo.rotationDegrees | ||
| bitmapBuffer = Bitmap.createBitmap( | ||
| image.width, image.height, Bitmap.Config.ARGB_8888 | ||
| ) | ||
| } | ||
|
|
||
| // Convert the image to RGB and place it in our shared buffer | ||
| image.use { converter.yuvToRgb(image.image!!, bitmapBuffer) } | ||
| // Early exit: image analysis is in paused state | ||
| if (pauseAnalysis) { | ||
| image.close() | ||
| return@Analyzer | ||
| } | ||
|
|
||
| // Process the image in Tensorflow | ||
| val tfImage = tfImageProcessor.process(tfImageBuffer.apply { load(bitmapBuffer) }) | ||
| // Convert the image to RGB and place it in our shared buffer | ||
| image.use { converter.yuvToRgb(image.image!!, bitmapBuffer) } | ||
|
|
||
| // Perform the object detection for the current frame | ||
| val predictions = detector.predict(tfImage) | ||
| // Process the image in Tensorflow | ||
| val tfImage = tfImageProcessor.process(tfImageBuffer.apply { load(bitmapBuffer) }) | ||
|
|
||
| // Report only the top prediction | ||
| reportPrediction(predictions.maxBy { it.score }) | ||
| // Perform the object detection for the current frame | ||
| val predictions = detector.predict(tfImage) | ||
|
|
||
| // Compute the FPS of the entire pipeline | ||
| val frameCount = 10 | ||
| if (++frameCounter % frameCount == 0) { | ||
| frameCounter = 0 | ||
| val now = System.currentTimeMillis() | ||
| val delta = now - lastFpsTimestamp | ||
| val fps = 1000 * frameCount.toFloat() / delta | ||
| Log.d(TAG, "FPS: ${"%.02f".format(fps)}") | ||
| lastFpsTimestamp = now | ||
| } | ||
| }) | ||
| */ | ||
| // Report only the top prediction | ||
| reportPrediction(predictions.maxBy { it.score }) | ||
|
|
||
| // Compute the FPS of the entire pipeline | ||
| val frameCount = 10 | ||
| if (++frameCounter % frameCount == 0) { | ||
| frameCounter = 0 | ||
| val now = System.currentTimeMillis() | ||
| val delta = now - lastFpsTimestamp | ||
| val fps = 1000 * frameCount.toFloat() / delta | ||
| Log.d(TAG, "FPS: ${"%.02f".format(fps)}") | ||
| lastFpsTimestamp = now | ||
| } | ||
| }) | ||
| */ | ||
| } | ||
| } | ||
| // Create a new camera selector each time, enforcing lens facing | ||
| val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build() | ||
|
|
||
|
|
@@ -271,13 +280,13 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| }, ContextCompat.getMainExecutor(this)) | ||
| } | ||
|
|
||
| private fun reportPrediction(prediction: ObjectDetectionHelper.ObjectPrediction?) = view_finder.post { | ||
| private fun reportPrediction(prediction: ObjectDetectionHelper.ObjectPrediction?) { | ||
|
|
||
| // Early exit: if prediction is not good enough, don't report it | ||
| if (prediction == null || prediction.score < ACCURACY_THRESHOLD) { | ||
| box_prediction.visibility = View.GONE | ||
| text_prediction.visibility = View.GONE | ||
| return@post | ||
| return | ||
| } | ||
|
|
||
| // Location has to be mapped to our local coordinates | ||
|
|
@@ -360,7 +369,7 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| if (!hasPermissions(this)) { | ||
| ActivityCompat.requestPermissions(this, permissions.toTypedArray(), permissionsRequestCode) | ||
| } else { | ||
| bindCameraUseCases() | ||
| surfaceView.queueEvent { bindCameraUseCases() } | ||
| } | ||
| session.resume() | ||
| surfaceView.onResume() | ||
|
|
@@ -375,14 +384,15 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| override fun onPause() { | ||
| super.onPause() | ||
| surfaceView.onPause() | ||
| GLES20.glGenTextures(1, IntArray(1), 0) | ||
| session.pause() | ||
| isFirstFrameAfterResume.set(true) | ||
| } | ||
|
|
||
| override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) { | ||
| super.onRequestPermissionsResult(requestCode, permissions, grantResults) | ||
| if (requestCode == permissionsRequestCode && hasPermissions(this)) { | ||
| bindCameraUseCases() | ||
| surfaceView.queueEvent { bindCameraUseCases() } | ||
| } else { | ||
| finish() // If we don't have the required permissions, we can't run | ||
| } | ||
|
|
@@ -416,14 +426,16 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
|
|
||
| override fun onSurfaceCreated(p0: GL10?, p1: EGLConfig?) { | ||
| GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f) | ||
|
|
||
| } | ||
|
|
||
| override fun onSurfaceChanged(p0: GL10?, p1: Int, p2: Int) { | ||
| GLES20.glViewport(0, 0, p1, p2) | ||
| } | ||
|
|
||
| override fun onDrawFrame(p0: GL10?) { | ||
|
|
||
| Log.w("sessionDraw", Thread.currentThread().name) | ||
| session!!.setCameraTextureNames(intArrayOf(0)) | ||
| val frame = session.update() | ||
|
|
||
| // Place an object on tap. | ||
|
|
@@ -433,6 +445,8 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| val approximateDistanceMeters = 2.0f | ||
| // Performs a ray cast given a screen tap position. | ||
| val results = frame.hitTestInstantPlacement(0F, 0F, approximateDistanceMeters) | ||
| val displayRotation = display!!.rotation | ||
| session.setDisplayGeometry(displayRotation, 256, 256) | ||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. google-ar/arcore-android-sdk#413 — temporary hack for "Invalid ray produced by view data" |
||
| if (results.isNotEmpty()) { | ||
| val point = results[0].trackable as InstantPlacementPoint | ||
| // Create an Anchor from the point's pose. | ||
|
|
@@ -451,3 +465,4 @@ class CameraActivity : AppCompatActivity(), GLSurfaceView.Renderer { | |
| } | ||
| } | ||
|
|
||
|
|
||
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I think I need to use SurfaceView here, but I am not sure how to obtain the texture ID for it. I don't see any similar helper function available.