Mark transform API public in 1.4
- Added a matrix to SurfaceRequest/SurfaceOutput. This matrix represents the mapping from the sensor coordinate system to the current buffer's coordinate system.
- Added a flag to SurfaceRequest, indicating whether the Surface contains the camera transformation. That information is used by UI elements such as TextureView/SurfaceView to transform the output. If the flag is present, it means that the caller needs to compensate for the camera orientation when using the TransformationInfo#getRotationDegrees() to transform the output. Otherwise, they can use the value of TransformationInfo#getRotationDegrees() directly.
Relnote: APIs for calculating the coordinate transformation from the sensor to the current buffer, with a flag indicating whether the Surface contains the camera orientation info.
Bug: 292286071
Test: manual test and ./gradlew bOS
Change-Id: I59096cca934ee680dc2d57ac52fe2a1a252a29d1
diff --git a/camera/camera-core/api/current.txt b/camera/camera-core/api/current.txt
index 57be5be..92f0b23 100644
--- a/camera/camera-core/api/current.txt
+++ b/camera/camera-core/api/current.txt
@@ -491,6 +491,7 @@
public interface SurfaceOutput extends java.io.Closeable {
method public void close();
+ method public default android.graphics.Matrix getSensorToBufferTransform();
method public android.util.Size getSize();
method public android.view.Surface getSurface(java.util.concurrent.Executor, androidx.core.util.Consumer<androidx.camera.core.SurfaceOutput.Event!>);
method public int getTargets();
@@ -531,7 +532,10 @@
@com.google.auto.value.AutoValue public abstract static class SurfaceRequest.TransformationInfo {
method public abstract android.graphics.Rect getCropRect();
+ method public abstract boolean getMirroring();
method public abstract int getRotationDegrees();
+ method public abstract android.graphics.Matrix getSensorToBufferTransform();
+ method public abstract boolean hasCameraTransform();
}
public static interface SurfaceRequest.TransformationInfoListener {
diff --git a/camera/camera-core/api/restricted_current.txt b/camera/camera-core/api/restricted_current.txt
index 57be5be..92f0b23 100644
--- a/camera/camera-core/api/restricted_current.txt
+++ b/camera/camera-core/api/restricted_current.txt
@@ -491,6 +491,7 @@
public interface SurfaceOutput extends java.io.Closeable {
method public void close();
+ method public default android.graphics.Matrix getSensorToBufferTransform();
method public android.util.Size getSize();
method public android.view.Surface getSurface(java.util.concurrent.Executor, androidx.core.util.Consumer<androidx.camera.core.SurfaceOutput.Event!>);
method public int getTargets();
@@ -531,7 +532,10 @@
@com.google.auto.value.AutoValue public abstract static class SurfaceRequest.TransformationInfo {
method public abstract android.graphics.Rect getCropRect();
+ method public abstract boolean getMirroring();
method public abstract int getRotationDegrees();
+ method public abstract android.graphics.Matrix getSensorToBufferTransform();
+ method public abstract boolean hasCameraTransform();
}
public static interface SurfaceRequest.TransformationInfoListener {
diff --git a/camera/camera-core/src/main/java/androidx/camera/core/SurfaceOutput.java b/camera/camera-core/src/main/java/androidx/camera/core/SurfaceOutput.java
index 1c24d84e..943b069 100644
--- a/camera/camera-core/src/main/java/androidx/camera/core/SurfaceOutput.java
+++ b/camera/camera-core/src/main/java/androidx/camera/core/SurfaceOutput.java
@@ -156,8 +156,7 @@
*
* <p>The value is a mapping from sensor coordinates to buffer coordinates, which is,
* from the rect of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE} to the
- * rect defined by {@code (0, 0, SurfaceRequest#getResolution#getWidth(),
- * SurfaceRequest#getResolution#getHeight())}. The matrix can
+ * rect defined by {@code (0, 0, #getSize()#getWidth(), #getSize()#getHeight())}. The matrix can
* be used to map the coordinates from one {@link UseCase} to another. For example,
* detecting face with {@link ImageAnalysis}, and then highlighting the face in
* {@link Preview}.
@@ -174,7 +173,6 @@
* analysisToEffect.postConcat(sensorToEffect);
* </pre></code>
*/
- @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@NonNull
default Matrix getSensorToBufferTransform() {
return new Matrix();
diff --git a/camera/camera-core/src/main/java/androidx/camera/core/SurfaceRequest.java b/camera/camera-core/src/main/java/androidx/camera/core/SurfaceRequest.java
index e2d2292..e110e52 100644
--- a/camera/camera-core/src/main/java/androidx/camera/core/SurfaceRequest.java
+++ b/camera/camera-core/src/main/java/androidx/camera/core/SurfaceRequest.java
@@ -892,23 +892,45 @@
/**
* Whether the {@link Surface} contains the camera transform.
*
- * <p>The {@link Surface} may contain a transformation, which will be used by Android
- * components such as {@link TextureView} and {@link SurfaceView} to transform the output.
- * The app may need to handle the transformation differently based on whether this value
- * exists.
+ * <p>When the Surface is connected to the camera directly, the camera writes the
+ * camera orientation value to the Surface. For example, the value can be retrieved via
+ * {@link SurfaceTexture#getTransformMatrix(float[])}. Android components such
+ * as {@link TextureView} and {@link SurfaceView} use the value to transform the output.
+ * When the Surface is not connected to the camera directly, for example, when it was
+ * copied with OpenGL, the Surface will not contain the camera orientation value.
*
- * <ul>
- * <li>If the producer is the camera, then the {@link Surface} will contain a
- * transformation that represents the camera orientation. In that case, this method will
- * return {@code true}.
- * <li>If the producer is not the camera, for example, if the stream has been edited by
- * CameraX, then the {@link Surface} will not contain any transformation. In that case,
- * this method will return {@code false}.
- * </ul>
+ * <p>The app may need to transform the UI differently based on this flag. If this value
+ * is true, the app only needs to apply the Surface transformation; otherwise, the app
+ * needs to apply the value of {@link #getRotationDegrees()}. For example, if the preview
+ * is displayed in a {@link TextureView}:
*
- * @return true if the producer writes the camera transformation to the {@link Surface}.
+ * <pre><code>
+ * int rotationDegrees;
+ * if (surfaceRequest.hasCameraTransform()) {
+ * switch (textureView.getDisplay().getRotation()) {
+ * case Surface.ROTATION_0:
+ * rotationDegrees = 0;
+ * break;
+ * case Surface.ROTATION_90:
+ * rotationDegrees = 90;
+ * break;
+ * case Surface.ROTATION_180:
+ * rotationDegrees = 180;
+ * break;
+ * case Surface.ROTATION_270:
+ * rotationDegrees = 270;
+ * break;
+ * }
+ * } else {
+ * rotationDegrees = transformationInfo.getRotationDegrees();
+ * }
+ * Matrix textureViewTransform = new Matrix();
+ * textureViewTransform.postRotate(rotationDegrees);
+ * textureView.setTransform(textureViewTransform);
+ * </code></pre>
+ *
+ * @return true if the {@link Surface} contains the camera transformation.
*/
- @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public abstract boolean hasCameraTransform();
/**
@@ -916,15 +938,24 @@
*
* <p>The value is a mapping from sensor coordinates to buffer coordinates, which is,
* from the rect of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE} to the
- * rect defined by {@code (0, 0, SurfaceRequest#getResolution#getWidth(),
- * SurfaceRequest#getResolution#getHeight())}. The matrix can
- * be used to map the coordinates from one {@link UseCase} to another. For example,
- * detecting face with {@link ImageAnalysis}, and then highlighting the face in
+ * rect defined by {@code (0, 0, #getResolution#getWidth(), #getResolution#getHeight())}.
+ * The matrix can be used to map the coordinates from one {@link UseCase} to another. For
+ * example, detecting a face with {@link ImageAnalysis}, and then highlighting the face in
* {@link Preview}.
+ *
+ * <p>Code sample
+ * <code><pre>
+ * // Get the transformation from sensor to effect input.
+ * Matrix sensorToEffect = surfaceRequest.getSensorToBufferTransform();
+ * // Get the transformation from sensor to ImageAnalysis.
+ * Matrix sensorToAnalysis = imageProxy.getSensorToBufferTransform();
+ * // Concatenate the two matrices to get the transformation from ImageAnalysis to effect.
+ * Matrix analysisToEffect = new Matrix();
+ * sensorToAnalysis.invert(analysisToEffect);
+ * analysisToEffect.postConcat(sensorToEffect);
+ * </pre></code>
*/
- // TODO(b/292286071): make this public in 1.4 alpha.
@NonNull
- @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public abstract Matrix getSensorToBufferTransform();
/**
@@ -934,8 +965,6 @@
* example, for front camera preview, the buffer should usually be mirrored. The
* mirroring should be applied after the {@link #getRotationDegrees()} is applied.
*/
- // TODO(b/292286071): make this public in 1.4 alpha.
- @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public abstract boolean getMirroring();
/**