Add an instrumentation unit test for TransformationFrameProcessor.

The test covers the same cases as FrameEditorDataProcessingTest,
because the main functionality of FrameEditor is currently to apply a
transformation matrix using a TransformationFrameProcessor.

PiperOrigin-RevId: 431642066
hschlueter 2022-03-01 11:00:36 +00:00 committed by Ian Baker
parent d187df9afe
commit 422dfe0f95
3 changed files with 260 additions and 14 deletions
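
As a quick orientation before the diff: each new test case configures an android.graphics.Matrix, draws the first-frame bitmap (uploaded as an OpenGL texture) through the TransformationFrameProcessor, reads the GL framebuffer back into a Bitmap, and compares it against a golden PNG. Below is a condensed sketch of that flow, using only identifiers that appear in the diff (texture, bitmap, and framebuffer setup happens in the test's @Before method):

Matrix rotate90Matrix = new Matrix();
rotate90Matrix.postRotate(/* degrees= */ 90);
GlFrameProcessor processor =
    new TransformationFrameProcessor(getApplicationContext(), rotate90Matrix);
processor.initialize();
// Draw the input texture through the processor into the currently focused framebuffer.
processor.updateProgramAndDraw(inputTexId, /* presentationTimeNs= */ 0);
Bitmap actualBitmap =
    BitmapTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
// Compare against the golden image within the allowed average pixel difference.
float averagePixelAbsoluteDifference =
    BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);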

BitmapTestUtil.java

@@ -24,9 +24,13 @@ import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.PixelFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.annotation.Nullable;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import java.io.File;
import java.io.FileOutputStream;
@@ -51,6 +55,19 @@ public class BitmapTestUtil {
"media/bitmap/sample_mp4_first_frame_scale_narrow.png";
public static final String ROTATE_90_EXPECTED_OUTPUT_PNG_ASSET_STRING =
"media/bitmap/sample_mp4_first_frame_rotate90.png";
/**
* Maximum allowed average pixel difference between the expected and actual edited images for the
* test to pass. The value is chosen so that differences in decoder behavior across emulator
* versions don't affect whether the test passes for most emulators, but substantial distortions
* introduced by changes in the behavior of the frame editor will cause the test to fail.
*
* <p>To run this test on physical devices, please use a value of 5f, rather than 0.1f. This
* higher value will ignore some very small errors, including small differences caused by
* graphics implementations. When the difference is close to the threshold, manually
* inspect expected/actual bitmaps to confirm failure, as it's possible this is caused by a
* difference in the codec or graphics implementation as opposed to a FrameEditor issue.
*/
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 0.1f;
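The threshold above is compared against the result of BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888, which is outside this diff. For orientation only, here is a minimal sketch of how such a metric could be computed; this is a hypothetical standalone helper, not the actual implementation, and it ignores alpha and assumes both bitmaps have the same dimensions:
private static float averagePixelAbsoluteDifferenceSketch(Bitmap expected, Bitmap actual) {
  int width = actual.getWidth();
  int height = actual.getHeight();
  long sumOfAbsoluteDifferences = 0;
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      int expectedColor = expected.getPixel(x, y);
      int actualColor = actual.getPixel(x, y);
      // Sum the per-channel absolute differences for this pixel.
      sumOfAbsoluteDifferences +=
          Math.abs(Color.red(expectedColor) - Color.red(actualColor))
              + Math.abs(Color.green(expectedColor) - Color.green(actualColor))
              + Math.abs(Color.blue(expectedColor) - Color.blue(actualColor));
    }
  }
  // Average over all pixels and the three color channels.
  return (float) sumOfAbsoluteDifferences / (width * height * 3);
}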
/**
* Reads a bitmap from the specified asset location.
@@ -176,5 +193,56 @@ public class BitmapTestUtil {
}
}
/**
* Creates a bitmap with the values of the current OpenGL framebuffer.
*
* <p>This method may block until any previously called OpenGL commands are complete.
*
* @param width The width of the pixel rectangle to read.
* @param height The height of the pixel rectangle to read.
* @return A {@link Bitmap} with the framebuffer's values.
*/
public static Bitmap createArgb8888BitmapFromCurrentGlFramebuffer(int width, int height) {
ByteBuffer rgba8888Buffer = ByteBuffer.allocateDirect(width * height * 4);
GLES20.glReadPixels(
0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgba8888Buffer);
GlUtil.checkGlError();
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
// According to https://www.khronos.org/opengl/wiki/Pixel_Transfer#Endian_issues,
// the colors will have the order RGBA in client memory. This is what the bitmap expects:
// https://developer.android.com/reference/android/graphics/Bitmap.Config#ARGB_8888.
bitmap.copyPixelsFromBuffer(rgba8888Buffer);
// Flip the bitmap as its positive y-axis points down while OpenGL's positive y-axis points up.
return flipBitmapVertically(bitmap);
}
/**
* Creates a {@link GLES20#GL_TEXTURE_2D 2-dimensional OpenGL texture} with the bitmap's contents.
*
* @param bitmap A {@link Bitmap}.
* @return The identifier of the newly created texture.
*/
public static int createGlTextureFromBitmap(Bitmap bitmap) {
int texId = GlUtil.createTexture(bitmap.getWidth(), bitmap.getHeight());
// Put the flipped bitmap in the OpenGL texture as the bitmap's positive y-axis points down
// while OpenGL's positive y-axis points up.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, flipBitmapVertically(bitmap), 0);
GlUtil.checkGlError();
return texId;
}
private static Bitmap flipBitmapVertically(Bitmap bitmap) {
Matrix flip = new Matrix();
flip.postScale(1f, -1f);
return Bitmap.createBitmap(
bitmap,
/* x= */ 0,
/* y= */ 0,
bitmap.getWidth(),
bitmap.getHeight(),
flip,
/* filter= */ true);
}
private BitmapTestUtil() {}
}

FrameEditorDataProcessingTest.java

@@ -17,6 +17,7 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.BitmapTestUtil.FIRST_FRAME_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
import static androidx.media3.transformer.BitmapTestUtil.ROTATE_90_EXPECTED_OUTPUT_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.SCALE_NARROW_EXPECTED_OUTPUT_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.TRANSLATE_RIGHT_EXPECTED_OUTPUT_PNG_ASSET_STRING;
@@ -47,26 +48,17 @@ import org.junit.runner.RunWith;
*
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* #MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output bitmaps.
* BitmapTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps.
*/
@RunWith(AndroidJUnit4.class)
public final class FrameEditorDataProcessingTest {
// TODO(b/214975934): Once FrameEditor is converted to a FrameProcessorChain, replace these tests
// with a test for a few example combinations of GlFrameProcessors rather than testing all use
// cases of TransformationFrameProcessor.
/** Input video of which we only use the first frame. */
private static final String INPUT_MP4_ASSET_STRING = "media/mp4/sample.mp4";
/**
* Maximum allowed average pixel difference between the expected and actual edited images for the
* test to pass. The value is chosen so that differences in decoder behavior across emulator
* versions shouldn't affect whether the test passes, but substantial distortions introduced by
* changes in the behavior of the frame editor will cause the test to fail.
*
* <p>To run this test on physical devices, please use a value of 5f, rather than 0.1f. This
* higher value will ignore some very small errors, but will allow for some differences caused by
* graphics implementations to be ignored. When the difference is close to the threshold, manually
* inspect expected/actual bitmaps to confirm failure, as it's possible this is caused by a
* difference in the codec or graphics implementation as opposed to a FrameEditor issue.
*/
private static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 0.1f;
/** Timeout for dequeueing buffers from the codec, in microseconds. */
private static final int DEQUEUE_TIMEOUT_US = 5_000_000;
/** Time to wait for the frame editor's input to be populated by the decoder, in milliseconds. */

TransformationFrameProcessorTest.java (new file)

@@ -0,0 +1,186 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.transformer.BitmapTestUtil.FIRST_FRAME_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
import static androidx.media3.transformer.BitmapTestUtil.ROTATE_90_EXPECTED_OUTPUT_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.SCALE_NARROW_EXPECTED_OUTPUT_PNG_ASSET_STRING;
import static androidx.media3.transformer.BitmapTestUtil.TRANSLATE_RIGHT_EXPECTED_OUTPUT_PNG_ASSET_STRING;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.util.GlUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Pixel test for frame processing via {@link TransformationFrameProcessor}.
*
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output bitmaps
* as recommended in {@link FrameEditorDataProcessingTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class TransformationFrameProcessorTest {
static {
GlUtil.glAssertionsEnabled = true;
}
private final EGLDisplay eglDisplay = GlUtil.createEglDisplay();
private final EGLContext eglContext = GlUtil.createEglContext(eglDisplay);
private @MonotonicNonNull GlFrameProcessor transformationFrameProcessor;
private int inputTexId;
private int outputTexId;
// TODO(b/214975934): Once the frame processors are allowed to have different input and output
// dimensions, get the output dimensions from the frame processor.
private int width;
private int height;
@Before
public void createTextures() throws IOException {
Bitmap inputBitmap = BitmapTestUtil.readBitmap(FIRST_FRAME_PNG_ASSET_STRING);
width = inputBitmap.getWidth();
height = inputBitmap.getHeight();
// This surface is needed for focusing a render target, but the tests don't write output to it.
// The frame processor's output is written to a framebuffer instead.
EGLSurface eglSurface = GlUtil.getEglSurface(eglDisplay, new SurfaceTexture(false));
GlUtil.focusEglSurface(eglDisplay, eglContext, eglSurface, width, height);
inputTexId =
BitmapTestUtil.createGlTextureFromBitmap(
BitmapTestUtil.readBitmap(FIRST_FRAME_PNG_ASSET_STRING));
outputTexId = GlUtil.createTexture(width, height);
int frameBuffer = GlUtil.createFboForTexture(outputTexId);
GlUtil.focusFramebuffer(eglDisplay, eglContext, eglSurface, frameBuffer, width, height);
}
@After
public void release() {
if (transformationFrameProcessor != null) {
transformationFrameProcessor.release();
}
GlUtil.destroyEglContext(eglDisplay, eglContext);
}
@Test
public void updateProgramAndDraw_noEdits_producesExpectedOutput() throws Exception {
final String testId = "updateProgramAndDraw_noEdits";
Matrix identityMatrix = new Matrix();
transformationFrameProcessor =
new TransformationFrameProcessor(getApplicationContext(), identityMatrix);
transformationFrameProcessor.initialize();
Bitmap expectedBitmap = BitmapTestUtil.readBitmap(FIRST_FRAME_PNG_ASSET_STRING);
transformationFrameProcessor.updateProgramAndDraw(inputTexId, /* presentationTimeNs= */ 0);
Bitmap actualBitmap =
BitmapTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888(
expectedBitmap, actualBitmap, testId);
BitmapTestUtil.saveTestBitmapToCacheDirectory(
testId, /* bitmapLabel= */ "actual", actualBitmap, /* throwOnFailure= */ false);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
@Test
public void updateProgramAndDraw_translateRight_producesExpectedOutput() throws Exception {
final String testId = "updateProgramAndDraw_translateRight";
Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
transformationFrameProcessor =
new TransformationFrameProcessor(getApplicationContext(), translateRightMatrix);
transformationFrameProcessor.initialize();
Bitmap expectedBitmap =
BitmapTestUtil.readBitmap(TRANSLATE_RIGHT_EXPECTED_OUTPUT_PNG_ASSET_STRING);
transformationFrameProcessor.updateProgramAndDraw(inputTexId, /* presentationTimeNs= */ 0);
Bitmap actualBitmap =
BitmapTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888(
expectedBitmap, actualBitmap, testId);
BitmapTestUtil.saveTestBitmapToCacheDirectory(
testId, /* bitmapLabel= */ "actual", actualBitmap, /* throwOnFailure= */ false);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
@Test
public void updateProgramAndDraw_scaleNarrow_producesExpectedOutput() throws Exception {
final String testId = "updateProgramAndDraw_scaleNarrow";
Matrix scaleNarrowMatrix = new Matrix();
scaleNarrowMatrix.postScale(.5f, 1.2f);
transformationFrameProcessor =
new TransformationFrameProcessor(getApplicationContext(), scaleNarrowMatrix);
transformationFrameProcessor.initialize();
Bitmap expectedBitmap =
BitmapTestUtil.readBitmap(SCALE_NARROW_EXPECTED_OUTPUT_PNG_ASSET_STRING);
transformationFrameProcessor.updateProgramAndDraw(inputTexId, /* presentationTimeNs= */ 0);
Bitmap actualBitmap =
BitmapTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888(
expectedBitmap, actualBitmap, testId);
BitmapTestUtil.saveTestBitmapToCacheDirectory(
testId, /* bitmapLabel= */ "actual", actualBitmap, /* throwOnFailure= */ false);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
@Test
public void updateProgramAndDraw_rotate90_producesExpectedOutput() throws Exception {
final String testId = "updateProgramAndDraw_rotate90";
// TODO(b/213190310): After creating a Presentation class, move VideoSamplePipeline
// resolution-based adjustments (ex. in cl/419619743) to that Presentation class, so we can
// test that rotation doesn't distort the image.
Matrix rotate90Matrix = new Matrix();
rotate90Matrix.postRotate(/* degrees= */ 90);
transformationFrameProcessor =
new TransformationFrameProcessor(getApplicationContext(), rotate90Matrix);
transformationFrameProcessor.initialize();
Bitmap expectedBitmap = BitmapTestUtil.readBitmap(ROTATE_90_EXPECTED_OUTPUT_PNG_ASSET_STRING);
transformationFrameProcessor.updateProgramAndDraw(inputTexId, /* presentationTimeNs= */ 0);
Bitmap actualBitmap =
BitmapTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapTestUtil.getAveragePixelAbsoluteDifferenceArgb8888(
expectedBitmap, actualBitmap, testId);
BitmapTestUtil.saveTestBitmapToCacheDirectory(
testId, /* bitmapLabel= */ "actual", actualBitmap, /* throwOnFailure= */ false);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
}