Compositor: Add support for multiple bitmaps, and add testrunner

* Allow more than one input bitmap at a time per input source.
* Allow Compositor to take in and set up an Executor; otherwise, Compositor
resources may be created on one thread and accessed on another. (A usage
sketch follows the file summary below.)
* Add a Compositor TestRunner to enable more test-code reuse.
* Update VideoFrameProcessingTaskExecutor to use a new onError listener, so
that it's more reusable in non-DVFP (DefaultVideoFrameProcessor) contexts,
such as Compositor.

PiperOrigin-RevId: 547206053
Authored by huangdarwin on 2023-07-11 17:03:09 +01:00; committed by Rohit Singh
parent 4983d4f339
commit c122c0ca05
10 changed files with 345 additions and 151 deletions
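To illustrate the Executor change, here is a minimal usage sketch: one GL ExecutorService shared by a DefaultVideoFrameProcessor.Factory and a VideoCompositor, so that GL resources are created and accessed on a single thread. The constructor parameters follow the diffs below, but the context, GlObjectsProvider, and listener bodies are hypothetical placeholders, not part of this commit.

import android.content.Context;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.VideoCompositor;
import java.util.concurrent.ExecutorService;

final class SharedGlThreadSketch {
  static void setUp(Context context, GlObjectsProvider glObjectsProvider) {
    // One GL thread for both components. Because the executor is passed in, the caller
    // must shut it down (see the VideoCompositor constructor javadoc below).
    ExecutorService sharedGlExecutor = Util.newSingleThreadExecutor("Effect:Shared:GlThread");
    VideoCompositor videoCompositor =
        new VideoCompositor(
            context,
            glObjectsProvider,
            sharedGlExecutor,
            /* errorListener= */ exception -> Log.e("App", "Compositing error", exception),
            /* textureOutputListener= */
            (texture, presentationTimeUs, releaseCallback, syncObject) ->
                releaseCallback.release(presentationTimeUs),
            /* textureOutputCapacity= */ 1);
    DefaultVideoFrameProcessor.Factory factory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setGlObjectsProvider(glObjectsProvider)
            .setExecutorService(sharedGlExecutor) // Same GL thread as the compositor.
            .build();
    // ... feed each processor's texture output into videoCompositor, then release both
    // and shut down sharedGlExecutor.
  }
}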

View File

@@ -132,8 +132,7 @@ public interface VideoFrameProcessor {
/**
* Called when an exception occurs during asynchronous video frame processing.
*
* <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link VideoFrameProcessor} should be released.
* <p>Using {@code VideoFrameProcessor} after an error happens is undefined behavior.
*/
void onError(VideoFrameProcessingException exception);
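Under the tightened contract above, the only safe reaction to onError is to stop using the instance and release it. A hedged sketch of such a handler (class name and log tag are hypothetical; a real app would likely post the release to the thread that owns the processor):

import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Log;

final class ErrorHandlingSketch {
  /** To be called from {@link VideoFrameProcessor.Listener#onError}. */
  static void handleError(
      VideoFrameProcessor videoFrameProcessor, VideoFrameProcessingException exception) {
    Log.e("App", "Video frame processing failed", exception);
    // Any further use of videoFrameProcessor is undefined behavior, so release it and
    // create a new instance if processing should continue.
    videoFrameProcessor.release();
  }
}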

View File

@@ -690,6 +690,9 @@ public final class GlUtil {
/**
* Destroys the {@link EGLContext} identified by the provided {@link EGLDisplay} and {@link
* EGLContext}.
*
* <p>This is a no-op if called on already-destroyed {@link EGLDisplay} and {@link EGLContext}
* instances.
*/
@RequiresApi(17)
public static void destroyEglContext(
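The no-op guarantee simplifies release paths that may run before, after, or without GL setup. A minimal sketch (the owning class and its fields are hypothetical):

import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import androidx.annotation.RequiresApi;
import androidx.media3.common.util.GlUtil;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

@RequiresApi(17)
final class GlOwnerSketch {
  private @MonotonicNonNull EGLDisplay eglDisplay;
  private @MonotonicNonNull EGLContext eglContext;

  void release() throws GlUtil.GlException {
    // No-op when the display/context were never created or were already destroyed.
    GlUtil.destroyEglContext(eglDisplay, eglContext);
  }
}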

View File

@@ -99,7 +99,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** A factory for {@link DefaultVideoFrameProcessor} instances. */
public static final class Factory implements VideoFrameProcessor.Factory {
private static final String THREAD_NAME = "Effect:GlThread";
private static final String THREAD_NAME = "Effect:DefaultVideoFrameProcessor:GlThread";
/** A builder for {@link DefaultVideoFrameProcessor.Factory} instances. */
public static final class Builder {
@@ -285,7 +285,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
executorService == null ? Util.newSingleThreadExecutor(THREAD_NAME) : executorService;
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(
instanceExecutorService, shouldShutdownExecutorService, listener);
instanceExecutorService, shouldShutdownExecutorService, listener::onError);
Future<DefaultVideoFrameProcessor> defaultVideoFrameProcessorFuture =
instanceExecutorService.submit(
@@ -554,9 +554,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void release() {
try {
videoFrameProcessingTaskExecutor.release(/* releaseTask= */ this::releaseGlObjects);
} catch (InterruptedException unexpected) {
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(unexpected);
throw new IllegalStateException(e);
}
}

View File

@@ -19,35 +19,52 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.GLES20;
import androidx.annotation.GuardedBy;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A basic VideoCompositor that takes in frames from exactly 2 input sources and combines it to one
* output. Only tested for 2 frames in, 1 frame out for now.
* output.
*/
@UnstableApi
public final class VideoCompositor {
// TODO: b/262694346 - Flesh out this implementation by doing the following:
// * Create on a shared VideoFrameProcessingTaskExecutor with VideoFrameProcessor instances.
// * >1 input/output frame per source.
// * Handle matched timestamps.
// * Handle mismatched timestamps
// * Before allowing customization of this class, add an interface, and rename this class to
// DefaultCompositor.
/** Listener for errors. */
public interface ErrorListener {
/**
* Called when an exception occurs during asynchronous frame compositing.
*
* <p>Using {@code VideoCompositor} after an error happens is undefined behavior.
*/
void onError(VideoFrameProcessingException exception);
}
private static final String THREAD_NAME = "Effect:VideoCompositor:GlThread";
private static final String TAG = "VideoCompositor";
private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl";
private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl";
@@ -55,17 +72,30 @@ public final class VideoCompositor {
private final Context context;
private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
private final GlObjectsProvider glObjectsProvider;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// List of queues of unprocessed frames for each input source.
@GuardedBy("this")
private final List<Queue<InputFrameInfo>> inputFrameInfos;
private final TexturePool outputTexturePool;
// Only used on the GL Thread.
private @MonotonicNonNull EGLContext eglContext;
private @MonotonicNonNull EGLDisplay eglDisplay;
private @MonotonicNonNull GlProgram glProgram;
private long syncObject;
/**
* Creates an instance.
*
* <p>If a non-null {@code executorService} is set, the {@link ExecutorService} must be
* {@linkplain ExecutorService#shutdown shut down} by the caller.
*/
public VideoCompositor(
Context context,
GlObjectsProvider glObjectsProvider,
@Nullable ExecutorService executorService,
ErrorListener errorListener,
DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener,
@IntRange(from = 1) int textureOutputCapacity) {
this.context = context;
@@ -75,6 +105,16 @@ public final class VideoCompositor {
inputFrameInfos = new ArrayList<>();
outputTexturePool =
new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
boolean ownsExecutor = executorService == null;
ExecutorService instanceExecutorService =
ownsExecutor ? Util.newSingleThreadExecutor(THREAD_NAME) : checkNotNull(executorService);
videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(
instanceExecutorService,
/* shouldShutdownExecutorService= */ ownsExecutor,
errorListener::onError);
videoFrameProcessingTaskExecutor.submit(this::setupGlObjects);
}
/**
@@ -86,7 +126,6 @@ public final class VideoCompositor {
return inputFrameInfos.size() - 1;
}
// Below methods must be called on the GL thread.
/**
* Queues an input texture to be composited, for example from an upstream {@link
* DefaultVideoFrameProcessor.TextureOutputListener}.
@@ -94,7 +133,7 @@
* <p>Each input source must have a unique {@code inputId} returned from {@link
* #registerInputSource}.
*/
public void queueInputTexture(
public synchronized void queueInputTexture(
int inputId,
GlTextureInfo inputTexture,
long presentationTimeUs,
@@ -104,14 +143,35 @@
new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo);
if (isReadyToComposite()) {
compositeToOutputTexture();
videoFrameProcessingTaskExecutor.submit(
() -> {
if (isReadyToComposite()) {
compositeToOutputTexture();
}
});
}
public void release() {
try {
videoFrameProcessingTaskExecutor.release(/* releaseTask= */ this::releaseGlObjects);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(e);
}
}
private boolean isReadyToComposite() {
// Below methods must be called on the GL thread.
private void setupGlObjects() throws GlUtil.GlException {
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
EGLContext eglContext =
glObjectsProvider.createEglContext(
eglDisplay, /* openGlVersion= */ 2, GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888);
glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
}
private synchronized boolean isReadyToComposite() {
// TODO: b/262694346 - Use timestamps to determine when to composite instead of number of
// frames.
for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
if (checkNotNull(inputFrameInfos.get(inputId)).isEmpty()) {
return false;
@@ -120,13 +180,14 @@
return true;
}
private void compositeToOutputTexture() throws VideoFrameProcessingException {
private synchronized void compositeToOutputTexture() throws VideoFrameProcessingException {
List<InputFrameInfo> framesToComposite = new ArrayList<>();
for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove());
}
ensureGlProgramConfigured();
// TODO: b/262694346 -
// * Support an arbitrary number of inputs.
// * Allow different frame dimensions.
@@ -150,8 +211,9 @@
// TODO: b/262694346 - Use presentationTimeUs here for freeing textures.
textureOutputListener.onTextureRendered(
checkNotNull(outputTexture),
/* presentationTimeUs= */ 0,
(presentationTimeUs) -> outputTexturePool.freeTexture(),
/* presentationTimeUs= */ framesToComposite.get(0).presentationTimeUs,
(presentationTimeUs) ->
videoFrameProcessingTaskExecutor.submit(outputTexturePool::freeTexture),
syncObject);
} catch (GlUtil.GlException e) {
throw VideoFrameProcessingException.from(e);
@@ -194,6 +256,24 @@
glProgram.bindAttributesAndUniforms();
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
GlUtil.checkGlError();
}
private void releaseGlObjects() {
try {
outputTexturePool.deleteAllTextures();
if (glProgram != null) {
glProgram.delete();
}
} catch (Exception e) {
Log.e(TAG, "Error releasing GL resources", e);
} finally {
try {
GlUtil.destroyEglContext(eglDisplay, eglContext);
} catch (GlUtil.GlException e) {
Log.e(TAG, "Error releasing GL context", e);
}
}
}
/** Holds information on a frame and how to release it. */
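Putting the API above together: the compositor produces one output frame once every registered source has queued a frame. A hedged usage sketch (the input textures, context, and provider come from elsewhere; the no-op release callbacks and the immediate release are shorthand, whereas the pixel test below waits on a latch before releasing):

import android.content.Context;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.util.Log;
import androidx.media3.effect.VideoCompositor;

final class CompositorUsageSketch {
  static void compositeOnce(
      Context context,
      GlObjectsProvider glObjectsProvider,
      GlTextureInfo textureFromSource1,
      GlTextureInfo textureFromSource2) {
    VideoCompositor videoCompositor =
        new VideoCompositor(
            context,
            glObjectsProvider,
            /* executorService= */ null, // null: the compositor creates and owns its GL thread.
            /* errorListener= */ exception -> Log.e("App", "Compositing failed", exception),
            /* textureOutputListener= */
            (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) ->
                releaseOutputTextureCallback.release(presentationTimeUs),
            /* textureOutputCapacity= */ 1);
    // Each input source registers once and uses its returned id for all of its frames.
    int inputId1 = videoCompositor.registerInputSource();
    int inputId2 = videoCompositor.registerInputSource();
    // Compositing fires on the GL thread once both sources have a queued frame.
    videoCompositor.queueInputTexture(
        inputId1, textureFromSource1, /* presentationTimeUs= */ 0, presentationTimeUs -> {});
    videoCompositor.queueInputTexture(
        inputId2, textureFromSource2, /* presentationTimeUs= */ 0, presentationTimeUs -> {});
    videoCompositor.release();
  }
}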

View File

@@ -20,7 +20,6 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import java.util.ArrayDeque;
@@ -36,11 +35,8 @@ import java.util.concurrent.RejectedExecutionException;
*
* <p>Public methods can be called from any thread.
*
* <p>The wrapper handles calling {@link
* VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
* these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
* non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
* occurs.
* <p>Calls {@link ErrorListener#onError} for errors that occur during these tasks. The listener is
* invoked from the {@link ExecutorService}.
*
* <p>{@linkplain #submitWithHighPriority(Task) High priority tasks} are always executed before
* {@linkplain #submit(Task) default priority tasks}. Tasks with equal priority are executed in FIFO
@@ -52,16 +48,27 @@ import java.util.concurrent.RejectedExecutionException;
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* VideoFrameProcessingException}.
*/
public interface Task {
interface Task {
/** Runs the task. */
void run() throws VideoFrameProcessingException, GlUtil.GlException;
}
/** Listener for errors. */
interface ErrorListener {
/**
* Called when an exception occurs while executing submitted tasks.
*
* <p>Using the {@link VideoFrameProcessingTaskExecutor} after an error happens is undefined
* behavior.
*/
void onError(VideoFrameProcessingException exception);
}
private static final long RELEASE_WAIT_TIME_MS = 500;
private final boolean shouldShutdownExecutorService;
private final ExecutorService singleThreadExecutorService;
private final VideoFrameProcessor.Listener listener;
private final ErrorListener errorListener;
private final Object lock;
@GuardedBy("lock")
@@ -74,10 +81,10 @@ import java.util.concurrent.RejectedExecutionException;
public VideoFrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService,
boolean shouldShutdownExecutorService,
VideoFrameProcessor.Listener listener) {
ErrorListener errorListener) {
this.singleThreadExecutorService = singleThreadExecutorService;
this.shouldShutdownExecutorService = shouldShutdownExecutorService;
this.listener = listener;
this.errorListener = errorListener;
lock = new Object();
highPriorityTasks = new ArrayDeque<>();
}
@@ -186,7 +193,7 @@ import java.util.concurrent.RejectedExecutionException;
if (shouldShutdownExecutorService) {
singleThreadExecutorService.shutdown();
if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
listener.onError(
errorListener.onError(
new VideoFrameProcessingException(
"Release timed out. OpenGL resources may not be cleaned up properly."));
}
@@ -231,6 +238,6 @@ import java.util.concurrent.RejectedExecutionException;
}
shouldCancelTasks = true;
}
listener.onError(VideoFrameProcessingException.from(exception));
errorListener.onError(VideoFrameProcessingException.from(exception));
}
}
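As the updated javadoc above states, high-priority tasks run before queued default-priority tasks, and errors now surface through the new ErrorListener. A sketch of in-package usage (Task and ErrorListener are now package-private, so this only compiles inside androidx.media3.effect; the task bodies are placeholders):

package androidx.media3.effect;

import androidx.media3.common.util.Util;

final class TaskExecutorSketch {
  static void run() throws InterruptedException {
    VideoFrameProcessingTaskExecutor taskExecutor =
        new VideoFrameProcessingTaskExecutor(
            Util.newSingleThreadExecutor("Effect:Sketch:GlThread"),
            /* shouldShutdownExecutorService= */ true,
            /* errorListener= */ exception -> {
              // Errors are non-recoverable for this instance; tear down and rebuild.
            });
    taskExecutor.submit(
        () -> {
          // Default-priority work; FIFO relative to other default-priority tasks.
        });
    taskExecutor.submitWithHighPriority(
        () -> {
          // Runs before any queued default-priority tasks.
        });
    // Runs the release task, then shuts the executor down here because
    // shouldShutdownExecutorService is true.
    taskExecutor.release(/* releaseTask= */ () -> {});
  }
}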

View File

@@ -21,7 +21,6 @@ import static org.mockito.Mockito.verify;
import androidx.media3.common.C;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.After;
@@ -33,13 +32,13 @@ import org.junit.runner.RunWith;
public final class ChainingGlShaderProgramListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final VideoFrameProcessor.Listener mockFrameProcessorListener =
mock(VideoFrameProcessor.Listener.class);
private final VideoFrameProcessingTaskExecutor.ErrorListener mockErrorListener =
mock(VideoFrameProcessingTaskExecutor.ErrorListener.class);
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"),
/* shouldShutdownExecutorService= */ true,
mockFrameProcessorListener);
mockErrorListener);
private final GlObjectsProvider mockGlObjectsProvider = mock(GlObjectsProvider.class);
private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);

View File

@@ -242,7 +242,7 @@ public final class VideoFrameProcessorTestRunner {
* Time to wait for the decoded frame to populate the {@link VideoFrameProcessor} instance's input
* surface and the {@link VideoFrameProcessor} to finish processing the frame, in milliseconds.
*/
public static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5000;
public static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5_000;
private final String testId;
private final @MonotonicNonNull String videoAssetPath;

View File

@@ -557,11 +557,11 @@ public final class AndroidTestUtil {
*/
public static EGLContext createOpenGlObjects() throws GlUtil.GlException {
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
int[] configAttributes = GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
GlObjectsProvider glObjectsProvider =
new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
EGLContext eglContext =
glObjectsProvider.createEglContext(eglDisplay, /* openGlVersion= */ 2, configAttributes);
glObjectsProvider.createEglContext(
eglDisplay, /* openGlVersion= */ 2, GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888);
glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
return eglContext;
}

View File

@@ -87,7 +87,7 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
GlTextureInfo outputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
throws VideoFrameProcessingException, GlUtil.GlException {
throws VideoFrameProcessingException {
readBitmap(outputTexture, presentationTimeUs);
releaseOutputTextureCallback.release(presentationTimeUs);
}

View File

@@ -43,6 +43,10 @@ import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -55,8 +59,6 @@ import org.junit.runners.Parameterized;
/** Pixel test for {@link VideoCompositor} compositing 2 input frames into 1 output frame. */
@RunWith(Parameterized.class)
public final class VideoCompositorPixelTest {
private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner1;
private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner2;
private static final String ORIGINAL_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
private static final String GRAYSCALE_PNG_ASSET_PATH =
@@ -66,10 +68,6 @@ public final class VideoCompositorPixelTest {
private static final String GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/grayscaleAndRotate180Composite.png";
private static final Effect ROTATE_180 =
new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
private static final Effect GRAYSCALE = RgbFilter.createGrayscaleFilter();
@Parameterized.Parameters(name = "useSharedExecutor={0}")
public static ImmutableList<Boolean> useSharedExecutor() {
return ImmutableList.of(true, false);
@@ -77,48 +75,26 @@ public final class VideoCompositorPixelTest {
@Parameterized.Parameter public boolean useSharedExecutor;
public @Nullable ExecutorService executorService;
private @MonotonicNonNull VideoCompositorTestRunner videoCompositorTestRunner;
@After
public void tearDown() {
if (inputVfpTestRunner1 != null) {
inputVfpTestRunner1.release();
}
if (inputVfpTestRunner2 != null) {
inputVfpTestRunner2.release();
}
if (executorService != null) {
try {
executorService.shutdown();
if (!executorService.awaitTermination(/* timeout= */ 5000, MILLISECONDS)) {
throw new IllegalStateException("Missed shutdown timeout.");
}
} catch (InterruptedException unexpected) {
Thread.currentThread().interrupt();
throw new IllegalStateException(unexpected);
}
if (videoCompositorTestRunner != null) {
videoCompositorTestRunner.release();
}
}
@Test
public void compositeTwoFrames_matchesExpected() throws Exception {
public void compositeTwoInputs_withOneFrameFromEach_matchesExpectedBitmap() throws Exception {
String testId =
"compositeTwoFrames_matchesExpected[useSharedExecutor=" + useSharedExecutor + "]";
executorService = useSharedExecutor ? Util.newSingleThreadExecutor("Effect:GlThread") : null;
// Arrange VideoCompositor and VideoFrameProcessor instances.
EGLContext sharedEglContext = AndroidTestUtil.createOpenGlObjects();
GlObjectsProvider sharedGlObjectsProvider = new DefaultGlObjectsProvider(sharedEglContext);
"compositeTwoInputs_withOneFrameFromEach_matchesExpectedBitmap[useSharedExecutor="
+ useSharedExecutor
+ "]";
AtomicReference<Bitmap> compositedOutputBitmap = new AtomicReference<>();
VideoCompositor videoCompositor =
new VideoCompositor(
getApplicationContext(),
sharedGlObjectsProvider,
/* textureOutputListener= */ (outputTexture,
presentationTimeUs,
releaseOutputTextureCallback,
syncObject) -> {
videoCompositorTestRunner =
new VideoCompositorTestRunner(
testId,
(outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
try {
if (useSharedExecutor) {
GlUtil.deleteSyncObject(syncObject);
@@ -134,65 +110,228 @@ public final class VideoCompositorPixelTest {
releaseOutputTextureCallback.release(presentationTimeUs);
}
},
/* textureOutputCapacity= */ 1);
TextureBitmapReader inputTextureBitmapReader1 = new TextureBitmapReader();
VideoFrameProcessorTestRunner inputVfpTestRunner1 =
getFrameProcessorTestRunnerBuilder(
testId,
inputTextureBitmapReader1,
videoCompositor,
executorService,
sharedGlObjectsProvider)
.setEffects(GRAYSCALE)
.build();
this.inputVfpTestRunner1 = inputVfpTestRunner1;
TextureBitmapReader inputTextureBitmapReader2 = new TextureBitmapReader();
VideoFrameProcessorTestRunner inputVfpTestRunner2 =
getFrameProcessorTestRunnerBuilder(
testId,
inputTextureBitmapReader2,
videoCompositor,
executorService,
sharedGlObjectsProvider)
.setEffects(ROTATE_180)
.build();
this.inputVfpTestRunner2 = inputVfpTestRunner2;
useSharedExecutor);
// Queue 1 input bitmap from each input VideoFrameProcessor source.
inputVfpTestRunner1.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ 1 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ 1);
inputVfpTestRunner1.endFrameProcessing();
inputVfpTestRunner2.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ 1 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ 1);
inputVfpTestRunner2.endFrameProcessing();
videoCompositorTestRunner.queueBitmapsToBothInputs(/* count= */ 1);
// Check that VideoFrameProcessor and VideoCompositor outputs match expected bitmaps.
Bitmap actualCompositorInputBitmap1 = checkNotNull(inputTextureBitmapReader1).getBitmap();
Bitmap actualCompositorInputBitmap1 = videoCompositorTestRunner.inputBitmapReader1.getBitmap();
saveAndAssertBitmapMatchesExpected(
testId,
actualCompositorInputBitmap1,
/* actualBitmapLabel= */ "actualCompositorInputBitmap1",
GRAYSCALE_PNG_ASSET_PATH);
Bitmap actualCompositorInputBitmap2 = checkNotNull(inputTextureBitmapReader2).getBitmap();
Bitmap actualCompositorInputBitmap2 = videoCompositorTestRunner.inputBitmapReader2.getBitmap();
saveAndAssertBitmapMatchesExpected(
testId,
actualCompositorInputBitmap2,
/* actualBitmapLabel= */ "actualCompositorInputBitmap2",
ROTATE180_PNG_ASSET_PATH);
Bitmap compositorOutputBitmap = compositedOutputBitmap.get();
saveAndAssertBitmapMatchesExpected(
testId,
compositorOutputBitmap,
compositedOutputBitmap.get(),
/* actualBitmapLabel= */ "compositorOutputBitmap",
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
@Test
public void compositeTwoInputs_withFiveFramesFromEach_matchesExpectedTimestamps()
throws Exception {
String testId =
"compositeTwoInputs_withFiveFramesFromEach_matchesExpectedTimestamps[useSharedExecutor="
+ useSharedExecutor
+ "]";
List<Long> compositorTimestamps = new CopyOnWriteArrayList<>();
AtomicReference<Bitmap> compositedFirstOutputBitmap = new AtomicReference<>();
videoCompositorTestRunner =
new VideoCompositorTestRunner(
testId,
(outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
try {
if (useSharedExecutor) {
GlUtil.deleteSyncObject(syncObject);
} else {
GlUtil.awaitSyncObject(syncObject);
}
if (compositedFirstOutputBitmap.get() == null) {
compositedFirstOutputBitmap.set(
BitmapPixelTestUtil.createArgb8888BitmapFromFocusedGlFramebuffer(
outputTexture.width, outputTexture.height));
}
compositorTimestamps.add(presentationTimeUs);
} catch (GlUtil.GlException e) {
throw VideoFrameProcessingException.from(e);
} finally {
releaseOutputTextureCallback.release(presentationTimeUs);
}
},
useSharedExecutor);
videoCompositorTestRunner.queueBitmapsToBothInputs(/* count= */ 5);
ImmutableList<Long> expectedTimestamps =
ImmutableList.of(
0L,
1L * C.MICROS_PER_SECOND,
2L * C.MICROS_PER_SECOND,
3L * C.MICROS_PER_SECOND,
4L * C.MICROS_PER_SECOND);
Set<Long> inputTimestampsSource1 =
videoCompositorTestRunner.inputBitmapReader1.getOutputTimestamps();
assertThat(inputTimestampsSource1).containsExactlyElementsIn(expectedTimestamps).inOrder();
Set<Long> inputTimestampsSource2 =
videoCompositorTestRunner.inputBitmapReader2.getOutputTimestamps();
assertThat(inputTimestampsSource2).containsExactlyElementsIn(expectedTimestamps).inOrder();
assertThat(compositorTimestamps).containsExactlyElementsIn(expectedTimestamps).inOrder();
saveAndAssertBitmapMatchesExpected(
testId,
compositedFirstOutputBitmap.get(),
/* actualBitmapLabel= */ "compositorOutputBitmap",
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
/**
* A test runner for {@link VideoCompositor} tests.
*
* <p>Composites input bitmaps from two input sources.
*/
private static final class VideoCompositorTestRunner {
private static final int COMPOSITOR_TIMEOUT_MS = 5_000;
private static final Effect ROTATE_180_EFFECT =
new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
private static final Effect GRAYSCALE_EFFECT = RgbFilter.createGrayscaleFilter();
public final TextureBitmapReader inputBitmapReader1;
public final TextureBitmapReader inputBitmapReader2;
private final VideoFrameProcessorTestRunner inputVideoFrameProcessorTestRunner1;
private final VideoFrameProcessorTestRunner inputVideoFrameProcessorTestRunner2;
private final VideoCompositor videoCompositor;
private final @Nullable ExecutorService sharedExecutorService;
private final AtomicReference<VideoFrameProcessingException> compositionException;
private @MonotonicNonNull CountDownLatch compositorEnded;
public VideoCompositorTestRunner(
String testId,
DefaultVideoFrameProcessor.TextureOutputListener compositorTextureOutputListener,
boolean useSharedExecutor)
throws GlUtil.GlException, VideoFrameProcessingException {
sharedExecutorService =
useSharedExecutor ? Util.newSingleThreadExecutor("Effect:Shared:GlThread") : null;
EGLContext sharedEglContext = AndroidTestUtil.createOpenGlObjects();
GlObjectsProvider glObjectsProvider =
new DefaultGlObjectsProvider(
/* sharedEglContext= */ useSharedExecutor ? null : sharedEglContext);
compositionException = new AtomicReference<>();
videoCompositor =
new VideoCompositor(
getApplicationContext(),
glObjectsProvider,
sharedExecutorService,
/* errorListener= */ compositionException::set,
(outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
compositorTextureOutputListener.onTextureRendered(
outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject);
checkNotNull(compositorEnded).countDown();
},
/* textureOutputCapacity= */ 1);
inputBitmapReader1 = new TextureBitmapReader();
inputVideoFrameProcessorTestRunner1 =
createVideoFrameProcessorTestRunnerBuilder(
testId,
inputBitmapReader1,
videoCompositor,
sharedExecutorService,
glObjectsProvider)
.setEffects(GRAYSCALE_EFFECT)
.build();
inputBitmapReader2 = new TextureBitmapReader();
inputVideoFrameProcessorTestRunner2 =
createVideoFrameProcessorTestRunnerBuilder(
testId,
inputBitmapReader2,
videoCompositor,
sharedExecutorService,
glObjectsProvider)
.setEffects(ROTATE_180_EFFECT)
.build();
}
/**
* Queues {@code count} bitmaps, with one bitmap per second, starting from and including 0
* seconds.
*/
public void queueBitmapsToBothInputs(int count) throws IOException, InterruptedException {
compositorEnded = new CountDownLatch(count);
inputVideoFrameProcessorTestRunner1.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ count * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ 1);
inputVideoFrameProcessorTestRunner2.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ count * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ 1);
inputVideoFrameProcessorTestRunner1.endFrameProcessing();
inputVideoFrameProcessorTestRunner2.endFrameProcessing();
compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS);
assertThat(compositionException.get()).isNull();
}
public void release() {
inputVideoFrameProcessorTestRunner1.release();
inputVideoFrameProcessorTestRunner2.release();
videoCompositor.release();
if (sharedExecutorService != null) {
try {
sharedExecutorService.shutdown();
if (!sharedExecutorService.awaitTermination(COMPOSITOR_TIMEOUT_MS, MILLISECONDS)) {
throw new IllegalStateException("Missed shutdown timeout.");
}
} catch (InterruptedException unexpected) {
Thread.currentThread().interrupt();
throw new IllegalStateException(unexpected);
}
}
}
private static VideoFrameProcessorTestRunner.Builder createVideoFrameProcessorTestRunnerBuilder(
String testId,
TextureBitmapReader textureBitmapReader,
VideoCompositor videoCompositor,
@Nullable ExecutorService executorService,
GlObjectsProvider glObjectsProvider) {
int inputId = videoCompositor.registerInputSource();
DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(glObjectsProvider)
.setTextureOutput(
/* textureOutputListener= */ (GlTextureInfo outputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback
releaseOutputTextureCallback,
long syncObject) -> {
GlUtil.awaitSyncObject(syncObject);
textureBitmapReader.readBitmap(outputTexture, presentationTimeUs);
videoCompositor.queueInputTexture(
inputId, outputTexture, presentationTimeUs, releaseOutputTextureCallback);
},
/* textureOutputCapacity= */ 1);
if (executorService != null) {
defaultVideoFrameProcessorFactoryBuilder.setExecutorService(executorService);
}
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setBitmapReader(textureBitmapReader);
}
}
private void saveAndAssertBitmapMatchesExpected(
String testId, Bitmap actualBitmap, String actualBitmapLabel, String expectedBitmapAssetPath)
throws IOException {
@@ -203,37 +342,4 @@ public final class VideoCompositorPixelTest {
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
private static VideoFrameProcessorTestRunner.Builder getFrameProcessorTestRunnerBuilder(
String testId,
TextureBitmapReader textureBitmapReader,
VideoCompositor videoCompositor,
@Nullable ExecutorService executorService,
GlObjectsProvider glObjectsProvider) {
int inputId = videoCompositor.registerInputSource();
DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(glObjectsProvider)
.setTextureOutput(
/* textureOutputListener= */ (GlTextureInfo outputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback
releaseOutputTextureCallback,
long syncObject) -> {
GlUtil.awaitSyncObject(syncObject);
textureBitmapReader.readBitmap(outputTexture, presentationTimeUs);
videoCompositor.queueInputTexture(
inputId, outputTexture, presentationTimeUs, releaseOutputTextureCallback);
},
/* textureOutputCapacity= */ 1);
if (executorService != null) {
defaultVideoFrameProcessorFactoryBuilder.setExecutorService(executorService);
}
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setBitmapReader(textureBitmapReader);
}
}
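For reference, the expected timestamps in the five-frame test follow directly from how queueBitmapsToBothInputs queues input: durationUs = count * C.MICROS_PER_SECOND at frameRate = 1 yields one frame per whole second, so frame i has timestamp i * C.MICROS_PER_SECOND. A small sketch of that arithmetic (class name hypothetical):

import androidx.media3.common.C;
import com.google.common.collect.ImmutableList;

final class ExpectedTimestampsSketch {
  /** Timestamps for {@code count} bitmaps at one frame per second, starting at 0. */
  static ImmutableList<Long> forCount(int count) {
    ImmutableList.Builder<Long> timestamps = ImmutableList.builder();
    for (int i = 0; i < count; i++) {
      // forCount(5) yields 0, 1_000_000, 2_000_000, 3_000_000, 4_000_000.
      timestamps.add(i * C.MICROS_PER_SECOND);
    }
    return timestamps.build();
  }
}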