Compositor: Add support for multiple bitmaps, and add testrunner

* Allow more than one input bitmap at a time.
* Allow Compositor to take in and set up an Executor. Otherwise,
Compositor resources may be created on one thread and accessed on another.
* Add a Compositor TestRunner to make test code more reusable.
* Update VideoFrameProcessingTaskExecutor to use a new onError listener, so
that it's more reusable in non-DVFP contexts, like for Compositor.

PiperOrigin-RevId: 547206053
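
A usage sketch of the new constructor surface (illustrative only, not part of this commit's diff; `context` and `glObjectsProvider` are assumed to exist in the caller, the no-arg shape of registerInputSource() is inferred from the diff below, and Log is androidx.media3.common.util.Log):

  // Sketch: wiring the new executorService and errorListener parameters.
  ExecutorService sharedGlExecutor = Util.newSingleThreadExecutor("Effect:Shared:GlThread");
  VideoCompositor videoCompositor =
      new VideoCompositor(
          context,
          glObjectsProvider,
          /* executorService= */ sharedGlExecutor, // Pass null to let the compositor own its thread.
          /* errorListener= */ exception -> Log.e("App", "Error while compositing", exception),
          /* textureOutputListener= */ (outputTexture, presentationTimeUs, releaseCallback, syncObject) ->
              releaseCallback.release(presentationTimeUs), // Consume the composited texture here.
          /* textureOutputCapacity= */ 1);
  int firstInputId = videoCompositor.registerInputSource();
  int secondInputId = videoCompositor.registerInputSource();
  // Feed frames via queueInputTexture(inputId, ...), then:
  videoCompositor.release();
  sharedGlExecutor.shutdown(); // The caller owns a non-null executorService.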
Author: huangdarwin
Date: 2023-07-11 17:03:09 +01:00 (committed by Rohit Singh)
Commit: c122c0ca05
Parent: 4983d4f339
10 changed files with 345 additions and 151 deletions


@@ -132,8 +132,7 @@ public interface VideoFrameProcessor {
   /**
    * Called when an exception occurs during asynchronous video frame processing.
    *
-   * <p>If an error occurred, consuming and producing further frames will not work as expected and
-   * the {@link VideoFrameProcessor} should be released.
+   * <p>Using {@code VideoFrameProcessor} after an error happens is undefined behavior.
    */
   void onError(VideoFrameProcessingException exception);


@@ -690,6 +690,9 @@ public final class GlUtil {
   /**
    * Destroys the {@link EGLContext} identified by the provided {@link EGLDisplay} and {@link
    * EGLContext}.
+   *
+   * <p>This is a no-op if called on already-destroyed {@link EGLDisplay} and {@link EGLContext}
+   * instances.
    */
   @RequiresApi(17)
   public static void destroyEglContext(
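
This contract change lets teardown code call destroyEglContext defensively without tracking destruction state; a minimal sketch (eglDisplay and eglContext assumed already created, TAG assumed defined):

  try {
    GlUtil.destroyEglContext(eglDisplay, eglContext);
    // Per the new contract, a repeated call on already-destroyed instances is a no-op.
    GlUtil.destroyEglContext(eglDisplay, eglContext);
  } catch (GlUtil.GlException e) {
    Log.e(TAG, "Error releasing GL context", e);
  }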


@@ -99,7 +99,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
   /** A factory for {@link DefaultVideoFrameProcessor} instances. */
   public static final class Factory implements VideoFrameProcessor.Factory {
-    private static final String THREAD_NAME = "Effect:GlThread";
+    private static final String THREAD_NAME = "Effect:DefaultVideoFrameProcessor:GlThread";

     /** A builder for {@link DefaultVideoFrameProcessor.Factory} instances. */
     public static final class Builder {
@@ -285,7 +285,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
         executorService == null ? Util.newSingleThreadExecutor(THREAD_NAME) : executorService;
     VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
         new VideoFrameProcessingTaskExecutor(
-            instanceExecutorService, shouldShutdownExecutorService, listener);
+            instanceExecutorService, shouldShutdownExecutorService, listener::onError);
     Future<DefaultVideoFrameProcessor> defaultVideoFrameProcessorFuture =
         instanceExecutorService.submit(
@@ -554,9 +554,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
   public void release() {
     try {
       videoFrameProcessingTaskExecutor.release(/* releaseTask= */ this::releaseGlObjects);
-    } catch (InterruptedException unexpected) {
+    } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      throw new IllegalStateException(unexpected);
+      throw new IllegalStateException(e);
     }
   }


@@ -19,35 +19,52 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;

 import android.content.Context;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
 import android.opengl.GLES20;
+import androidx.annotation.GuardedBy;
 import androidx.annotation.IntRange;
+import androidx.annotation.Nullable;
 import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.util.GlProgram;
 import androidx.media3.common.util.GlUtil;
+import androidx.media3.common.util.Log;
 import androidx.media3.common.util.UnstableApi;
+import androidx.media3.common.util.Util;
 import java.io.IOException;
 import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Queue;
+import java.util.concurrent.ExecutorService;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

 /**
  * A basic VideoCompositor that takes in frames from exactly 2 input sources and combines it to one
- * output. Only tested for 2 frames in, 1 frame out for now.
+ * output.
  */
 @UnstableApi
 public final class VideoCompositor {
   // TODO: b/262694346 - Flesh out this implementation by doing the following:
-  //  * Create on a shared VideoFrameProcessingTaskExecutor with VideoFrameProcessor instances.
-  //  * >1 input/output frame per source.
   //  * Handle matched timestamps.
   //  * Handle mismatched timestamps
   //  * Before allowing customization of this class, add an interface, and rename this class to
   //    DefaultCompositor.

+  /** Listener for errors. */
+  public interface ErrorListener {
+    /**
+     * Called when an exception occurs during asynchronous frame compositing.
+     *
+     * <p>Using {@code VideoCompositor} after an error happens is undefined behavior.
+     */
+    void onError(VideoFrameProcessingException exception);
+  }
+
+  private static final String THREAD_NAME = "Effect:VideoCompositor:GlThread";
+  private static final String TAG = "VideoCompositor";
+
   private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl";
   private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl";
@@ -55,17 +72,30 @@ public final class VideoCompositor {
   private final Context context;
   private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
   private final GlObjectsProvider glObjectsProvider;
+  private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;

   // List of queues of unprocessed frames for each input source.
+  @GuardedBy("this")
   private final List<Queue<InputFrameInfo>> inputFrameInfos;
   private final TexturePool outputTexturePool;

   // Only used on the GL Thread.
+  private @MonotonicNonNull EGLContext eglContext;
+  private @MonotonicNonNull EGLDisplay eglDisplay;
   private @MonotonicNonNull GlProgram glProgram;
   private long syncObject;

+  /**
+   * Creates an instance.
+   *
+   * <p>If a non-null {@code executorService} is set, the {@link ExecutorService} must be
+   * {@linkplain ExecutorService#shutdown shut down} by the caller.
+   */
   public VideoCompositor(
       Context context,
       GlObjectsProvider glObjectsProvider,
+      @Nullable ExecutorService executorService,
+      ErrorListener errorListener,
       DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener,
       @IntRange(from = 1) int textureOutputCapacity) {
     this.context = context;
@@ -75,6 +105,16 @@ public final class VideoCompositor {
     inputFrameInfos = new ArrayList<>();
     outputTexturePool =
         new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
+    boolean ownsExecutor = executorService == null;
+    ExecutorService instanceExecutorService =
+        ownsExecutor ? Util.newSingleThreadExecutor(THREAD_NAME) : checkNotNull(executorService);
+    videoFrameProcessingTaskExecutor =
+        new VideoFrameProcessingTaskExecutor(
+            instanceExecutorService,
+            /* shouldShutdownExecutorService= */ ownsExecutor,
+            errorListener::onError);
+    videoFrameProcessingTaskExecutor.submit(this::setupGlObjects);
   }

   /**
@@ -86,7 +126,6 @@ public final class VideoCompositor {
     return inputFrameInfos.size() - 1;
   }

-  // Below methods must be called on the GL thread.
   /**
    * Queues an input texture to be composited, for example from an upstream {@link
    * DefaultVideoFrameProcessor.TextureOutputListener}.
@@ -94,7 +133,7 @@ public final class VideoCompositor {
    * <p>Each input source must have a unique {@code inputId} returned from {@link
    * #registerInputSource}.
    */
-  public void queueInputTexture(
+  public synchronized void queueInputTexture(
       int inputId,
       GlTextureInfo inputTexture,
       long presentationTimeUs,
@@ -104,12 +143,33 @@ public final class VideoCompositor {
         new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
     checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo);

-    if (isReadyToComposite()) {
-      compositeToOutputTexture();
-    }
+    videoFrameProcessingTaskExecutor.submit(
+        () -> {
+          if (isReadyToComposite()) {
+            compositeToOutputTexture();
+          }
+        });
   }

-  private boolean isReadyToComposite() {
+  public void release() {
+    try {
+      videoFrameProcessingTaskExecutor.release(/* releaseTask= */ this::releaseGlObjects);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new IllegalStateException(e);
+    }
+  }
+
+  // Below methods must be called on the GL thread.
+
+  private void setupGlObjects() throws GlUtil.GlException {
+    eglDisplay = GlUtil.getDefaultEglDisplay();
+    eglContext =
+        glObjectsProvider.createEglContext(
+            eglDisplay, /* openGlVersion= */ 2, GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888);
+    glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
+  }
+
+  private synchronized boolean isReadyToComposite() {
     // TODO: b/262694346 - Use timestamps to determine when to composite instead of number of
     //  frames.
     for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
@@ -120,13 +180,14 @@ public final class VideoCompositor {
     return true;
   }

-  private void compositeToOutputTexture() throws VideoFrameProcessingException {
+  private synchronized void compositeToOutputTexture() throws VideoFrameProcessingException {
     List<InputFrameInfo> framesToComposite = new ArrayList<>();
     for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
       framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove());
     }

     ensureGlProgramConfigured();

     // TODO: b/262694346 -
     //  * Support an arbitrary number of inputs.
     //  * Allow different frame dimensions.
@@ -150,8 +211,9 @@ public final class VideoCompositor {
       // TODO: b/262694346 - Use presentationTimeUs here for freeing textures.
       textureOutputListener.onTextureRendered(
           checkNotNull(outputTexture),
-          /* presentationTimeUs= */ 0,
-          (presentationTimeUs) -> outputTexturePool.freeTexture(),
+          /* presentationTimeUs= */ framesToComposite.get(0).presentationTimeUs,
+          (presentationTimeUs) ->
+              videoFrameProcessingTaskExecutor.submit(outputTexturePool::freeTexture),
           syncObject);
     } catch (GlUtil.GlException e) {
       throw VideoFrameProcessingException.from(e);
@@ -194,6 +256,24 @@ public final class VideoCompositor {
     glProgram.bindAttributesAndUniforms();
     // The four-vertex triangle strip forms a quad.
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
+    GlUtil.checkGlError();
+  }
+
+  private void releaseGlObjects() {
+    try {
+      outputTexturePool.deleteAllTextures();
+      if (glProgram != null) {
+        glProgram.delete();
+      }
+    } catch (Exception e) {
+      Log.e(TAG, "Error releasing GL resources", e);
+    } finally {
+      try {
+        GlUtil.destroyEglContext(eglDisplay, eglContext);
+      } catch (GlUtil.GlException e) {
+        Log.e(TAG, "Error releasing GL context", e);
+      }
+    }
   }

   /** Holds information on a frame and how to release it. */


@@ -20,7 +20,6 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import androidx.annotation.GuardedBy;
 import androidx.annotation.Nullable;
 import androidx.media3.common.VideoFrameProcessingException;
-import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.common.util.GlUtil;
 import androidx.media3.common.util.UnstableApi;
 import java.util.ArrayDeque;
@@ -36,11 +35,8 @@ import java.util.concurrent.RejectedExecutionException;
  *
  * <p>Public methods can be called from any thread.
  *
- * <p>The wrapper handles calling {@link
- * VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
- * these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
- * non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
- * occurs.
+ * <p>Calls {@link ErrorListener#onError} for errors that occur during these tasks. The listener is
+ * invoked from the {@link ExecutorService}.
  *
  * <p>{@linkplain #submitWithHighPriority(Task) High priority tasks} are always executed before
  * {@linkplain #submit(Task) default priority tasks}. Tasks with equal priority are executed in FIFO
@@ -52,16 +48,27 @@ import java.util.concurrent.RejectedExecutionException;
    * Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
    * VideoFrameProcessingException}.
    */
-  public interface Task {
+  interface Task {
     /** Runs the task. */
     void run() throws VideoFrameProcessingException, GlUtil.GlException;
   }

+  /** Listener for errors. */
+  interface ErrorListener {
+    /**
+     * Called when an exception occurs while executing submitted tasks.
+     *
+     * <p>Using the {@link VideoFrameProcessingTaskExecutor} after an error happens is undefined
+     * behavior.
+     */
+    void onError(VideoFrameProcessingException exception);
+  }
+
   private static final long RELEASE_WAIT_TIME_MS = 500;

   private final boolean shouldShutdownExecutorService;
   private final ExecutorService singleThreadExecutorService;
-  private final VideoFrameProcessor.Listener listener;
+  private final ErrorListener errorListener;
   private final Object lock;

   @GuardedBy("lock")
@@ -74,10 +81,10 @@ import java.util.concurrent.RejectedExecutionException;
   public VideoFrameProcessingTaskExecutor(
       ExecutorService singleThreadExecutorService,
       boolean shouldShutdownExecutorService,
-      VideoFrameProcessor.Listener listener) {
+      ErrorListener errorListener) {
     this.singleThreadExecutorService = singleThreadExecutorService;
     this.shouldShutdownExecutorService = shouldShutdownExecutorService;
-    this.listener = listener;
+    this.errorListener = errorListener;
     lock = new Object();
     highPriorityTasks = new ArrayDeque<>();
   }
@@ -186,7 +193,7 @@ import java.util.concurrent.RejectedExecutionException;
     if (shouldShutdownExecutorService) {
       singleThreadExecutorService.shutdown();
       if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
-        listener.onError(
+        errorListener.onError(
             new VideoFrameProcessingException(
                 "Release timed out. OpenGL resources may not be cleaned up properly."));
       }
@@ -231,6 +238,6 @@ import java.util.concurrent.RejectedExecutionException;
       }
       shouldCancelTasks = true;
     }
-    listener.onError(VideoFrameProcessingException.from(exception));
+    errorListener.onError(VideoFrameProcessingException.from(exception));
   }
 }
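
Since ErrorListener's single onError method matches the shape of VideoFrameProcessor.Listener#onError, existing listeners adapt with a method reference (listener::onError, as in the DefaultVideoFrameProcessor change above). A standalone sketch, assuming code in the same package as this package-private interface:

  VideoFrameProcessingTaskExecutor taskExecutor =
      new VideoFrameProcessingTaskExecutor(
          Util.newSingleThreadExecutor("Effect:GlThread"),
          /* shouldShutdownExecutorService= */ true,
          /* errorListener= */ exception ->
              Log.e("TaskExecutor", "Error during GL task", exception));
  taskExecutor.submit(GlUtil::checkGlError); // Task#run may throw GlUtil.GlException.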


@@ -21,7 +21,6 @@ import static org.mockito.Mockito.verify;
 import androidx.media3.common.C;
 import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
-import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.common.util.Util;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import org.junit.After;
@@ -33,13 +32,13 @@ import org.junit.runner.RunWith;
 public final class ChainingGlShaderProgramListenerTest {
   private static final long EXECUTOR_WAIT_TIME_MS = 100;

-  private final VideoFrameProcessor.Listener mockFrameProcessorListener =
-      mock(VideoFrameProcessor.Listener.class);
+  private final VideoFrameProcessingTaskExecutor.ErrorListener mockErrorListener =
+      mock(VideoFrameProcessingTaskExecutor.ErrorListener.class);
   private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
       new VideoFrameProcessingTaskExecutor(
           Util.newSingleThreadExecutor("Test"),
           /* shouldShutdownExecutorService= */ true,
-          mockFrameProcessorListener);
+          mockErrorListener);
   private final GlObjectsProvider mockGlObjectsProvider = mock(GlObjectsProvider.class);
   private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
   private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);


@@ -242,7 +242,7 @@ public final class VideoFrameProcessorTestRunner {
    * Time to wait for the decoded frame to populate the {@link VideoFrameProcessor} instance's input
    * surface and the {@link VideoFrameProcessor} to finish processing the frame, in milliseconds.
    */
-  public static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5000;
+  public static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5_000;

   private final String testId;
   private final @MonotonicNonNull String videoAssetPath;


@@ -557,11 +557,11 @@ public final class AndroidTestUtil {
    */
   public static EGLContext createOpenGlObjects() throws GlUtil.GlException {
     EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
-    int[] configAttributes = GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
     GlObjectsProvider glObjectsProvider =
         new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
     EGLContext eglContext =
-        glObjectsProvider.createEglContext(eglDisplay, /* openGlVersion= */ 2, configAttributes);
+        glObjectsProvider.createEglContext(
+            eglDisplay, /* openGlVersion= */ 2, GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888);
     glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
     return eglContext;
   }


@@ -87,7 +87,7 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
       GlTextureInfo outputTexture,
       long presentationTimeUs,
       DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
-      throws VideoFrameProcessingException, GlUtil.GlException {
+      throws VideoFrameProcessingException {
     readBitmap(outputTexture, presentationTimeUs);
     releaseOutputTextureCallback.release(presentationTimeUs);
   }


@@ -43,6 +43,10 @@ import androidx.media3.test.utils.BitmapPixelTestUtil;
 import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
 import com.google.common.collect.ImmutableList;
 import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicReference;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -55,8 +59,6 @@ import org.junit.runners.Parameterized;
 /** Pixel test for {@link VideoCompositor} compositing 2 input frames into 1 output frame. */
 @RunWith(Parameterized.class)
 public final class VideoCompositorPixelTest {
-  private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner1;
-  private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner2;
   private static final String ORIGINAL_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
   private static final String GRAYSCALE_PNG_ASSET_PATH =
@@ -66,10 +68,6 @@ public final class VideoCompositorPixelTest {
   private static final String GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/grayscaleAndRotate180Composite.png";

-  private static final Effect ROTATE_180 =
-      new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
-  private static final Effect GRAYSCALE = RgbFilter.createGrayscaleFilter();
-
   @Parameterized.Parameters(name = "useSharedExecutor={0}")
   public static ImmutableList<Boolean> useSharedExecutor() {
     return ImmutableList.of(true, false);
@@ -77,48 +75,26 @@ public final class VideoCompositorPixelTest {
   @Parameterized.Parameter public boolean useSharedExecutor;

-  public @Nullable ExecutorService executorService;
+  private @MonotonicNonNull VideoCompositorTestRunner videoCompositorTestRunner;

   @After
   public void tearDown() {
-    if (inputVfpTestRunner1 != null) {
-      inputVfpTestRunner1.release();
-    }
-    if (inputVfpTestRunner2 != null) {
-      inputVfpTestRunner2.release();
-    }
-    if (executorService != null) {
-      try {
-        executorService.shutdown();
-        if (!executorService.awaitTermination(/* timeout= */ 5000, MILLISECONDS)) {
-          throw new IllegalStateException("Missed shutdown timeout.");
-        }
-      } catch (InterruptedException unexpected) {
-        Thread.currentThread().interrupt();
-        throw new IllegalStateException(unexpected);
-      }
+    if (videoCompositorTestRunner != null) {
+      videoCompositorTestRunner.release();
     }
   }

   @Test
-  public void compositeTwoFrames_matchesExpected() throws Exception {
+  public void compositeTwoInputs_withOneFrameFromEach_matchesExpectedBitmap() throws Exception {
     String testId =
-        "compositeTwoFrames_matchesExpected[useSharedExecutor=" + useSharedExecutor + "]";
-    executorService = useSharedExecutor ? Util.newSingleThreadExecutor("Effect:GlThread") : null;
-
-    // Arrange VideoCompositor and VideoFrameProcessor instances.
-    EGLContext sharedEglContext = AndroidTestUtil.createOpenGlObjects();
-    GlObjectsProvider sharedGlObjectsProvider = new DefaultGlObjectsProvider(sharedEglContext);
+        "compositeTwoInputs_withOneFrameFromEach_matchesExpectedBitmap[useSharedExecutor="
+            + useSharedExecutor
+            + "]";
     AtomicReference<Bitmap> compositedOutputBitmap = new AtomicReference<>();
-    VideoCompositor videoCompositor =
-        new VideoCompositor(
-            getApplicationContext(),
-            sharedGlObjectsProvider,
-            /* textureOutputListener= */ (outputTexture,
-                presentationTimeUs,
-                releaseOutputTextureCallback,
-                syncObject) -> {
+    videoCompositorTestRunner =
+        new VideoCompositorTestRunner(
+            testId,
+            (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
               try {
                 if (useSharedExecutor) {
                   GlUtil.deleteSyncObject(syncObject);
@@ -134,77 +110,195 @@ public final class VideoCompositorPixelTest {
                 releaseOutputTextureCallback.release(presentationTimeUs);
               }
             },
-            /* textureOutputCapacity= */ 1);
-    TextureBitmapReader inputTextureBitmapReader1 = new TextureBitmapReader();
-    VideoFrameProcessorTestRunner inputVfpTestRunner1 =
-        getFrameProcessorTestRunnerBuilder(
-                testId,
-                inputTextureBitmapReader1,
-                videoCompositor,
-                executorService,
-                sharedGlObjectsProvider)
-            .setEffects(GRAYSCALE)
-            .build();
-    this.inputVfpTestRunner1 = inputVfpTestRunner1;
-    TextureBitmapReader inputTextureBitmapReader2 = new TextureBitmapReader();
-    VideoFrameProcessorTestRunner inputVfpTestRunner2 =
-        getFrameProcessorTestRunnerBuilder(
-                testId,
-                inputTextureBitmapReader2,
-                videoCompositor,
-                executorService,
-                sharedGlObjectsProvider)
-            .setEffects(ROTATE_180)
-            .build();
-    this.inputVfpTestRunner2 = inputVfpTestRunner2;
+            useSharedExecutor);

-    // Queue 1 input bitmap from each input VideoFrameProcessor source.
-    inputVfpTestRunner1.queueInputBitmap(
-        readBitmap(ORIGINAL_PNG_ASSET_PATH),
-        /* durationUs= */ 1 * C.MICROS_PER_SECOND,
-        /* offsetToAddUs= */ 0,
-        /* frameRate= */ 1);
-    inputVfpTestRunner1.endFrameProcessing();
-    inputVfpTestRunner2.queueInputBitmap(
-        readBitmap(ORIGINAL_PNG_ASSET_PATH),
-        /* durationUs= */ 1 * C.MICROS_PER_SECOND,
-        /* offsetToAddUs= */ 0,
-        /* frameRate= */ 1);
-    inputVfpTestRunner2.endFrameProcessing();
+    videoCompositorTestRunner.queueBitmapsToBothInputs(/* count= */ 1);

-    // Check that VideoFrameProcessor and VideoCompositor outputs match expected bitmaps.
-    Bitmap actualCompositorInputBitmap1 = checkNotNull(inputTextureBitmapReader1).getBitmap();
+    Bitmap actualCompositorInputBitmap1 = videoCompositorTestRunner.inputBitmapReader1.getBitmap();
     saveAndAssertBitmapMatchesExpected(
         testId,
         actualCompositorInputBitmap1,
         /* actualBitmapLabel= */ "actualCompositorInputBitmap1",
         GRAYSCALE_PNG_ASSET_PATH);
-    Bitmap actualCompositorInputBitmap2 = checkNotNull(inputTextureBitmapReader2).getBitmap();
+    Bitmap actualCompositorInputBitmap2 = videoCompositorTestRunner.inputBitmapReader2.getBitmap();
     saveAndAssertBitmapMatchesExpected(
         testId,
         actualCompositorInputBitmap2,
         /* actualBitmapLabel= */ "actualCompositorInputBitmap2",
         ROTATE180_PNG_ASSET_PATH);
-    Bitmap compositorOutputBitmap = compositedOutputBitmap.get();
     saveAndAssertBitmapMatchesExpected(
         testId,
-        compositorOutputBitmap,
+        compositedOutputBitmap.get(),
         /* actualBitmapLabel= */ "compositorOutputBitmap",
         GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
   }

-  private void saveAndAssertBitmapMatchesExpected(
-      String testId, Bitmap actualBitmap, String actualBitmapLabel, String expectedBitmapAssetPath)
-      throws IOException {
-    maybeSaveTestBitmap(testId, actualBitmapLabel, actualBitmap, /* path= */ null);
-    float averagePixelAbsoluteDifference =
-        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888(
-            readBitmap(expectedBitmapAssetPath), actualBitmap, testId);
-    assertThat(averagePixelAbsoluteDifference)
-        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
-  }
+  @Test
+  public void compositeTwoInputs_withFiveFramesFromEach_matchesExpectedTimestamps()
+      throws Exception {
+    String testId =
+        "compositeTwoInputs_withFiveFramesFromEach_matchesExpectedTimestamps[useSharedExecutor="
+            + useSharedExecutor
+            + "]";
+    List<Long> compositorTimestamps = new CopyOnWriteArrayList<>();
+    AtomicReference<Bitmap> compositedFirstOutputBitmap = new AtomicReference<>();
+    videoCompositorTestRunner =
+        new VideoCompositorTestRunner(
+            testId,
+            (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
+              try {
+                if (useSharedExecutor) {
+                  GlUtil.deleteSyncObject(syncObject);
+                } else {
+                  GlUtil.awaitSyncObject(syncObject);
+                }
+                if (compositedFirstOutputBitmap.get() == null) {
+                  compositedFirstOutputBitmap.set(
+                      BitmapPixelTestUtil.createArgb8888BitmapFromFocusedGlFramebuffer(
+                          outputTexture.width, outputTexture.height));
+                }
+                compositorTimestamps.add(presentationTimeUs);
+              } catch (GlUtil.GlException e) {
+                throw VideoFrameProcessingException.from(e);
+              } finally {
+                releaseOutputTextureCallback.release(presentationTimeUs);
+              }
+            },
+            useSharedExecutor);

+    videoCompositorTestRunner.queueBitmapsToBothInputs(/* count= */ 5);

+    ImmutableList<Long> expectedTimestamps =
+        ImmutableList.of(
+            0L,
+            1L * C.MICROS_PER_SECOND,
+            2L * C.MICROS_PER_SECOND,
+            3L * C.MICROS_PER_SECOND,
+            4L * C.MICROS_PER_SECOND);
+    Set<Long> inputTimestampsSource1 =
+        videoCompositorTestRunner.inputBitmapReader1.getOutputTimestamps();
+    assertThat(inputTimestampsSource1).containsExactlyElementsIn(expectedTimestamps).inOrder();
+    Set<Long> inputTimestampsSource2 =
+        videoCompositorTestRunner.inputBitmapReader2.getOutputTimestamps();
+    assertThat(inputTimestampsSource2).containsExactlyElementsIn(expectedTimestamps).inOrder();
+    assertThat(compositorTimestamps).containsExactlyElementsIn(expectedTimestamps).inOrder();
+    saveAndAssertBitmapMatchesExpected(
+        testId,
+        compositedFirstOutputBitmap.get(),
+        /* actualBitmapLabel= */ "compositorOutputBitmap",
+        GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
+  }
+  /**
+   * A test runner for {@link VideoCompositor} tests.
+   *
+   * <p>Composites input bitmaps from two input sources.
+   */
+  private static final class VideoCompositorTestRunner {
+    private static final int COMPOSITOR_TIMEOUT_MS = 5_000;
+    private static final Effect ROTATE_180_EFFECT =
+        new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
+    private static final Effect GRAYSCALE_EFFECT = RgbFilter.createGrayscaleFilter();
+
+    public final TextureBitmapReader inputBitmapReader1;
+    public final TextureBitmapReader inputBitmapReader2;
+    private final VideoFrameProcessorTestRunner inputVideoFrameProcessorTestRunner1;
+    private final VideoFrameProcessorTestRunner inputVideoFrameProcessorTestRunner2;
+    private final VideoCompositor videoCompositor;
+    private final @Nullable ExecutorService sharedExecutorService;
+    private final AtomicReference<VideoFrameProcessingException> compositionException;
+    private @MonotonicNonNull CountDownLatch compositorEnded;
+
+    public VideoCompositorTestRunner(
+        String testId,
+        DefaultVideoFrameProcessor.TextureOutputListener compositorTextureOutputListener,
+        boolean useSharedExecutor)
+        throws GlUtil.GlException, VideoFrameProcessingException {
+      sharedExecutorService =
+          useSharedExecutor ? Util.newSingleThreadExecutor("Effect:Shared:GlThread") : null;
+      EGLContext sharedEglContext = AndroidTestUtil.createOpenGlObjects();
+      GlObjectsProvider glObjectsProvider =
+          new DefaultGlObjectsProvider(
+              /* sharedEglContext= */ useSharedExecutor ? null : sharedEglContext);
+      compositionException = new AtomicReference<>();
+      videoCompositor =
+          new VideoCompositor(
+              getApplicationContext(),
+              glObjectsProvider,
+              sharedExecutorService,
+              /* errorListener= */ compositionException::set,
+              (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
+                compositorTextureOutputListener.onTextureRendered(
+                    outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject);
+                checkNotNull(compositorEnded).countDown();
+              },
+              /* textureOutputCapacity= */ 1);
+      inputBitmapReader1 = new TextureBitmapReader();
+      inputVideoFrameProcessorTestRunner1 =
+          createVideoFrameProcessorTestRunnerBuilder(
+                  testId,
+                  inputBitmapReader1,
+                  videoCompositor,
+                  sharedExecutorService,
+                  glObjectsProvider)
+              .setEffects(GRAYSCALE_EFFECT)
+              .build();
+      inputBitmapReader2 = new TextureBitmapReader();
+      inputVideoFrameProcessorTestRunner2 =
+          createVideoFrameProcessorTestRunnerBuilder(
+                  testId,
+                  inputBitmapReader2,
+                  videoCompositor,
+                  sharedExecutorService,
+                  glObjectsProvider)
+              .setEffects(ROTATE_180_EFFECT)
+              .build();
+    }
+
+    /**
+     * Queues {@code count} bitmaps, with one bitmap per second, starting from and including 0
+     * seconds.
+     */
+    public void queueBitmapsToBothInputs(int count) throws IOException, InterruptedException {
+      compositorEnded = new CountDownLatch(count);
+      inputVideoFrameProcessorTestRunner1.queueInputBitmap(
+          readBitmap(ORIGINAL_PNG_ASSET_PATH),
+          /* durationUs= */ count * C.MICROS_PER_SECOND,
+          /* offsetToAddUs= */ 0,
+          /* frameRate= */ 1);
+      inputVideoFrameProcessorTestRunner2.queueInputBitmap(
+          readBitmap(ORIGINAL_PNG_ASSET_PATH),
+          /* durationUs= */ count * C.MICROS_PER_SECOND,
+          /* offsetToAddUs= */ 0,
+          /* frameRate= */ 1);
+      inputVideoFrameProcessorTestRunner1.endFrameProcessing();
+      inputVideoFrameProcessorTestRunner2.endFrameProcessing();
+      compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS);
+      assertThat(compositionException.get()).isNull();
+    }
+
+    public void release() {
+      inputVideoFrameProcessorTestRunner1.release();
+      inputVideoFrameProcessorTestRunner2.release();
+      videoCompositor.release();
+      if (sharedExecutorService != null) {
+        try {
+          sharedExecutorService.shutdown();
+          if (!sharedExecutorService.awaitTermination(COMPOSITOR_TIMEOUT_MS, MILLISECONDS)) {
+            throw new IllegalStateException("Missed shutdown timeout.");
+          }
+        } catch (InterruptedException unexpected) {
+          Thread.currentThread().interrupt();
+          throw new IllegalStateException(unexpected);
+        }
+      }
+    }
+
-  private static VideoFrameProcessorTestRunner.Builder getFrameProcessorTestRunnerBuilder(
-      String testId,
-      TextureBitmapReader textureBitmapReader,
-      VideoCompositor videoCompositor,
+    private static VideoFrameProcessorTestRunner.Builder createVideoFrameProcessorTestRunnerBuilder(
+        String testId,
+        TextureBitmapReader textureBitmapReader,
+        VideoCompositor videoCompositor,
@@ -236,4 +330,16 @@ public final class VideoCompositorPixelTest {
           .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
           .setBitmapReader(textureBitmapReader);
     }
+  }
+
+  private void saveAndAssertBitmapMatchesExpected(
+      String testId, Bitmap actualBitmap, String actualBitmapLabel, String expectedBitmapAssetPath)
+      throws IOException {
+    maybeSaveTestBitmap(testId, actualBitmapLabel, actualBitmap, /* path= */ null);
+    float averagePixelAbsoluteDifference =
+        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+            readBitmap(expectedBitmapAssetPath), actualBitmap, testId);
+    assertThat(averagePixelAbsoluteDifference)
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+  }
 }