diff --git a/libraries/effect/src/main/java/androidx/media3/effect/VideoCompositor.java b/libraries/effect/src/main/java/androidx/media3/effect/VideoCompositor.java
index 7015d777d9..ecd21ccd4b 100644
--- a/libraries/effect/src/main/java/androidx/media3/effect/VideoCompositor.java
+++ b/libraries/effect/src/main/java/androidx/media3/effect/VideoCompositor.java
@@ -59,29 +59,34 @@ public final class VideoCompositor {
// * Use a lock to synchronize inputFrameInfos more narrowly, to reduce blocking.
/** Listener for errors. */
- public interface ErrorListener {
+ public interface Listener {
/**
* Called when an exception occurs during asynchronous frame compositing.
*
-   * <p>Using {@code VideoCompositor} after an error happens is undefined behavior.
+   * <p>Using {@link VideoCompositor} after an error happens is undefined behavior.
*/
void onError(VideoFrameProcessingException exception);
+
+ /** Called after {@link VideoCompositor} has output its final output frame. */
+ void onEnded();
}
private static final String THREAD_NAME = "Effect:VideoCompositor:GlThread";
private static final String TAG = "VideoCompositor";
private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl";
-
private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl";
+ private static final int PRIMARY_INPUT_ID = 0;
private final Context context;
+ private final Listener listener;
private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
private final GlObjectsProvider glObjectsProvider;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
- // List of queues of unprocessed frames for each input source.
@GuardedBy("this")
- private final List<Queue<InputFrameInfo>> inputFrameInfos;
+ private final List<InputSource> inputSources;
+
+ private boolean allInputsEnded; // Whether all inputSources have signaled end of input.
private final TexturePool outputTexturePool;
private final Queue<Long> outputTextureTimestamps; // Synchronized with outputTexturePool.
@@ -102,14 +107,15 @@ public final class VideoCompositor {
Context context,
GlObjectsProvider glObjectsProvider,
@Nullable ExecutorService executorService,
- ErrorListener errorListener,
+ Listener listener,
DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener,
@IntRange(from = 1) int textureOutputCapacity) {
this.context = context;
+ this.listener = listener;
this.textureOutputListener = textureOutputListener;
this.glObjectsProvider = glObjectsProvider;
- inputFrameInfos = new ArrayList<>();
+ inputSources = new ArrayList<>();
outputTexturePool =
new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
outputTextureTimestamps = new ArrayDeque<>(textureOutputCapacity);
@@ -122,7 +128,7 @@ public final class VideoCompositor {
new VideoFrameProcessingTaskExecutor(
instanceExecutorService,
/* shouldShutdownExecutorService= */ ownsExecutor,
- errorListener::onError);
+ listener::onError);
videoFrameProcessingTaskExecutor.submit(this::setupGlObjects);
}
@@ -131,8 +137,28 @@ public final class VideoCompositor {
* source, to be used in {@link #queueInputTexture}.
*/
public synchronized int registerInputSource() {
- inputFrameInfos.add(new ArrayDeque<>());
- return inputFrameInfos.size() - 1;
+ inputSources.add(new InputSource());
+ return inputSources.size() - 1;
+ }
+
+ /**
+ * Signals that no more frames will come from the upstream {@link
+ * DefaultVideoFrameProcessor.TextureOutputListener}.
+ *
+ * <p>Each input source must have a unique {@code inputId} returned from {@link
+ * #registerInputSource}.
+ */
+ public synchronized void signalEndOfInputSource(int inputId) {
+ inputSources.get(inputId).isInputEnded = true;
+ for (int i = 0; i < inputSources.size(); i++) {
+ if (!inputSources.get(i).isInputEnded) {
+ return;
+ }
+ }
+ allInputsEnded = true;
+ if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
+ listener.onEnded();
+ }
}
/**
@@ -148,9 +174,10 @@ public final class VideoCompositor {
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseTextureCallback)
throws VideoFrameProcessingException {
+ checkState(!inputSources.get(inputId).isInputEnded);
InputFrameInfo inputFrameInfo =
new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
- checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo);
+ inputSources.get(inputId).frameInfos.add(inputFrameInfo);
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
}
@@ -180,8 +207,8 @@ public final class VideoCompositor {
}
List<InputFrameInfo> framesToComposite = new ArrayList<>();
- for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
- framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove());
+ for (int inputId = 0; inputId < inputSources.size(); inputId++) {
+ framesToComposite.add(inputSources.get(inputId).frameInfos.remove());
}
ensureGlProgramConfigured();
@@ -196,14 +223,17 @@ public final class VideoCompositor {
outputTexturePool.ensureConfigured(
glObjectsProvider, inputFrame1.texture.width, inputFrame1.texture.height);
GlTextureInfo outputTexture = outputTexturePool.useTexture();
- long outputPresentationTimestampUs = framesToComposite.get(0).presentationTimeUs;
+ long outputPresentationTimestampUs = framesToComposite.get(PRIMARY_INPUT_ID).presentationTimeUs;
outputTextureTimestamps.add(outputPresentationTimestampUs);
drawFrame(inputFrame1.texture, inputFrame2.texture, outputTexture);
long syncObject = GlUtil.createGlSyncFence();
syncObjects.add(syncObject);
textureOutputListener.onTextureRendered(
- outputTexture, outputPresentationTimestampUs, this::releaseOutputFrame, syncObject);
+ outputTexture,
+ /* presentationTimeUs= */ outputPresentationTimestampUs,
+ this::releaseOutputFrame,
+ syncObject);
for (int i = 0; i < framesToComposite.size(); i++) {
InputFrameInfo inputFrameInfo = framesToComposite.get(i);
inputFrameInfo.releaseCallback.release(inputFrameInfo.presentationTimeUs);
@@ -215,14 +245,14 @@ public final class VideoCompositor {
return false;
}
long compositeTimestampUs = C.TIME_UNSET;
- for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
- Queue<InputFrameInfo> inputFrameInfoQueue = checkNotNull(inputFrameInfos.get(inputId));
- if (inputFrameInfoQueue.isEmpty()) {
+ for (int inputId = 0; inputId < inputSources.size(); inputId++) {
+ Queue<InputFrameInfo> inputFrameInfos = inputSources.get(inputId).frameInfos;
+ if (inputFrameInfos.isEmpty()) {
return false;
}
- long inputTimestampUs = checkNotNull(inputFrameInfoQueue.peek()).presentationTimeUs;
- if (inputId == 0) {
+ long inputTimestampUs = checkNotNull(inputFrameInfos.peek()).presentationTimeUs;
+ if (inputId == PRIMARY_INPUT_ID) {
compositeTimestampUs = inputTimestampUs;
}
// TODO: b/262694346 - Allow for different frame-rates to be composited, by potentially
@@ -291,6 +321,7 @@ public final class VideoCompositor {
private void releaseGlObjects() {
try {
+ checkState(allInputsEnded);
outputTexturePool.deleteAllTextures();
GlUtil.destroyEglSurface(eglDisplay, placeholderEglSurface);
if (glProgram != null) {
@@ -307,6 +338,16 @@ public final class VideoCompositor {
}
}
+ /** Holds information on an input source. */
+ private static final class InputSource {
+ public final Queue<InputFrameInfo> frameInfos;
+ public boolean isInputEnded;
+
+ public InputSource() {
+ frameInfos = new ArrayDeque<>();
+ }
+ }
+
/** Holds information on a frame and how to release it. */
private static final class InputFrameInfo {
public final GlTextureInfo texture;
diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/VideoCompositorPixelTest.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/VideoCompositorPixelTest.java
index 195c155fe6..308356ff20 100644
--- a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/VideoCompositorPixelTest.java
+++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/VideoCompositorPixelTest.java
@@ -16,7 +16,6 @@
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
-import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
@@ -248,7 +247,7 @@ public final class VideoCompositorPixelTest {
private final VideoCompositor videoCompositor;
private final @Nullable ExecutorService sharedExecutorService;
private final AtomicReference<VideoFrameProcessingException> compositionException;
- private @MonotonicNonNull CountDownLatch compositorEnded;
+ private final CountDownLatch compositorEnded;
public VideoCompositorTestRunner(
String testId,
@@ -263,17 +262,25 @@ public final class VideoCompositorPixelTest {
/* sharedEglContext= */ useSharedExecutor ? null : sharedEglContext);
compositionException = new AtomicReference<>();
+ compositorEnded = new CountDownLatch(1);
videoCompositor =
new VideoCompositor(
getApplicationContext(),
glObjectsProvider,
sharedExecutorService,
- /* errorListener= */ compositionException::set,
- (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
- compositorTextureOutputListener.onTextureRendered(
- outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject);
- checkNotNull(compositorEnded).countDown();
+ new VideoCompositor.Listener() {
+ @Override
+ public void onError(VideoFrameProcessingException exception) {
+ compositionException.set(exception);
+ compositorEnded.countDown();
+ }
+
+ @Override
+ public void onEnded() {
+ compositorEnded.countDown();
+ }
},
+ compositorTextureOutputListener,
/* textureOutputCapacity= */ 1);
inputBitmapReader1 = new TextureBitmapReader();
inputVideoFrameProcessorTestRunner1 =
@@ -302,7 +309,6 @@ public final class VideoCompositorPixelTest {
* seconds.
*/
public void queueBitmapsToBothInputs(int count) throws IOException, InterruptedException {
- compositorEnded = new CountDownLatch(count);
inputVideoFrameProcessorTestRunner1.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ count * C.MICROS_PER_SECOND,
@@ -315,9 +321,21 @@ public final class VideoCompositorPixelTest {
/* frameRate= */ 1);
inputVideoFrameProcessorTestRunner1.endFrameProcessing();
inputVideoFrameProcessorTestRunner2.endFrameProcessing();
- compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS);
+
+ videoCompositor.signalEndOfInputSource(/* inputId= */ 0);
+ videoCompositor.signalEndOfInputSource(/* inputId= */ 1);
+ @Nullable Exception endCompositingException = null;
+ try {
+ if (!compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS)) {
+ endCompositingException = new IllegalStateException("Compositing timed out.");
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ endCompositingException = e;
+ }
assertThat(compositionException.get()).isNull();
+ assertThat(endCompositingException).isNull();
}
public void release() {