Effect: Add Compositor signalEndOfInputStream and onEnded.

signalEndOfInputStream is needed for when streams have different numbers of
frames, so that if the primary stream finishes after a secondary stream, it
can end without waiting indefinitely for the secondary stream's matching
timestamps.

onEnded mirrors this API on the output side, which will be necessary to
know when to call signalEndOfInput on downstream components (e.g., on
downstream VideoFrameProcessors).

PiperOrigin-RevId: 549969933
This commit is contained in:
huangdarwin 2023-07-21 17:35:40 +01:00 committed by Rohit Singh
parent c2615a679f
commit 5858723a06
2 changed files with 88 additions and 29 deletions

View File

@ -59,29 +59,34 @@ public final class VideoCompositor {
// * Use a lock to synchronize inputFrameInfos more narrowly, to reduce blocking. // * Use a lock to synchronize inputFrameInfos more narrowly, to reduce blocking.
/** Listener for errors. */ /** Listener for errors. */
public interface ErrorListener { public interface Listener {
/** /**
* Called when an exception occurs during asynchronous frame compositing. * Called when an exception occurs during asynchronous frame compositing.
* *
* <p>Using {@code VideoCompositor} after an error happens is undefined behavior. * <p>Using {@link VideoCompositor} after an error happens is undefined behavior.
*/ */
void onError(VideoFrameProcessingException exception); void onError(VideoFrameProcessingException exception);
/** Called after {@link VideoCompositor} has output its final output frame. */
void onEnded();
} }
private static final String THREAD_NAME = "Effect:VideoCompositor:GlThread"; private static final String THREAD_NAME = "Effect:VideoCompositor:GlThread";
private static final String TAG = "VideoCompositor"; private static final String TAG = "VideoCompositor";
private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl"; private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl";
private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl"; private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl";
private static final int PRIMARY_INPUT_ID = 0;
private final Context context; private final Context context;
private final Listener listener;
private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener; private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
private final GlObjectsProvider glObjectsProvider; private final GlObjectsProvider glObjectsProvider;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// List of queues of unprocessed frames for each input source.
@GuardedBy("this") @GuardedBy("this")
private final List<Queue<InputFrameInfo>> inputFrameInfos; private final List<InputSource> inputSources;
private boolean allInputsEnded; // Whether all inputSources have signaled end of input.
private final TexturePool outputTexturePool; private final TexturePool outputTexturePool;
private final Queue<Long> outputTextureTimestamps; // Synchronized with outputTexturePool. private final Queue<Long> outputTextureTimestamps; // Synchronized with outputTexturePool.
@ -102,14 +107,15 @@ public final class VideoCompositor {
Context context, Context context,
GlObjectsProvider glObjectsProvider, GlObjectsProvider glObjectsProvider,
@Nullable ExecutorService executorService, @Nullable ExecutorService executorService,
ErrorListener errorListener, Listener listener,
DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener, DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener,
@IntRange(from = 1) int textureOutputCapacity) { @IntRange(from = 1) int textureOutputCapacity) {
this.context = context; this.context = context;
this.listener = listener;
this.textureOutputListener = textureOutputListener; this.textureOutputListener = textureOutputListener;
this.glObjectsProvider = glObjectsProvider; this.glObjectsProvider = glObjectsProvider;
inputFrameInfos = new ArrayList<>(); inputSources = new ArrayList<>();
outputTexturePool = outputTexturePool =
new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity); new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
outputTextureTimestamps = new ArrayDeque<>(textureOutputCapacity); outputTextureTimestamps = new ArrayDeque<>(textureOutputCapacity);
@ -122,7 +128,7 @@ public final class VideoCompositor {
new VideoFrameProcessingTaskExecutor( new VideoFrameProcessingTaskExecutor(
instanceExecutorService, instanceExecutorService,
/* shouldShutdownExecutorService= */ ownsExecutor, /* shouldShutdownExecutorService= */ ownsExecutor,
errorListener::onError); listener::onError);
videoFrameProcessingTaskExecutor.submit(this::setupGlObjects); videoFrameProcessingTaskExecutor.submit(this::setupGlObjects);
} }
@ -131,8 +137,28 @@ public final class VideoCompositor {
* source, to be used in {@link #queueInputTexture}. * source, to be used in {@link #queueInputTexture}.
*/ */
public synchronized int registerInputSource() { public synchronized int registerInputSource() {
inputFrameInfos.add(new ArrayDeque<>()); inputSources.add(new InputSource());
return inputFrameInfos.size() - 1; return inputSources.size() - 1;
}
/**
* Signals that no more frames will come from the upstream {@link
* DefaultVideoFrameProcessor.TextureOutputListener}.
*
* <p>Each input source must have a unique {@code inputId} returned from {@link
* #registerInputSource}.
*/
public synchronized void signalEndOfInputSource(int inputId) {
inputSources.get(inputId).isInputEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
if (!inputSources.get(i).isInputEnded) {
return;
}
}
allInputsEnded = true;
if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
listener.onEnded();
}
} }
/** /**
@ -148,9 +174,10 @@ public final class VideoCompositor {
long presentationTimeUs, long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseTextureCallback) DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseTextureCallback)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
checkState(!inputSources.get(inputId).isInputEnded);
InputFrameInfo inputFrameInfo = InputFrameInfo inputFrameInfo =
new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback); new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo); inputSources.get(inputId).frameInfos.add(inputFrameInfo);
videoFrameProcessingTaskExecutor.submit(this::maybeComposite); videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
} }
@ -180,8 +207,8 @@ public final class VideoCompositor {
} }
List<InputFrameInfo> framesToComposite = new ArrayList<>(); List<InputFrameInfo> framesToComposite = new ArrayList<>();
for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) { for (int inputId = 0; inputId < inputSources.size(); inputId++) {
framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove()); framesToComposite.add(inputSources.get(inputId).frameInfos.remove());
} }
ensureGlProgramConfigured(); ensureGlProgramConfigured();
@ -196,14 +223,17 @@ public final class VideoCompositor {
outputTexturePool.ensureConfigured( outputTexturePool.ensureConfigured(
glObjectsProvider, inputFrame1.texture.width, inputFrame1.texture.height); glObjectsProvider, inputFrame1.texture.width, inputFrame1.texture.height);
GlTextureInfo outputTexture = outputTexturePool.useTexture(); GlTextureInfo outputTexture = outputTexturePool.useTexture();
long outputPresentationTimestampUs = framesToComposite.get(0).presentationTimeUs; long outputPresentationTimestampUs = framesToComposite.get(PRIMARY_INPUT_ID).presentationTimeUs;
outputTextureTimestamps.add(outputPresentationTimestampUs); outputTextureTimestamps.add(outputPresentationTimestampUs);
drawFrame(inputFrame1.texture, inputFrame2.texture, outputTexture); drawFrame(inputFrame1.texture, inputFrame2.texture, outputTexture);
long syncObject = GlUtil.createGlSyncFence(); long syncObject = GlUtil.createGlSyncFence();
syncObjects.add(syncObject); syncObjects.add(syncObject);
textureOutputListener.onTextureRendered( textureOutputListener.onTextureRendered(
outputTexture, outputPresentationTimestampUs, this::releaseOutputFrame, syncObject); outputTexture,
/* presentationTimeUs= */ framesToComposite.get(0).presentationTimeUs,
this::releaseOutputFrame,
syncObject);
for (int i = 0; i < framesToComposite.size(); i++) { for (int i = 0; i < framesToComposite.size(); i++) {
InputFrameInfo inputFrameInfo = framesToComposite.get(i); InputFrameInfo inputFrameInfo = framesToComposite.get(i);
inputFrameInfo.releaseCallback.release(inputFrameInfo.presentationTimeUs); inputFrameInfo.releaseCallback.release(inputFrameInfo.presentationTimeUs);
@ -215,14 +245,14 @@ public final class VideoCompositor {
return false; return false;
} }
long compositeTimestampUs = C.TIME_UNSET; long compositeTimestampUs = C.TIME_UNSET;
for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) { for (int inputId = 0; inputId < inputSources.size(); inputId++) {
Queue<InputFrameInfo> inputFrameInfoQueue = checkNotNull(inputFrameInfos.get(inputId)); Queue<InputFrameInfo> inputFrameInfos = inputSources.get(inputId).frameInfos;
if (inputFrameInfoQueue.isEmpty()) { if (inputFrameInfos.isEmpty()) {
return false; return false;
} }
long inputTimestampUs = checkNotNull(inputFrameInfoQueue.peek()).presentationTimeUs; long inputTimestampUs = checkNotNull(inputFrameInfos.peek()).presentationTimeUs;
if (inputId == 0) { if (inputId == PRIMARY_INPUT_ID) {
compositeTimestampUs = inputTimestampUs; compositeTimestampUs = inputTimestampUs;
} }
// TODO: b/262694346 - Allow for different frame-rates to be composited, by potentially // TODO: b/262694346 - Allow for different frame-rates to be composited, by potentially
@ -291,6 +321,7 @@ public final class VideoCompositor {
private void releaseGlObjects() { private void releaseGlObjects() {
try { try {
checkState(allInputsEnded);
outputTexturePool.deleteAllTextures(); outputTexturePool.deleteAllTextures();
GlUtil.destroyEglSurface(eglDisplay, placeholderEglSurface); GlUtil.destroyEglSurface(eglDisplay, placeholderEglSurface);
if (glProgram != null) { if (glProgram != null) {
@ -307,6 +338,16 @@ public final class VideoCompositor {
} }
} }
/** Holds information on an input source. */
private static final class InputSource {
public final Queue<InputFrameInfo> frameInfos;
public boolean isInputEnded;
public InputSource() {
frameInfos = new ArrayDeque<>();
}
}
/** Holds information on a frame and how to release it. */ /** Holds information on a frame and how to release it. */
private static final class InputFrameInfo { private static final class InputFrameInfo {
public final GlTextureInfo texture; public final GlTextureInfo texture;

View File

@ -16,7 +16,6 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP; import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE; import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
@ -248,7 +247,7 @@ public final class VideoCompositorPixelTest {
private final VideoCompositor videoCompositor; private final VideoCompositor videoCompositor;
private final @Nullable ExecutorService sharedExecutorService; private final @Nullable ExecutorService sharedExecutorService;
private final AtomicReference<VideoFrameProcessingException> compositionException; private final AtomicReference<VideoFrameProcessingException> compositionException;
private @MonotonicNonNull CountDownLatch compositorEnded; private final CountDownLatch compositorEnded;
public VideoCompositorTestRunner( public VideoCompositorTestRunner(
String testId, String testId,
@ -263,17 +262,25 @@ public final class VideoCompositorPixelTest {
/* sharedEglContext= */ useSharedExecutor ? null : sharedEglContext); /* sharedEglContext= */ useSharedExecutor ? null : sharedEglContext);
compositionException = new AtomicReference<>(); compositionException = new AtomicReference<>();
compositorEnded = new CountDownLatch(1);
videoCompositor = videoCompositor =
new VideoCompositor( new VideoCompositor(
getApplicationContext(), getApplicationContext(),
glObjectsProvider, glObjectsProvider,
sharedExecutorService, sharedExecutorService,
/* errorListener= */ compositionException::set, new VideoCompositor.Listener() {
(outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> { @Override
compositorTextureOutputListener.onTextureRendered( public void onError(VideoFrameProcessingException exception) {
outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject); compositionException.set(exception);
checkNotNull(compositorEnded).countDown(); compositorEnded.countDown();
}
@Override
public void onEnded() {
compositorEnded.countDown();
}
}, },
compositorTextureOutputListener,
/* textureOutputCapacity= */ 1); /* textureOutputCapacity= */ 1);
inputBitmapReader1 = new TextureBitmapReader(); inputBitmapReader1 = new TextureBitmapReader();
inputVideoFrameProcessorTestRunner1 = inputVideoFrameProcessorTestRunner1 =
@ -302,7 +309,6 @@ public final class VideoCompositorPixelTest {
* seconds. * seconds.
*/ */
public void queueBitmapsToBothInputs(int count) throws IOException, InterruptedException { public void queueBitmapsToBothInputs(int count) throws IOException, InterruptedException {
compositorEnded = new CountDownLatch(count);
inputVideoFrameProcessorTestRunner1.queueInputBitmap( inputVideoFrameProcessorTestRunner1.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ count * C.MICROS_PER_SECOND, /* durationUs= */ count * C.MICROS_PER_SECOND,
@ -315,9 +321,21 @@ public final class VideoCompositorPixelTest {
/* frameRate= */ 1); /* frameRate= */ 1);
inputVideoFrameProcessorTestRunner1.endFrameProcessing(); inputVideoFrameProcessorTestRunner1.endFrameProcessing();
inputVideoFrameProcessorTestRunner2.endFrameProcessing(); inputVideoFrameProcessorTestRunner2.endFrameProcessing();
compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS);
videoCompositor.signalEndOfInputSource(/* inputId= */ 0);
videoCompositor.signalEndOfInputSource(/* inputId= */ 1);
@Nullable Exception endCompositingException = null;
try {
if (!compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS)) {
endCompositingException = new IllegalStateException("Compositing timed out.");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
endCompositingException = e;
}
assertThat(compositionException.get()).isNull(); assertThat(compositionException.get()).isNull();
assertThat(endCompositingException).isNull();
} }
public void release() { public void release() {