Compositor: Add support for mismatched timestamps.

This means we now require 2+ input frames per input, and compare the primary
stream timestamp with secondary stream timestamps in order to select the
correct output timestamp. We must also release frames, and relieve back-pressure,
as soon as possible, to avoid blocking upstream VFPs.
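
As a standalone sketch of the selection rule (illustrative names only, not the
Media3 API): for each secondary stream, pick the queued frame whose timestamp is
closest to the primary frame's timestamp, committing to a frame only once a later
frame (or end of input) proves it is the closest.

  // Returns the index of the secondary frame to composite with the primary frame at
  // primaryUs, or -1 if selection must wait for more secondary frames.
  static int selectSecondaryFrame(long primaryUs, List<Long> secondaryUs, boolean inputEnded) {
    long minDistanceUs = Long.MAX_VALUE;
    int selectedIndex = -1;
    for (int i = 0; i < secondaryUs.size(); i++) {
      long distanceUs = Math.abs(secondaryUs.get(i) - primaryUs);
      if (distanceUs < minDistanceUs) { // Strict "<": ties go to the earlier timestamp.
        minDistanceUs = distanceUs;
        selectedIndex = i;
      }
      // Commit once a frame past the primary timestamp is seen, or the stream has ended.
      if (secondaryUs.get(i) > primaryUs || (i == secondaryUs.size() - 1 && inputEnded)) {
        return selectedIndex;
      }
    }
    return -1; // Needs 2+ queued frames, or end of input, to commit.
  }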

Also, improve signalling of VFP onReadyToAcceptInputFrame

PiperOrigin-RevId: 553448965
huangdarwin authored 2023-08-03 12:43:54 +00:00, committed by Tianyi Feng
parent ed1ff222bb
commit 05782a7e99
5 changed files with 374 additions and 70 deletions

View File

@ -17,6 +17,8 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import android.content.Context;
import android.opengl.EGLContext;
@ -35,9 +37,12 @@ import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
@ -53,10 +58,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@UnstableApi
public final class DefaultVideoCompositor implements VideoCompositor {
// TODO: b/262694346 - Flesh out this implementation by doing the following:
// * Use a lock to synchronize inputFrameInfos more narrowly, to reduce blocking.
// * If the primary stream ends, consider setting the secondary stream as the new primary stream,
// so that secondary stream frames aren't dropped.
// * Consider adding info about the timestamps for each input frame used to composite an output
// frame, to aid debugging and testing.
private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
private static final String TAG = "DefaultVideoCompositor";
@ -73,6 +79,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
@GuardedBy("this")
private final List<InputSource> inputSources;
@GuardedBy("this")
private boolean allInputsEnded; // Whether all inputSources have signaled end of input.
private final TexturePool outputTexturePool;
@ -120,6 +127,13 @@ public final class DefaultVideoCompositor implements VideoCompositor {
videoFrameProcessingTaskExecutor.submit(this::setupGlObjects);
}
/**
* {@inheritDoc}
*
* <p>The input source must be able to have at least two {@linkplain #queueInputTexture queued
* textures} before one texture is {@linkplain
* DefaultVideoFrameProcessor.ReleaseOutputTextureCallback released}.
*/
@Override
public synchronized int registerInputSource() {
inputSources.add(new InputSource());
@ -129,14 +143,28 @@ public final class DefaultVideoCompositor implements VideoCompositor {
@Override
public synchronized void signalEndOfInputSource(int inputId) {
inputSources.get(inputId).isInputEnded = true;
boolean allInputsEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
if (!inputSources.get(i).isInputEnded) {
allInputsEnded = false;
break;
}
}
this.allInputsEnded = allInputsEnded;
if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
if (inputId == PRIMARY_INPUT_ID) {
releaseExcessFramesInAllSecondaryStreams();
}
if (allInputsEnded) {
listener.onEnded();
return;
}
}
if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
// When a secondary stream ends input, composite if there was only one pending frame in the
// stream.
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
}
}
@ -146,10 +174,19 @@ public final class DefaultVideoCompositor implements VideoCompositor {
GlTextureInfo inputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseTextureCallback) {
InputSource inputSource = inputSources.get(inputId);
checkState(!inputSource.isInputEnded);
InputFrameInfo inputFrameInfo =
new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
inputSource.frameInfos.add(inputFrameInfo);
if (inputId == PRIMARY_INPUT_ID) {
releaseExcessFramesInAllSecondaryStreams();
} else {
releaseExcessFramesInSecondaryStream(inputSource);
}
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
}
@ -163,6 +200,56 @@ public final class DefaultVideoCompositor implements VideoCompositor {
}
}
private synchronized void releaseExcessFramesInAllSecondaryStreams() {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_ID) {
continue;
}
releaseExcessFramesInSecondaryStream(inputSources.get(i));
}
}
/**
* Releases unneeded frames from the {@link InputSource} secondary stream.
*
* <p>After this method returns, of the frames present when it began executing, at most one frame
* with a timestamp less than the primary stream's next timestamp remains.
*/
private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
// If the primary stream output is ended, all secondary frames can be released.
if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
releaseFrames(
secondaryInputSource,
/* numberOfFramesToRelease= */ secondaryInputSource.frameInfos.size());
return;
}
// Release frames until the secondary stream holds at most one frame with a timestamp <=
// nextTimestampToComposite.
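// For example, with hypothetical timestamps: if the next primary frame is at 2_000_000us and
// the secondary stream holds frames at {0, 1_000_000, 3_000_000}us, two frames are at or before
// the target, so max(2 - 1, 0) = 1 frame (0us) is released, keeping 1_000_000us as a candidate.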
@Nullable InputFrameInfo nextPrimaryFrame = primaryInputSource.frameInfos.peek();
long nextTimestampToComposite =
nextPrimaryFrame != null ? nextPrimaryFrame.presentationTimeUs : C.TIME_UNSET;
int numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp =
Iterables.size(
Iterables.filter(
secondaryInputSource.frameInfos,
frame -> frame.presentationTimeUs <= nextTimestampToComposite));
releaseFrames(
secondaryInputSource,
/* numberOfFramesToRelease= */ max(
numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp - 1, 0));
}
private synchronized void releaseFrames(InputSource inputSource, int numberOfFramesToRelease) {
for (int i = 0; i < numberOfFramesToRelease; i++) {
InputFrameInfo frameInfoToRelease = inputSource.frameInfos.remove();
frameInfoToRelease.releaseCallback.release(frameInfoToRelease.presentationTimeUs);
}
}
// Below methods must be called on the GL thread.
private void setupGlObjects() throws GlUtil.GlException {
eglDisplay = GlUtil.getDefaultEglDisplay();
@ -175,15 +262,11 @@ public final class DefaultVideoCompositor implements VideoCompositor {
private synchronized void maybeComposite()
throws VideoFrameProcessingException, GlUtil.GlException {
ImmutableList<InputFrameInfo> framesToComposite = getFramesToComposite();
if (framesToComposite.isEmpty()) {
return;
}
ensureGlProgramConfigured();
// TODO: b/262694346 -
@ -204,40 +287,81 @@ public final class DefaultVideoCompositor implements VideoCompositor {
syncObjects.add(syncObject);
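// The output frame takes its presentation timestamp from the primary input frame selected for
// this composite.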
textureOutputListener.onTextureRendered(
outputTexture,
/* presentationTimeUs= */ outputPresentationTimestampUs,
this::releaseOutputFrame,
syncObject);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
releaseExcessFramesInAllSecondaryStreams();
if (allInputsEnded && inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
listener.onEnded();
}
}
/**
* Checks whether {@code inputSources} is able to composite, and if so, returns a list of {@link
* InputFrameInfo}s that should be composited next.
*
* <p>The first input frame info in the list is from the primary source. An empty list is
* returned if {@code inputSources} cannot composite now.
*/
private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
if (outputTexturePool.freeTextureCount() == 0) {
return ImmutableList.of();
}
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputSources.get(inputId).frameInfos.isEmpty()) {
return ImmutableList.of();
}
}
ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
InputFrameInfo primaryFrameToComposite =
inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
framesToComposite.add(primaryFrameToComposite);
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputId == PRIMARY_INPUT_ID) {
continue;
}
// Select the secondary stream frame that will be composited next: the frame whose timestamp
// is closest to the primary stream frame's timestamp. A frame is only selected if the
// secondary stream has either:
// 1. One or more frames, and the secondary stream has ended, or
// 2. Two or more frames, and at least one frame with a timestamp greater than the target
//    timestamp.
// The smaller timestamp is taken if two timestamps have the same distance from the primary.
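// For example, with hypothetical timestamps: for a primary frame at 1_000_000us and secondary
// frames at {0, 900_000, 1_100_000}us, 900_000us and 1_100_000us are equidistant from the
// primary, so the earlier frame (900_000us) is selected.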
InputSource secondaryInputSource = inputSources.get(inputId);
if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
return ImmutableList.of();
}
long minTimeDiffFromPrimaryUs = Long.MAX_VALUE;
@Nullable InputFrameInfo secondaryFrameToComposite = null;
Iterator<InputFrameInfo> frameInfosIterator = secondaryInputSource.frameInfos.iterator();
while (frameInfosIterator.hasNext()) {
InputFrameInfo candidateFrame = frameInfosIterator.next();
long candidateTimestampUs = candidateFrame.presentationTimeUs;
long candidateAbsDistance =
abs(candidateTimestampUs - primaryFrameToComposite.presentationTimeUs);
if (candidateAbsDistance < minTimeDiffFromPrimaryUs) {
minTimeDiffFromPrimaryUs = candidateAbsDistance;
secondaryFrameToComposite = candidateFrame;
}
if (candidateTimestampUs > primaryFrameToComposite.presentationTimeUs
|| (!frameInfosIterator.hasNext() && secondaryInputSource.isInputEnded)) {
framesToComposite.add(checkNotNull(secondaryFrameToComposite));
break;
}
}
}
ImmutableList<InputFrameInfo> framesToCompositeList = framesToComposite.build();
if (framesToCompositeList.size() != inputSources.size()) {
return ImmutableList.of();
}
return framesToCompositeList;
}
private void releaseOutputFrame(long presentationTimeUs) {
@ -295,7 +419,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
GlUtil.checkGlError();
}
private synchronized void releaseGlObjects() {
try {
checkState(allInputsEnded);
outputTexturePool.deleteAllTextures();
@ -316,7 +440,10 @@ public final class DefaultVideoCompositor implements VideoCompositor {
/** Holds information on an input source. */
private static final class InputSource {
// A queue of {@link InputFrameInfo}s, inserted in order from lower to higher {@code
// presentationTimeUs} values.
public final Queue<InputFrameInfo> frameInfos;
public boolean isInputEnded;
public InputSource() {

View File

@ -72,6 +72,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private static final String TAG = "FinalShaderWrapper";
private static final int SURFACE_INPUT_CAPACITY = 1;
private final Context context;
private final List<GlMatrixTransformation> matrixTransformations;
@ -154,7 +155,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void setInputListener(InputListener inputListener) {
this.inputListener = inputListener;
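// With texture output, input capacity equals the number of free textures in the output pool;
// with surface output, one input frame is accepted at a time.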
int inputCapacity =
textureOutputListener == null
? SURFACE_INPUT_CAPACITY
: outputTexturePool.freeTextureCount();
for (int i = 0; i < inputCapacity; i++) {
inputListener.onReadyToAcceptInputFrame();
}
}
@Override
@ -196,6 +203,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
}
inputListener.onReadyToAcceptInputFrame();
} else {
checkState(outputTexturePool.freeTextureCount() > 0);
renderFrame(
@ -204,7 +212,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
presentationTimeUs,
/* renderTimeNs= */ presentationTimeUs * 1000);
}
}
@Override
@ -218,12 +225,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private void releaseOutputFrameInternal(long presentationTimeUs) throws GlUtil.GlException {
checkState(textureOutputListener != null);
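// Free output textures up to and including presentationTimeUs, signaling readiness for one new
// input frame per texture freed.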
while (outputTexturePool.freeTextureCount() < outputTexturePool.capacity()
&& checkNotNull(outputTextureTimestamps.peek()) <= presentationTimeUs) {
outputTexturePool.freeTexture();
outputTextureTimestamps.remove();
GlUtil.deleteSyncObject(syncObjects.remove());
inputListener.onReadyToAcceptInputFrame();
}
}
@ -251,7 +259,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
defaultShaderProgram.flush();
}
inputListener.onFlush();
if (textureOutputListener == null) {
// TODO: b/293572152 - Add texture output flush() support, propagating the flush() signal to
// downstream components so that they can release TexturePool resources and FinalWrapper can
// call onReadyToAcceptInputFrame().
inputListener.onReadyToAcceptInputFrame();
}
}
@Override
@ -310,12 +323,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.outputSurfaceInfo = outputSurfaceInfo;
}
private synchronized void renderFrame(
GlObjectsProvider glObjectsProvider,
GlTextureInfo inputTexture,

View File

@ -73,11 +73,13 @@ public final class VideoFrameProcessorTestRunner {
private @MonotonicNonNull ColorInfo inputColorInfo;
private @MonotonicNonNull ColorInfo outputColorInfo;
private OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener;
private OnVideoFrameProcessingEndedListener onEndedListener;
/** Creates a new instance with default values. */
public Builder() {
pixelWidthHeightRatio = DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO;
onOutputFrameAvailableListener = unused -> {};
onEndedListener = () -> {};
}
/**
@ -206,6 +208,17 @@ public final class VideoFrameProcessorTestRunner {
return this;
}
/**
* Sets the method to be called in {@link VideoFrameProcessor.Listener#onEnded}.
*
* <p>The default value is a no-op.
*/
@CanIgnoreReturnValue
public Builder setOnEndedListener(OnVideoFrameProcessingEndedListener onEndedListener) {
this.onEndedListener = onEndedListener;
return this;
}
public VideoFrameProcessorTestRunner build() throws VideoFrameProcessingException {
checkStateNotNull(testId, "testId must be set.");
checkStateNotNull(videoFrameProcessorFactory, "videoFrameProcessorFactory must be set.");
@ -220,7 +233,8 @@ public final class VideoFrameProcessorTestRunner {
pixelWidthHeightRatio,
inputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : inputColorInfo,
outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo,
onOutputFrameAvailableListener,
onEndedListener);
}
}
@ -251,7 +265,8 @@ public final class VideoFrameProcessorTestRunner {
float pixelWidthHeightRatio,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableForRenderingListener,
OnVideoFrameProcessingEndedListener onEndedListener)
throws VideoFrameProcessingException {
this.testId = testId;
this.bitmapReader = bitmapReader;
@ -298,6 +313,7 @@ public final class VideoFrameProcessorTestRunner {
@Override
public void onEnded() {
checkNotNull(videoFrameProcessingEndedLatch).countDown();
onEndedListener.onEnded();
}
});
this.effects = effects;
@ -361,14 +377,32 @@ public final class VideoFrameProcessorTestRunner {
}
/** {@link #endFrameProcessing(long)} with {@link #VIDEO_FRAME_PROCESSING_WAIT_MS} applied. */
public void endFrameProcessing() {
endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS);
}
/**
* Ends {@link VideoFrameProcessor} frame processing.
*
* <p>Waits up to {@code videoFrameProcessingWaitTimeMs} for frame processing to end.
*/
public void endFrameProcessing(long videoFrameProcessingWaitTimeMs) {
signalEndOfInput();
awaitFrameProcessingEnd(videoFrameProcessingWaitTimeMs);
}
/**
* Calls {@link VideoFrameProcessor#signalEndOfInput}.
*
* <p>Calling this and {@link #awaitFrameProcessingEnd} is an alternative to {@link
* #endFrameProcessing}.
*/
public void signalEndOfInput() {
videoFrameProcessor.signalEndOfInput();
}
/** After {@link #signalEndOfInput} is called, waits for this instance to end frame processing. */
public void awaitFrameProcessingEnd(long videoFrameProcessingWaitTimeMs) {
@Nullable Exception endFrameProcessingException = null;
try {
if (!checkNotNull(videoFrameProcessingEndedLatch)
@ -377,6 +411,7 @@ public final class VideoFrameProcessorTestRunner {
new IllegalStateException("Video frame processing timed out.");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
endFrameProcessingException = e;
}
assertThat(videoFrameProcessingException.get()).isNull();
@ -404,6 +439,10 @@ public final class VideoFrameProcessorTestRunner {
void onFrameAvailableForRendering(long presentationTimeUs);
}
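/** A listener for when video frame processing ends. */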
public interface OnVideoFrameProcessingEndedListener {
void onEnded();
}
/** Reads a {@link Bitmap} from {@link VideoFrameProcessor} output. */
public interface BitmapReader {

View File

@ -18,6 +18,7 @@ package androidx.media3.transformer;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.test.utils.VideoFrameProcessorTestRunner.VIDEO_FRAME_PROCESSING_WAIT_MS;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
@ -100,7 +101,7 @@ public final class DefaultVideoCompositorPixelTest {
public void compositeTwoInputs_withOneFrameFromEach_matchesExpectedBitmap() throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(/* durationSec= */ 1);
saveAndAssertBitmapMatchesExpected(
testId,
@ -122,7 +123,7 @@ public final class DefaultVideoCompositorPixelTest {
throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(/* durationSec= */ 5);
ImmutableList<Long> expectedTimestamps =
ImmutableList.of(
@ -144,6 +145,128 @@ public final class DefaultVideoCompositorPixelTest {
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
// TODO: b/262694346 - Add tests for:
// * variable frame-rate.
// * checking correct input frames are composited.
@Test
@RequiresNonNull("testId")
public void composite_onePrimaryAndFiveSecondaryFrames_matchesExpectedTimestamps()
throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(
/* durationSec= */ 1, /* secondarySourceFrameRate= */ 5f);
ImmutableList<Long> primaryTimestamps = ImmutableList.of(0 * C.MICROS_PER_SECOND);
ImmutableList<Long> secondaryTimestamps =
ImmutableList.of(
0 * C.MICROS_PER_SECOND,
1 * C.MICROS_PER_SECOND / 5,
2 * C.MICROS_PER_SECOND / 5,
3 * C.MICROS_PER_SECOND / 5,
4 * C.MICROS_PER_SECOND / 5);
assertThat(compositorTestRunner.inputBitmapReader1.getOutputTimestamps())
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.inputBitmapReader2.getOutputTimestamps())
.containsExactlyElementsIn(secondaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.compositedTimestamps)
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
compositorTestRunner.saveAndAssertFirstCompositedBitmapMatchesExpected(
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
@Test
@RequiresNonNull("testId")
public void composite_fivePrimaryAndOneSecondaryFrames_matchesExpectedTimestamps()
throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(
/* durationSec= */ 5, /* secondarySourceFrameRate= */ .2f);
ImmutableList<Long> primaryTimestamps =
ImmutableList.of(
0 * C.MICROS_PER_SECOND,
1 * C.MICROS_PER_SECOND,
2 * C.MICROS_PER_SECOND,
3 * C.MICROS_PER_SECOND,
4 * C.MICROS_PER_SECOND);
ImmutableList<Long> secondaryTimestamps = ImmutableList.of(0 * C.MICROS_PER_SECOND);
assertThat(compositorTestRunner.inputBitmapReader1.getOutputTimestamps())
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.inputBitmapReader2.getOutputTimestamps())
.containsExactlyElementsIn(secondaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.compositedTimestamps)
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
compositorTestRunner.saveAndAssertFirstCompositedBitmapMatchesExpected(
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
@Test
@RequiresNonNull("testId")
public void composite_primaryDoubleSecondaryFrameRate_matchesExpectedTimestamps()
throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(
/* durationSec= */ 4, /* secondarySourceFrameRate= */ .5f);
ImmutableList<Long> primaryTimestamps =
ImmutableList.of(
0 * C.MICROS_PER_SECOND,
1 * C.MICROS_PER_SECOND,
2 * C.MICROS_PER_SECOND,
3 * C.MICROS_PER_SECOND);
ImmutableList<Long> secondaryTimestamps =
ImmutableList.of(0 * C.MICROS_PER_SECOND, 2 * C.MICROS_PER_SECOND);
assertThat(compositorTestRunner.inputBitmapReader1.getOutputTimestamps())
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.inputBitmapReader2.getOutputTimestamps())
.containsExactlyElementsIn(secondaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.compositedTimestamps)
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
compositorTestRunner.saveAndAssertFirstCompositedBitmapMatchesExpected(
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
@Test
@RequiresNonNull("testId")
public void composite_primaryHalfSecondaryFrameRate_matchesExpectedTimestamps() throws Exception {
compositorTestRunner = new VideoCompositorTestRunner(testId, useSharedExecutor);
compositorTestRunner.queueBitmapsToBothInputs(
/* durationSec= */ 2, /* secondarySourceFrameRate= */ 2f);
ImmutableList<Long> primaryTimestamps =
ImmutableList.of(0 * C.MICROS_PER_SECOND, 1 * C.MICROS_PER_SECOND);
ImmutableList<Long> secondaryTimestamps =
ImmutableList.of(
0 * C.MICROS_PER_SECOND,
1 * C.MICROS_PER_SECOND / 2,
2 * C.MICROS_PER_SECOND / 2,
3 * C.MICROS_PER_SECOND / 2);
assertThat(compositorTestRunner.inputBitmapReader1.getOutputTimestamps())
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.inputBitmapReader2.getOutputTimestamps())
.containsExactlyElementsIn(secondaryTimestamps)
.inOrder();
assertThat(compositorTestRunner.compositedTimestamps)
.containsExactlyElementsIn(primaryTimestamps)
.inOrder();
compositorTestRunner.saveAndAssertFirstCompositedBitmapMatchesExpected(
GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
}
@Test
@RequiresNonNull("testId")
public void compositeTwoInputs_withTenFramesFromEach_matchesExpectedFrameCount()
@ -168,7 +291,8 @@ public final class DefaultVideoCompositorPixelTest {
* <p>Composites input bitmaps from two input sources.
*/
private static final class VideoCompositorTestRunner {
// Compositor tests rely on 2 VideoFrameProcessor instances, plus the compositor.
private static final int COMPOSITOR_TIMEOUT_MS = 2 * VIDEO_FRAME_PROCESSING_WAIT_MS;
private static final Effect ROTATE_180_EFFECT =
new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
private static final Effect GRAYSCALE_EFFECT = RgbFilter.createGrayscaleFilter();
@ -256,25 +380,36 @@ public final class DefaultVideoCompositorPixelTest {
}
/**
* Queues {@code durationSec} bitmaps, with one bitmap per second, starting from and including
* {@code 0} seconds. Both sources have a {@code frameRate} of {@code 1}.
*/
public void queueBitmapsToBothInputs(int durationSec) throws IOException {
queueBitmapsToBothInputs(durationSec, /* secondarySourceFrameRate= */ 1);
}
/**
* Queues {@code durationSec} bitmaps, with one bitmap per second, starting from and including
* {@code 0} seconds. The primary source has a {@code frameRate} of {@code 1}, while secondary
* sources have a {@code frameRate} of {@code secondarySourceFrameRate}.
*/
public void queueBitmapsToBothInputs(int durationSec, float secondarySourceFrameRate)
throws IOException {
inputVideoFrameProcessorTestRunner1.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ durationSec * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ 1);
inputVideoFrameProcessorTestRunner2.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ durationSec * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0,
/* frameRate= */ secondarySourceFrameRate);
inputVideoFrameProcessorTestRunner1.signalEndOfInput();
inputVideoFrameProcessorTestRunner2.signalEndOfInput();
inputVideoFrameProcessorTestRunner1.awaitFrameProcessingEnd(COMPOSITOR_TIMEOUT_MS);
inputVideoFrameProcessorTestRunner2.awaitFrameProcessingEnd(COMPOSITOR_TIMEOUT_MS);
@Nullable Exception endCompositingException = null;
try {
if (!compositorEnded.await(COMPOSITOR_TIMEOUT_MS, MILLISECONDS)) {
@ -337,7 +472,7 @@ public final class DefaultVideoCompositorPixelTest {
videoCompositor.queueInputTexture(
inputId, outputTexture, presentationTimeUs, releaseOutputTextureCallback);
},
/* textureOutputCapacity= */ 2);
if (executorService != null) {
defaultVideoFrameProcessorFactoryBuilder.setExecutorService(executorService);
}
@ -345,7 +480,8 @@ public final class DefaultVideoCompositorPixelTest {
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setBitmapReader(textureBitmapReader)
.setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputId));
}
}

View File

@ -554,12 +554,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.build();
GlUtil.awaitSyncObject(syncObject);
videoFrameProcessorTestRunner.queueInputTexture(texture, presentationTimeUs);
videoFrameProcessorTestRunner.endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS / 2);
releaseOutputTextureCallback.release(presentationTimeUs);
}