Effect: Delay end of stream until all frames are rendered.

Previously, if renderFramesAutomatically = false, DefaultVideoFrameProcessor
could call onInputStreamProcessedListener after all frames had been reported
via onOutputFrameAvailableForRendering, but before they had all been rendered
and freed.

Delay calling onInputStreamProcessedListener, and the subsequent DVFP
reconfiguration of effects, until all frames have been rendered.

Tested using the ExoPlayer setVideoEffects demo with a playlist.

PiperOrigin-RevId: 642963100
This commit is contained in:
claincly 2024-06-13 06:18:03 -07:00 committed by Copybara-Service
parent a0312615f2
commit 206d2ce8b8
3 changed files with 140 additions and 38 deletions

View File

@ -16,17 +16,23 @@
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmapUnpremultipliedAlpha;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.graphics.Bitmap;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
@ -35,8 +41,10 @@ import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -46,20 +54,30 @@ import org.junit.runner.RunWith;
public class DefaultVideoFrameProcessorTest {
private static final long INPUT_REGISTRATION_TIMEOUT_MS = 1_000L;
private static final String ORIGINAL_PNG_ASSET_PATH = "media/png/media3test_srgb.png";
private static final long TEST_TIMEOUT_MS = 10_000L;
private DefaultVideoFrameProcessor.@MonotonicNonNull Factory factory;
private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;
@Before
public void setUp() {
// Build a fresh factory per test; each test creates its own processor from it.
factory = new DefaultVideoFrameProcessor.Factory.Builder().build();
}
@After
public void tearDown() {
// Release the processor if a test created one, so its resources are freed
// between tests. Guarded because not every test assigns the field.
if (defaultVideoFrameProcessor != null) {
defaultVideoFrameProcessor.release();
}
}
@Test
public void registerInputStream_withBlockingVideoFrameProcessorConfiguration_succeeds()
throws Exception {
AtomicReference<Exception> videoFrameProcessingException = new AtomicReference<>();
CountDownLatch inputStreamRegisteredCountDownLatch = new CountDownLatch(1);
DefaultVideoFrameProcessor defaultVideoFrameProcessor =
defaultVideoFrameProcessor =
createDefaultVideoFrameProcessor(
new VideoFrameProcessor.Listener() {
@Override
@ -119,7 +137,7 @@ public class DefaultVideoFrameProcessorTest {
AtomicReference<Exception> videoFrameProcessingException = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(3);
Queue<InputStreamInfo> registeredInputStreamInfoWidths = new ConcurrentLinkedQueue<>();
DefaultVideoFrameProcessor defaultVideoFrameProcessor =
defaultVideoFrameProcessor =
createDefaultVideoFrameProcessor(
new VideoFrameProcessor.Listener() {
@Override
@ -177,6 +195,99 @@ public class DefaultVideoFrameProcessorTest {
.inOrder();
}
// Verifies that, with manual frame rendering (renderFramesAutomatically = false), the
// second input stream's effects are configured only after every frame of the first
// stream has been rendered. Compares wall-clock timestamps captured at (a) the last
// frame of stream 1 becoming available and (b) stream 2's GlEffect being configured.
@Test
public void
registerInputStream_withManualFrameRendering_configuresTheSecondStreamAfterRenderingAllFramesFromTheFirst()
throws Exception {
AtomicReference<Exception> videoFrameProcessingException = new AtomicReference<>();
// Wall-clock time (ms) when the 30th (last) frame of the first stream became available.
AtomicLong firstStreamLastFrameAvailableTimeMs = new AtomicLong();
// Wall-clock time (ms) when the second stream's effect was configured on the GL thread.
AtomicLong secondStreamConfigurationTimeMs = new AtomicLong();
// Reused for both streams: closed before each registerInputStream, opened by the
// onInputStreamRegistered callback.
ConditionVariable inputStreamRegisteredCondition = new ConditionVariable();
CountDownLatch frameProcessorEnded = new CountDownLatch(1);
defaultVideoFrameProcessor =
factory.create(
getApplicationContext(),
DebugViewProvider.NONE,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* renderFramesAutomatically= */ false,
Util.newSingleThreadExecutor("DVFPTest"),
new VideoFrameProcessor.Listener() {
// Counts frames across both streams; frame 30 is the last of stream 1.
int outputFrameCount = 0;
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
List<Effect> effects,
FrameInfo frameInfo) {
inputStreamRegisteredCondition.open();
}
@Override
public void onOutputSizeChanged(int width, int height) {}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
outputFrameCount++;
if (outputFrameCount == 30) {
firstStreamLastFrameAvailableTimeMs.set(SystemClock.DEFAULT.elapsedRealtime());
}
// Manual rendering: render each frame as soon as it is available.
defaultVideoFrameProcessor.renderOutputFrame(
VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY);
}
@Override
public void onError(VideoFrameProcessingException exception) {
videoFrameProcessingException.set(exception);
}
@Override
public void onEnded() {
frameProcessorEnded.countDown();
}
});
Bitmap bitmap1 = readBitmapUnpremultipliedAlpha(ORIGINAL_PNG_ASSET_PATH);
// A second, distinct bitmap is needed because each bitmap is recycled after a single use.
Bitmap bitmap2 = readBitmapUnpremultipliedAlpha(ORIGINAL_PNG_ASSET_PATH);
// First image: register the stream, wait for registration, then queue 30 frames (1s at 30fps).
inputStreamRegisteredCondition.close();
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
ImmutableList.of(),
new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap1.getWidth(), bitmap1.getHeight())
.build());
inputStreamRegisteredCondition.block();
defaultVideoFrameProcessor.queueInputBitmap(
bitmap1, new ConstantRateTimestampIterator(C.MICROS_PER_SECOND, 30.f));
// Second image: the GlEffect factory records when DVFP configures this stream's effects.
inputStreamRegisteredCondition.close();
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_BITMAP,
ImmutableList.of(
(GlEffect)
(context, useHdr) -> {
secondStreamConfigurationTimeMs.set(SystemClock.DEFAULT.elapsedRealtime());
return new PassthroughShaderProgram();
}),
new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap2.getWidth(), bitmap2.getHeight())
.build());
inputStreamRegisteredCondition.block();
defaultVideoFrameProcessor.queueInputBitmap(
bitmap2, new ConstantRateTimestampIterator(C.MICROS_PER_SECOND, 30.f));
defaultVideoFrameProcessor.signalEndOfInput();
if (!frameProcessorEnded.await(TEST_TIMEOUT_MS, MILLISECONDS)) {
throw new IllegalStateException("Test timeout", videoFrameProcessingException.get());
}
// The second stream must not be configured before the first stream's last frame was available.
assertThat(secondStreamConfigurationTimeMs.get())
.isAtLeast(firstStreamLastFrameAvailableTimeMs.get());
}
private DefaultVideoFrameProcessor createDefaultVideoFrameProcessor(
VideoFrameProcessor.Listener listener) throws Exception {
return checkNotNull(factory)

View File

@ -56,6 +56,7 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
private static final int WIDTH = 200;
private static final int HEIGHT = 100;
private static final long TEST_TIMEOUT_MS = 10_000L;
/**
* Time to wait between rendering frames to avoid frame drops between GL and the {@link
@ -238,37 +239,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
assertThat(actualRenderTimesNs).containsExactlyElementsIn(renderTimesNs).inOrder();
}
// Verifies that manually rendering three queued frames, each with an explicit render time,
// produces output at exactly those render times and in presentation-time order.
@Test
public void controlledFrameRendering_withThreeFramesAtOnce_usesGivenTimestamps()
throws Exception {
ImmutableList<Long> originalPresentationTimesUs = ImmutableList.of(1234L, 3456L, 4567L);
// Render times must be in the future relative to System.nanoTime, hence the offset.
long offsetNs = System.nanoTime();
ImmutableList<Long> renderTimesNs =
ImmutableList.of(offsetNs + 123456, offsetNs + 234567, offsetNs + 345678);
ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ originalPresentationTimesUs,
/* onFrameAvailableListener= */ actualPresentationTimesUs::add,
/* renderFramesAutomatically= */ false);
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior.
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs.get(0));
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs.get(1));
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs.get(2));
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
assertThat(actualPresentationTimesUs)
.containsExactlyElementsIn(originalPresentationTimesUs)
.inOrder();
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 3);
assertThat(actualRenderTimesNs).containsExactlyElementsIn(renderTimesNs).inOrder();
}
/** Test listener notified when an output frame becomes available for rendering. */
private interface OnOutputFrameAvailableForRenderingListener {
/** Called with the presentation time, in microseconds, of the newly available frame. */
void onFrameAvailableForRendering(long presentationTimeUs);
}
@ -347,14 +317,19 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
INPUT_TYPE_SURFACE,
/* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
new FrameInfo.Builder(ColorInfo.SDR_BT709_LIMITED, WIDTH, HEIGHT).build());
videoFrameProcessorReadyCountDownLatch.await();
boolean testTimedOut = false;
if (!videoFrameProcessorReadyCountDownLatch.await(TEST_TIMEOUT_MS, MILLISECONDS)) {
testTimedOut = true;
}
blankFrameProducer.produceBlankFrames(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput();
videoFrameProcessingEndedCountDownLatch.await();
if (!videoFrameProcessingEndedCountDownLatch.await(TEST_TIMEOUT_MS, MILLISECONDS)) {
testTimedOut = true;
}
@Nullable
Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get();
if (videoFrameProcessingException != null) {
throw videoFrameProcessingException;
if (videoFrameProcessingException != null || testTimedOut) {
throw new IllegalStateException(videoFrameProcessingException);
}
}

View File

@ -102,6 +102,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private int outputHeight;
@Nullable private DefaultShaderProgram defaultShaderProgram;
@Nullable private SurfaceViewWrapper debugSurfaceViewWrapper;
// Whether the input stream has ended, but not all input has been released. This is relevant only
// when renderFramesAutomatically is false. Ensures all frames are rendered before reporting
// onInputStreamProcessed.
// TODO: b/320481157 - Apply isInputStreamEnded to texture output as well.
private boolean isInputStreamEndedWithPendingAvailableFrames;
private InputListener inputListener;
private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation;
@Nullable private SurfaceView debugSurfaceView;
@ -183,7 +188,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void signalEndOfCurrentInputStream() {
  // Only report the stream as processed once every available frame has been rendered.
  // (The previous unconditional onInputStreamProcessed() call fired before pending frames
  // were rendered — and would double-notify once the conditional branch ran as well.)
  if (availableFrames.isEmpty()) {
    checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
    isInputStreamEndedWithPendingAvailableFrames = false;
  } else {
    // Frames can only remain pending here when the caller renders manually; with automatic
    // rendering every frame is rendered as it becomes available.
    checkState(!renderFramesAutomatically);
    // Defer the notification; renderOutputFrameInternal fires it when availableFrames drains.
    isInputStreamEndedWithPendingAvailableFrames = true;
  }
}
// Methods that must be called on the GL thread.
@ -266,6 +277,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Drops all frames that aren't rendered yet.
availableFrames.clear();
isInputStreamEndedWithPendingAvailableFrames = false;
if (defaultShaderProgram != null) {
defaultShaderProgram.flush();
}
@ -308,6 +320,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/* inputTexture= */ oldestAvailableFrame.first,
/* presentationTimeUs= */ oldestAvailableFrame.second,
renderTimeNs);
if (availableFrames.isEmpty() && isInputStreamEndedWithPendingAvailableFrames) {
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
isInputStreamEndedWithPendingAvailableFrames = false;
}
}
/** See {@link DefaultVideoFrameProcessor#setOutputSurfaceInfo} */