TimestampWrapper: fix signaling input capacity

fixes https://github.com/androidx/media/issues/821

PiperOrigin-RevId: 626407880
tofunmi 2024-04-19 10:38:55 -07:00 committed by Copybara-Service
parent 0bc5ac24b0
commit fb037b9847
34 changed files with 182 additions and 34 deletions

View File

@@ -27,6 +27,9 @@
     `DefaultDrmSessionManagerProvider`
     ([#1271](https://github.com/androidx/media/issues/1271)).
 * Effect:
+  * Fix bug where `TimestampWrapper` crashes when used with
+    `ExoPlayer#setVideoEffects`
+    ([#821](https://github.com/androidx/media/issues/821)).
 * Muxers:
 * IMA extension:
   * Promote API that is required for apps to play
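
For reference, the previously crashing setup looks roughly like the following sketch, which mirrors the playback test added later in this commit. It assumes an existing `ExoPlayer` instance named `player`; the wrapped effect, the time window, and the presentation-time offset are illustrative values taken from that test.

    // Sketch only: apply a Brightness effect inside [startTimeUs, endTimeUs] during preview.
    // With ExoPlayer#setVideoEffects, frame presentation times include a large renderer offset,
    // so the window is expressed relative to that offset (see the test added below).
    long exoPresentationTimeOffsetUs = 1_000_000_000_000L;
    player.setVideoEffects(
        ImmutableList.of(
            new TimestampWrapper(
                new Brightness(0.5f),
                /* startTimeUs= */ exoPresentationTimeOffsetUs + 166_833,
                /* endTimeUs= */ exoPresentationTimeOffsetUs + 510_000)));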

View File

@@ -26,8 +26,6 @@ import androidx.media3.common.util.UnstableApi;
 /**
  * Applies a {@link GlEffect} from {@code startTimeUs} to {@code endTimeUs}, and no change on all
  * other timestamps.
- *
- * <p>This currently does not work with {@code ExoPlayer#setVideoEffects}.
  */
 @UnstableApi
 public final class TimestampWrapper implements GlEffect {
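
Per the Javadoc kept above, the wrapped `GlEffect` runs only for presentation times inside `[startTimeUs, endTimeUs]`; frames outside that window pass through unchanged. A minimal construction sketch (the effect choice and timestamps are illustrative, not from this commit):

    // Brighten frames between 0.5 s and 2 s of stream time, leave all other frames untouched.
    GlEffect wrapped =
        new TimestampWrapper(
            new Brightness(0.5f), /* startTimeUs= */ 500_000, /* endTimeUs= */ 2_000_000);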

View File

@@ -15,24 +15,30 @@
  */
 package androidx.media3.effect;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
 import android.content.Context;
 import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.util.UnstableApi;
 import java.util.concurrent.Executor;
+import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 /** Applies a {@link TimestampWrapper} to apply a wrapped {@link GlEffect} on certain timestamps. */
 @UnstableApi
-/* package */ final class TimestampWrapperShaderProgram implements GlShaderProgram {
-  private final GlShaderProgram copyGlShaderProgram;
-  private int pendingCopyGlShaderProgramFrames;
-  private final GlShaderProgram wrappedGlShaderProgram;
-  private int pendingWrappedGlShaderProgramFrames;
+/* package */ final class TimestampWrapperShaderProgram
+    implements GlShaderProgram, GlShaderProgram.InputListener {
   private final long startTimeUs;
   private final long endTimeUs;
+  private final WrappedShaderProgramInputListener wrappedShaderProgramInputListener;
+  private final GlShaderProgram wrappedShaderProgram;
+  private final GlShaderProgram copyShaderProgram;
+  private int pendingWrappedGlShaderProgramFrames;
+  private int pendingCopyGlShaderProgramFrames;
   /**
    * Creates a {@code TimestampWrapperShaderProgram} instance.
@@ -45,53 +51,54 @@ import java.util.concurrent.Executor;
   public TimestampWrapperShaderProgram(
       Context context, boolean useHdr, TimestampWrapper timestampWrapper)
       throws VideoFrameProcessingException {
-    copyGlShaderProgram = new FrameCache(/* capacity= */ 1).toGlShaderProgram(context, useHdr);
-    wrappedGlShaderProgram = timestampWrapper.glEffect.toGlShaderProgram(context, useHdr);
     startTimeUs = timestampWrapper.startTimeUs;
     endTimeUs = timestampWrapper.endTimeUs;
+    wrappedShaderProgram = timestampWrapper.glEffect.toGlShaderProgram(context, useHdr);
+    wrappedShaderProgramInputListener = new WrappedShaderProgramInputListener();
+    wrappedShaderProgram.setInputListener(wrappedShaderProgramInputListener);
+    copyShaderProgram =
+        new FrameCache(/* capacity= */ wrappedShaderProgramInputListener.readyFrameCount)
+            .toGlShaderProgram(context, useHdr);
   }
   @Override
   public void setInputListener(InputListener inputListener) {
-    // TODO(b/277726418) Fix over-reported input capacity.
-    copyGlShaderProgram.setInputListener(inputListener);
-    wrappedGlShaderProgram.setInputListener(inputListener);
+    wrappedShaderProgramInputListener.setListener(inputListener);
+    wrappedShaderProgramInputListener.setToForwardingMode(true);
+    copyShaderProgram.setInputListener(inputListener);
   }
   @Override
   public void setOutputListener(OutputListener outputListener) {
-    copyGlShaderProgram.setOutputListener(outputListener);
-    wrappedGlShaderProgram.setOutputListener(outputListener);
+    wrappedShaderProgram.setOutputListener(outputListener);
+    copyShaderProgram.setOutputListener(outputListener);
   }
   @Override
   public void setErrorListener(Executor errorListenerExecutor, ErrorListener errorListener) {
-    copyGlShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
-    wrappedGlShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
+    wrappedShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
+    copyShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
   }
   @Override
   public void queueInputFrame(
       GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
-    // TODO(b/277726418) Properly report shader program capacity when switching from wrapped shader
-    // program to copying shader program.
-    if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) {
+    if (startTimeUs <= presentationTimeUs && presentationTimeUs <= endTimeUs) {
       pendingWrappedGlShaderProgramFrames++;
-      wrappedGlShaderProgram.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
+      wrappedShaderProgram.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
     } else {
       pendingCopyGlShaderProgramFrames++;
-      copyGlShaderProgram.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
+      copyShaderProgram.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
     }
   }
   @Override
   public void releaseOutputFrame(GlTextureInfo outputTexture) {
     if (pendingCopyGlShaderProgramFrames > 0) {
-      copyGlShaderProgram.releaseOutputFrame(outputTexture);
+      copyShaderProgram.releaseOutputFrame(outputTexture);
       pendingCopyGlShaderProgramFrames--;
     } else if (pendingWrappedGlShaderProgramFrames > 0) {
-      wrappedGlShaderProgram.releaseOutputFrame(outputTexture);
+      wrappedShaderProgram.releaseOutputFrame(outputTexture);
       pendingWrappedGlShaderProgramFrames--;
     } else {
       throw new IllegalArgumentException("Output texture not contained in either shader.");
@@ -100,22 +107,61 @@ import java.util.concurrent.Executor;
   @Override
   public void signalEndOfCurrentInputStream() {
-    // TODO(b/277726418) Properly handle EOS reporting.
-    // Only sending EOS signal along the wrapped GL shader program path is semantically incorrect,
-    // but it ensures the wrapped shader program receives the EOS signal. On the other hand, the
-    // copy shader program does not need special EOS handling.
-    wrappedGlShaderProgram.signalEndOfCurrentInputStream();
+    // The copy shader program does not need special EOS handling, so only EOS signal along the
+    // wrapped GL shader program.
+    wrappedShaderProgram.signalEndOfCurrentInputStream();
   }
   @Override
   public void flush() {
-    copyGlShaderProgram.flush();
-    wrappedGlShaderProgram.flush();
+    wrappedShaderProgramInputListener.setToForwardingMode(false);
+    wrappedShaderProgram.flush();
+    wrappedShaderProgramInputListener.setToForwardingMode(true);
+    copyShaderProgram.flush();
+    pendingCopyGlShaderProgramFrames = 0;
+    pendingWrappedGlShaderProgramFrames = 0;
   }
   @Override
   public void release() throws VideoFrameProcessingException {
-    copyGlShaderProgram.release();
-    wrappedGlShaderProgram.release();
+    copyShaderProgram.release();
+    wrappedShaderProgram.release();
+  }
+  private static final class WrappedShaderProgramInputListener
+      implements GlShaderProgram.InputListener {
+    public int readyFrameCount;
+    private boolean forwardCalls;
+    private @MonotonicNonNull InputListener listener;
+    @Override
+    public void onReadyToAcceptInputFrame() {
+      if (listener == null) {
+        readyFrameCount++;
+      }
+      if (forwardCalls) {
+        checkNotNull(listener).onReadyToAcceptInputFrame();
+      }
+    }
+    @Override
+    public void onInputFrameProcessed(GlTextureInfo inputTexture) {
+      checkNotNull(listener).onInputFrameProcessed(inputTexture);
+    }
+    @Override
+    public void onFlush() {
+      // The listener is flushed from the copy shader program.
+    }
+    public void setListener(InputListener listener) {
+      this.listener = listener;
+    }
+    public void setToForwardingMode(boolean forwardingMode) {
+      checkState(!forwardingMode || listener != null);
+      this.forwardCalls = forwardingMode;
+    }
   }
 }
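
For context on the fix (not part of the diff): in this pipeline, each `onReadyToAcceptInputFrame()` callback permits the upstream producer to queue exactly one more frame. Previously the wrapped program and a hard-coded `FrameCache(/* capacity= */ 1)` copy branch both forwarded their ready signals to the same upstream listener, so the wrapper could report more capacity than the copy branch actually had, as the removed TODO ("Fix over-reported input capacity") indicates. The new `WrappedShaderProgramInputListener` counts the wrapped program's initial ready signals and sizes the copy branch's `FrameCache` to match. Below is a rough, hypothetical sketch of the producer-side contract that this capacity signaling has to satisfy; class and method names are illustrative, and threading is ignored for brevity.

    import androidx.media3.common.GlObjectsProvider;
    import androidx.media3.common.GlTextureInfo;
    import androidx.media3.effect.GlShaderProgram;
    import java.util.ArrayDeque;

    /**
     * Hypothetical upstream producer, for illustration only: it queues at most one frame into the
     * downstream shader program per onReadyToAcceptInputFrame() callback. If a shader program
     * reports more ready signals than it can actually hold, this contract breaks, which is the
     * capacity bug the commit addresses.
     */
    final class SketchFrameProducer implements GlShaderProgram.InputListener {

      private final GlShaderProgram downstream;
      private final GlObjectsProvider glObjectsProvider;
      private final ArrayDeque<GlTextureInfo> pendingTextures = new ArrayDeque<>();
      private final ArrayDeque<Long> pendingPresentationTimesUs = new ArrayDeque<>();
      private int allowedFrames; // Permits granted by onReadyToAcceptInputFrame(), not yet used.

      SketchFrameProducer(GlShaderProgram downstream, GlObjectsProvider glObjectsProvider) {
        this.downstream = downstream;
        this.glObjectsProvider = glObjectsProvider;
        downstream.setInputListener(this);
      }

      /** Buffers a frame and forwards it once the downstream program has signaled capacity. */
      public void submit(GlTextureInfo texture, long presentationTimeUs) {
        pendingTextures.add(texture);
        pendingPresentationTimesUs.add(presentationTimeUs);
        maybeQueueFrames();
      }

      @Override
      public void onReadyToAcceptInputFrame() {
        // Each ready signal permits queueing exactly one more frame downstream.
        allowedFrames++;
        maybeQueueFrames();
      }

      @Override
      public void onInputFrameProcessed(GlTextureInfo inputTexture) {
        // The downstream program is done reading this texture; a real producer could reuse it.
      }

      @Override
      public void onFlush() {
        allowedFrames = 0;
        pendingTextures.clear();
        pendingPresentationTimesUs.clear();
      }

      private void maybeQueueFrames() {
        while (allowedFrames > 0 && !pendingTextures.isEmpty()) {
          allowedFrames--;
          downstream.queueInputFrame(
              glObjectsProvider, pendingTextures.remove(), pendingPresentationTimesUs.remove());
        }
      }
    }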

View File

@@ -50,8 +50,10 @@ import androidx.media3.common.Player;
 import androidx.media3.common.util.ConditionVariable;
 import androidx.media3.common.util.Size;
 import androidx.media3.common.util.Util;
+import androidx.media3.effect.Brightness;
 import androidx.media3.effect.OverlayEffect;
 import androidx.media3.effect.TextOverlay;
+import androidx.media3.effect.TimestampWrapper;
 import androidx.media3.exoplayer.ExoPlayer;
 import androidx.media3.exoplayer.Renderer;
 import androidx.media3.exoplayer.util.EventLogger;
@@ -284,6 +286,105 @@ public class EffectPlaybackTest {
          .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
     }
   }
+  @Test
+  public void exoplayerEffectsPreview_withTimestampWrapper_ensuresAllFramesRendered()
+      throws Exception {
+    // Internal reference: b/264252759.
+    assumeTrue(
+        "This test should run on real devices because OpenGL to ImageReader rendering is not"
+            + " always reliable on emulators.",
+        !Util.isRunningOnEmulator());
+    ArrayList<BitmapPixelTestUtil.ImageBuffer> readImageBuffers = new ArrayList<>();
+    AtomicInteger renderedFramesCount = new AtomicInteger();
+    ConditionVariable playerEnded = new ConditionVariable();
+    ConditionVariable readAllOutputFrames = new ConditionVariable();
+    // Setting maxImages=5 ensures image reader gets all rendered frames from VideoFrameProcessor.
+    // Using maxImages=5 runs successfully on a Pixel3.
+    outputImageReader =
+        ImageReader.newInstance(
+            MP4_ASSET_VIDEO_SIZE.getWidth(),
+            MP4_ASSET_VIDEO_SIZE.getHeight(),
+            PixelFormat.RGBA_8888,
+            /* maxImages= */ 5);
+    instrumentation.runOnMainSync(
+        () -> {
+          player = new ExoPlayer.Builder(ApplicationProvider.getApplicationContext()).build();
+          checkStateNotNull(outputImageReader);
+          outputImageReader.setOnImageAvailableListener(
+              imageReader -> {
+                try (Image image = imageReader.acquireNextImage()) {
+                  readImageBuffers.add(
+                      BitmapPixelTestUtil.copyByteBufferFromRbga8888Image(image));
+                }
+                if (renderedFramesCount.incrementAndGet() == MP4_ASSET_FRAMES) {
+                  readAllOutputFrames.open();
+                }
+              },
+              Util.createHandlerForCurrentOrMainLooper());
+          setOutputSurfaceAndSizeOnPlayer(
+              player, outputImageReader.getSurface(), MP4_ASSET_VIDEO_SIZE);
+          player.setPlayWhenReady(true);
+          long exoPresentationTimeOffsetUs = 1000000000000L;
+          player.setVideoEffects(
+              ImmutableList.of(
+                  new TimestampWrapper(
+                      new Brightness(0.5f),
+                      /* startTimeUs= */ exoPresentationTimeOffsetUs + 166833,
+                      /* endTimeUs= */ exoPresentationTimeOffsetUs + 510000)));
+          // Adding an EventLogger to use its log output in case the test fails.
+          player.addAnalyticsListener(new EventLogger());
+          player.addListener(
+              new Player.Listener() {
+                @Override
+                public void onPlaybackStateChanged(@Player.State int playbackState) {
+                  if (playbackState == STATE_ENDED) {
+                    playerEnded.open();
+                  }
+                }
+              });
+          player.setMediaItem(MediaItem.fromUri(MP4_ASSET_URI_STRING));
+          player.prepare();
+        });
+    if (!playerEnded.block(TEST_TIMEOUT_MS)) {
+      throw new TimeoutException(
+          Util.formatInvariant("Playback not ended in %d ms.", TEST_TIMEOUT_MS));
+    }
+    if (!readAllOutputFrames.block(TEST_TIMEOUT_MS)) {
+      throw new TimeoutException(
+          Util.formatInvariant(
+              "Haven't received all frames in %d ms after playback ends.", TEST_TIMEOUT_MS));
+    }
+    ArrayList<Float> averagePixelDifferences =
+        new ArrayList<>(/* initialCapacity= */ readImageBuffers.size());
+    for (int i = 0; i < readImageBuffers.size(); i++) {
+      Bitmap actualBitmap = createArgb8888BitmapFromRgba8888ImageBuffer(readImageBuffers.get(i));
+      float averagePixelAbsoluteDifference =
+          getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+              /* expected= */ readBitmap(
+                  Util.formatInvariant("%s/%s/frame_%d.png", TEST_DIRECTORY, testId, i)),
+              /* actual= */ actualBitmap,
+              /* testId= */ Util.formatInvariant("%s_frame_%d", testId, i));
+      averagePixelDifferences.add(averagePixelAbsoluteDifference);
+    }
+    for (int i = 0; i < averagePixelDifferences.size(); i++) {
+      float averagePixelDifference = averagePixelDifferences.get(i);
+      assertWithMessage(
+              Util.formatInvariant(
+                  "Frame %d with average pixel difference %f. ", i, averagePixelDifference))
+          .that(averagePixelDifference)
+          .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
+    }
+  }
   }
 
   @Nullable