Compare commits

...

6 Commits

Author SHA1 Message Date
kimvde
f8b1dcc33b Move VideoSink config to dedicated method
This is to improve the readability of MediaCodecVideoRenderer.onEnabled.

PiperOrigin-RevId: 742246092
2025-03-31 06:40:12 -07:00
sheenachhabra
25c1760b17 Add VIDEO_APV mimetype to FrameworkMuxer supported types
Came up in github issue https://github.com/androidx/media/issues/2275.

PiperOrigin-RevId: 742233737
2025-03-31 05:44:06 -07:00
claincly
9254efd8da Add pixel test for replaying
I could've added another test that seeks into the media before replaying, but I
don't think it's fundamentally different from the one added.

I wish I could add one that replays while playing, but it'd be hard to match
the frames perfectly.

I'll add more timestamp based tests

PiperOrigin-RevId: 742229436
2025-03-31 05:22:31 -07:00
claincly
ff6537d69b Fix a bug that video effects are added twice
The tests passed because of an issue in chaining. The chaining listener allows
self-looping, i.e. the producer and the consumer of a frame could be the same
instance, like an effect chain of `a -> a -> b`. This didn't fail any test
before, because the chaining is rectified when connecting a to b, but it should
have failed when connecting a to a.

PiperOrigin-RevId: 742215700
2025-03-31 04:22:18 -07:00
dancho
73fa820828 Skip decoder input buffers for encrypted content
Previous assertion was incorrect. Per-frame initialization
vectors are written to the output stream in
`FragmentedMp4Extractor`

PiperOrigin-RevId: 742203717
2025-03-31 03:34:09 -07:00
kimvde
427daef350 Remove VideoSink.setWakeUpListener
This is the last Renderer reference in VideoSink

PiperOrigin-RevId: 742189332
2025-03-31 02:26:56 -07:00
14 changed files with 395 additions and 136 deletions

View File

@ -15,6 +15,8 @@
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.effect.GlShaderProgram.InputListener;
@ -51,6 +53,9 @@ import androidx.media3.effect.GlShaderProgram.OutputListener;
GlShaderProgram producingGlShaderProgram,
GlShaderProgram consumingGlShaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
checkArgument(
producingGlShaderProgram != consumingGlShaderProgram,
"Creating a self loop in the chain: " + producingGlShaderProgram);
this.producingGlShaderProgram = producingGlShaderProgram;
frameConsumptionManager =
new FrameConsumptionManager(

View File

@ -17,13 +17,19 @@ package androidx.media3.exoplayer.drm;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeTrue;
import android.content.Context;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.DecoderCounters;
import androidx.media3.exoplayer.DefaultRenderersFactory;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.util.concurrent.atomic.AtomicReference;
import okhttp3.mockwebserver.MockResponse;
@ -97,4 +103,73 @@ public final class DrmPlaybackTest {
getInstrumentation().waitForIdleSync();
assertThat(playbackException.get()).isNull();
}
@Test
public void clearkeyPlayback_withLateThresholdToDropDecoderInput_dropsInputBuffers()
    throws Exception {
  // The API 21 emulator doesn't have a secure decoder. Due to b/18678462 MediaCodecUtil pretends
  // that there is a secure decoder, so we cannot assumeTrue() on getDecoderInfos; instead skip
  // API 21 (and below) entirely.
  assumeTrue(Util.SDK_INT > 21);
  Context context = getInstrumentation().getContext();
  // Serve the ClearKey license response from a local mock server.
  MockWebServer mockWebServer = new MockWebServer();
  mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(CLEARKEY_RESPONSE));
  mockWebServer.start();
  MediaItem mediaItem =
      new MediaItem.Builder()
          .setUri("asset:///media/drm/sample_fragmented_clearkey.mp4")
          .setDrmConfiguration(
              new MediaItem.DrmConfiguration.Builder(C.CLEARKEY_UUID)
                  .setLicenseUri(mockWebServer.url("license").toString())
                  .build())
          .build();
  AtomicReference<ExoPlayer> player = new AtomicReference<>();
  ConditionVariable playbackComplete = new ConditionVariable();
  AtomicReference<PlaybackException> playbackException = new AtomicReference<>();
  AtomicReference<DecoderCounters> decoderCountersAtomicReference = new AtomicReference<>();
  getInstrumentation()
      .runOnMainSync(
          () -> {
            // A large negative late-threshold presumably marks every input buffer as late so
            // that input buffers get dropped, and parsing H.264 sample dependencies within the
            // GOP allows identifying droppable buffers — TODO confirm against the
            // experimental* method docs.
            player.set(
                new ExoPlayer.Builder(
                        context,
                        new DefaultRenderersFactory(context)
                            .experimentalSetLateThresholdToDropDecoderInputUs(-100_000_000L),
                        new DefaultMediaSourceFactory(context)
                            .experimentalSetCodecsToParseWithinGopSampleDependencies(
                                C.VIDEO_CODEC_FLAG_H264))
                    .build());
            player
                .get()
                .addListener(
                    new Player.Listener() {
                      @Override
                      public void onPlaybackStateChanged(@Player.State int playbackState) {
                        if (playbackState == Player.STATE_ENDED) {
                          // Capture the counters before the player is released below.
                          decoderCountersAtomicReference.set(
                              player.get().getVideoDecoderCounters());
                          playbackComplete.open();
                        }
                      }
                      @Override
                      public void onPlayerError(PlaybackException error) {
                        playbackException.set(error);
                        playbackComplete.open();
                      }
                    });
            player.get().setMediaItem(mediaItem);
            player.get().prepare();
            player.get().play();
          });
  // Block the instrumentation thread until either STATE_ENDED or a player error.
  playbackComplete.block();
  getInstrumentation().runOnMainSync(() -> player.get().release());
  getInstrumentation().waitForIdleSync();
  assertThat(playbackException.get()).isNull();
  // Which input buffers are dropped first depends on the number of MediaCodec buffer slots.
  // This means the asserts cannot be isEqualTo.
  assertThat(decoderCountersAtomicReference.get().droppedInputBufferCount).isAtLeast(1);
}
}

View File

@ -839,7 +839,6 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
throws ExoPlaybackException {
if (messageType == MSG_SET_WAKEUP_LISTENER) {
wakeupListener = checkNotNull((WakeupListener) message);
onWakeupListenerSet(wakeupListener);
} else {
super.handleMessage(messageType, message);
}
@ -1546,17 +1545,6 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
// Do nothing.
}
/**
* Called when a {@link WakeupListener} is set.
*
* <p>The default implementation is a no-op.
*
* @param wakeupListener The {@link WakeupListener}.
*/
protected void onWakeupListenerSet(WakeupListener wakeupListener) {
// Do nothing.
}
/**
* Called when a new {@link Format} is read from the upstream {@link MediaPeriod}.
*

View File

@ -30,7 +30,6 @@ import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.Renderer;
import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;
@ -46,7 +45,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* <ul>
* <li>Applying video effects
* <li>Inputting bitmaps
* <li>Setting a WakeupListener
* </ul>
*
* <p>The {@linkplain #getInputSurface() input} and {@linkplain #setOutputSurfaceInfo(Surface, Size)
@ -224,6 +222,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoFrameHandlers.add(videoFrameHandler);
long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
videoFrameRenderControl.onFrameAvailableForRendering(bufferPresentationTimeUs);
listenerExecutor.execute(() -> listener.onFrameAvailableForRendering());
return true;
}
@ -246,16 +245,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
/**
* {@inheritDoc}
*
* <p>This method will always throw an {@link UnsupportedOperationException}.
*/
@Override
public void setWakeupListener(Renderer.WakeupListener wakeupListener) {
throw new UnsupportedOperationException();
}
@Override
public void join(boolean renderNextFrameImmediately) {
videoFrameReleaseControl.join(renderNextFrameImmediately);

View File

@ -881,61 +881,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
// Configure the VideoSink every time the renderer is enabled, in case the parameters have
// been overridden by another renderer. Also configure the VideoSink with the parameters that
// have been set on the renderer before creating the VideoSink.
videoSink.setListener(
new VideoSink.Listener() {
@Override
public void onFirstFrameRendered() {
if (displaySurface != null) {
notifyRenderedFirstFrame();
}
}
@Override
public void onFrameDropped() {
if (displaySurface != null) {
updateDroppedBufferCounters(
/* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1);
}
}
@Override
public void onVideoSizeChanged(VideoSize videoSize) {
// TODO: b/292111083 - Report video size change to app. Video size reporting is
// removed at the moment to ensure the first frame is rendered, and the video is
// rendered after switching on/off the screen.
}
@Override
public void onError(VideoSink.VideoSinkException videoSinkException) {
setPendingPlaybackException(
createRendererException(
videoSinkException,
videoSinkException.format,
PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
}
},
// Pass a direct executor since the callback handling involves posting on the app looper
// again, so there's no need to do two hops.
directExecutor());
if (frameMetadataListener != null) {
videoSink.setVideoFrameMetadataListener(frameMetadataListener);
}
if (displaySurface != null && !outputResolution.equals(Size.UNKNOWN)) {
videoSink.setOutputSurfaceInfo(displaySurface, outputResolution);
}
videoSink.setChangeFrameRateStrategy(changeFrameRateStrategy);
videoSink.setPlaybackSpeed(getPlaybackSpeed());
if (videoEffects != null) {
videoSink.setVideoEffects(videoEffects);
}
configureVideoSink();
nextVideoSinkFirstFrameReleaseInstruction =
mayRenderStartOfStream
? RELEASE_FIRST_FRAME_IMMEDIATELY
: RELEASE_FIRST_FRAME_WHEN_STARTED;
@Nullable WakeupListener wakeupListener = getWakeupListener();
if (wakeupListener != null) {
videoSink.setWakeupListener(wakeupListener);
}
experimentalEnableProcessedStreamChangedAtStart();
} else {
videoFrameReleaseControl.setClock(getClock());
@ -947,6 +897,66 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
}
}
/**
 * Configures the {@link VideoSink} with the listener and the parameters that have been set on
 * this renderer (frame metadata listener, output surface, frame rate strategy, playback speed
 * and video effects).
 *
 * <p>Called every time the renderer is enabled, in case the sink parameters have been
 * overridden by another renderer sharing the same sink.
 */
@RequiresNonNull("videoSink")
private void configureVideoSink() {
  videoSink.setListener(
      new VideoSink.Listener() {
        @Override
        public void onFrameAvailableForRendering() {
          // Wake up the player so that the available frame is rendered more promptly.
          @Nullable WakeupListener wakeupListener = getWakeupListener();
          if (wakeupListener != null) {
            wakeupListener.onWakeup();
          }
        }
        @Override
        public void onFirstFrameRendered() {
          // Only notify if there is a surface the frame was actually rendered to.
          if (displaySurface != null) {
            notifyRenderedFirstFrame();
          }
        }
        @Override
        public void onFrameDropped() {
          if (displaySurface != null) {
            updateDroppedBufferCounters(
                /* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1);
          }
        }
        @Override
        public void onVideoSizeChanged(VideoSize videoSize) {
          // TODO: b/292111083 - Report video size change to app. Video size reporting is
          // removed at the moment to ensure the first frame is rendered, and the video is
          // rendered after switching on/off the screen.
        }
        @Override
        public void onError(VideoSink.VideoSinkException videoSinkException) {
          setPendingPlaybackException(
              createRendererException(
                  videoSinkException,
                  videoSinkException.format,
                  PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
        }
      },
      // Pass a direct executor since the callback handling involves posting on the app looper
      // again, so there's no need to do two hops.
      directExecutor());
  if (frameMetadataListener != null) {
    videoSink.setVideoFrameMetadataListener(frameMetadataListener);
  }
  // Only forward the surface once a real resolution is known.
  if (displaySurface != null && !outputResolution.equals(Size.UNKNOWN)) {
    videoSink.setOutputSurfaceInfo(displaySurface, outputResolution);
  }
  videoSink.setChangeFrameRateStrategy(changeFrameRateStrategy);
  videoSink.setPlaybackSpeed(getPlaybackSpeed());
  if (videoEffects != null) {
    videoSink.setVideoEffects(videoEffects);
  }
}
/** Creates a {@link PlaybackVideoGraphWrapper} instance. */
protected PlaybackVideoGraphWrapper createPlaybackVideoGraphWrapper(
Context context, VideoFrameReleaseControl videoFrameReleaseControl) {
@ -1464,13 +1474,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
eventDispatcher.videoCodecError(codecError);
}
@Override
protected void onWakeupListenerSet(WakeupListener wakeupListener) {
if (videoSink != null) {
videoSink.setWakeupListener(wakeupListener);
}
}
@Override
@Nullable
protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder)
@ -1529,11 +1532,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
// Make sure to decode and render the last frame.
return false;
}
if (buffer.isEncrypted()) {
// Commonly used decryption algorithms require updating the initialization vector for each
// block processed. Skipping input buffers before the decoder is not allowed.
return false;
}
boolean shouldSkipDecoderInputBuffer = isBufferBeforeStartTime(buffer);
if (!shouldSkipDecoderInputBuffer && !shouldDropDecoderInputBuffers) {
return false;

View File

@ -55,7 +55,6 @@ import androidx.media3.common.util.TimedValueQueue;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.Renderer;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
@ -80,11 +79,15 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
/** Listener for {@link PlaybackVideoGraphWrapper} events. */
public interface Listener {
/** Called when the video frame processor renders the first frame. */
void onFirstFrameRendered();
/** Called when the video frame processor dropped a frame. */
void onFrameDropped();
/** Called when an output frame is available for rendering. */
default void onFrameAvailableForRendering() {}
/** Called when the first output frame is rendered. */
default void onFirstFrameRendered() {}
/** Called when an output frame is dropped. */
default void onFrameDropped() {}
/**
* Called before a frame is rendered for the first time since setting the surface, and each time
@ -92,14 +95,14 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
*
* @param videoSize The video size.
*/
void onVideoSizeChanged(VideoSize videoSize);
default void onVideoSizeChanged(VideoSize videoSize) {}
/**
* Called when the video frame processor encountered an error.
* Called when an error occurs.
*
* @param videoFrameProcessingException The error.
*/
void onError(VideoFrameProcessingException videoFrameProcessingException);
default void onError(VideoFrameProcessingException videoFrameProcessingException) {}
}
/** A builder for {@link PlaybackVideoGraphWrapper} instances. */
@ -288,7 +291,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Nullable private Pair<Surface, Size> currentSurfaceAndSize;
private int pendingFlushCount;
private @State int state;
@Nullable private Renderer.WakeupListener wakeupListener;
/**
* The buffer presentation time of the frame most recently output by the video graph, in
@ -430,9 +432,9 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// Ignore available frames while flushing
return;
}
if (wakeupListener != null) {
for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
// Wake up the player when not playing to render the frame more promptly.
wakeupListener.onWakeup();
listener.onFrameAvailableForRendering();
}
long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
@ -780,7 +782,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@FirstFrameReleaseInstruction int firstFrameReleaseInstruction,
List<Effect> videoEffects) {
checkState(isInitialized());
setPendingVideoEffects(videoEffects);
this.videoEffects = ImmutableList.copyOf(videoEffects);
this.inputType = inputType;
this.inputFormat = format;
finalBufferPresentationTimeUs = C.TIME_UNSET;
@ -861,7 +863,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
if (this.videoEffects.equals(videoEffects)) {
return;
}
setPendingVideoEffects(videoEffects);
this.videoEffects = ImmutableList.copyOf(videoEffects);
if (inputFormat != null) {
registerInputStream(inputFormat);
}
@ -949,11 +951,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
PlaybackVideoGraphWrapper.this.render(positionUs, elapsedRealtimeUs);
}
@Override
public void setWakeupListener(Renderer.WakeupListener wakeupListener) {
PlaybackVideoGraphWrapper.this.wakeupListener = wakeupListener;
}
@Override
public void join(boolean renderNextFrameImmediately) {
defaultVideoSink.join(renderNextFrameImmediately);
@ -966,6 +963,12 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// PlaybackVideoGraphWrapper.Listener implementation
@Override
public void onFrameAvailableForRendering() {
VideoSink.Listener currentListener = listener;
listenerExecutor.execute(currentListener::onFrameAvailableForRendering);
}
@Override
public void onFirstFrameRendered() {
VideoSink.Listener currentListener = listener;
@ -996,23 +999,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// Private methods
/**
* Sets the pending video effects.
*
* <p>Effects are pending until a new input stream is registered.
*/
private void setPendingVideoEffects(List<Effect> newVideoEffects) {
if (videoGraphFactory.supportsMultipleInputs()) {
this.videoEffects = ImmutableList.copyOf(newVideoEffects);
} else {
this.videoEffects =
new ImmutableList.Builder<Effect>()
.addAll(newVideoEffects)
.addAll(compositionEffects)
.build();
}
}
private void registerInputStream(Format inputFormat) {
Format adjustedInputFormat =
inputFormat

View File

@ -28,7 +28,6 @@ import androidx.media3.common.VideoSize;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.Renderer;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@ -56,6 +55,10 @@ public interface VideoSink {
/** Listener for {@link VideoSink} events. */
interface Listener {
/** Called when an output frame is available for rendering. */
default void onFrameAvailableForRendering() {}
/** Called when the sink renders the first frame on the output surface. */
default void onFirstFrameRendered() {}
@ -305,9 +308,6 @@ public interface VideoSink {
*/
void render(long positionUs, long elapsedRealtimeUs) throws VideoSinkException;
/** Sets a {@link Renderer.WakeupListener} on the {@code VideoSink}. */
void setWakeupListener(Renderer.WakeupListener wakeupListener);
/**
* Joins the video sink to a new stream.
*

View File

@ -19,6 +19,7 @@ import static android.media.MediaCodecInfo.CodecProfileLevel.AACObjectHE;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.MediaFormatUtil.createFormatFromMediaFormat;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.common.util.Util.isRunningOnEmulator;
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
@ -2557,6 +2558,24 @@ public class TransformerEndToEndTest {
() -> new Composition.Builder(firstSequence, secondSequence).build());
}
@Test
public void transmux_apvFile_transmuxesSuccessfully() throws Exception {
  // MediaMuxer supports APV from API 36.
  assumeTrue(SDK_INT >= 36);
  // Input asset containing an APV video track.
  String apvAssetUri = "asset:///media/mp4/sample_with_apvc.mp4";
  EditedMediaItem itemToTransmux =
      new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(apvAssetUri))).build();
  Transformer transformer = new Transformer.Builder(context).build();
  TransformerAndroidTestRunner testRunner =
      new TransformerAndroidTestRunner.Builder(context, transformer).build();
  ExportTestResult exportTestResult = testRunner.run(testId, itemToTransmux);
  // The output must keep the APV mime type and must not have been transcoded.
  Format outputVideoFormat =
      retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO);
  assertThat(outputVideoFormat.sampleMimeType).isEqualTo(MimeTypes.VIDEO_APV);
  assertThat(exportTestResult.exportResult.videoConversionProcess)
      .isEqualTo(CONVERSION_PROCESS_TRANSMUXED);
}
private static boolean shouldSkipDeviceForAacObjectHeProfileEncoding() {
return Util.SDK_INT < 29;
}

View File

@ -43,10 +43,12 @@ import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.Player;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util;
import androidx.media3.effect.Brightness;
import androidx.media3.effect.RgbMatrix;
import androidx.media3.effect.TimestampWrapper;
import androidx.media3.exoplayer.DefaultRenderersFactory;
import androidx.media3.exoplayer.ExoPlayer;
@ -54,6 +56,8 @@ import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.util.EventLogger;
import androidx.media3.exoplayer.video.MediaCodecVideoRenderer;
import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
import androidx.media3.exoplayer.video.VideoFrameReleaseControl;
import androidx.media3.exoplayer.video.VideoRendererEventListener;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.transformer.SurfaceTestActivity;
@ -271,6 +275,147 @@ public class EffectPlaybackPixelTest {
}
}
@Test
public void exoplayerEffectRedraw_changeEffectOnFirstFrame_ensuresCorrectFramesAreRedrawn()
    throws Exception {
  // Internal reference: b/264252759.
  // Fix: the two concatenated string literals were missing a separating space
  // ("rendering isnot always reliable").
  assumeTrue(
      "This test should run on real devices because OpenGL to ImageReader rendering is"
          + " not always reliable on emulators.",
      !Util.isRunningOnEmulator());
  ArrayList<BitmapPixelTestUtil.ImageBuffer> readImageBuffers = new ArrayList<>();
  AtomicInteger renderedFramesCount = new AtomicInteger();
  AtomicInteger firstFrameRenderedCount = new AtomicInteger();
  ConditionVariable playerEnded = new ConditionVariable();
  ConditionVariable readAllOutputFrames = new ConditionVariable();
  Handler mainHandler = new Handler(instrumentation.getTargetContext().getMainLooper());
  instrumentation.runOnMainSync(
      () -> {
        Context context = ApplicationProvider.getApplicationContext();
        // Use a renderer with the replayable frame cache enabled so setVideoEffects(REDRAW)
        // can re-render the cached frame.
        Renderer videoRenderer = new ReplayVideoRenderer(context, MediaCodecSelector.DEFAULT);
        player =
            new ExoPlayer.Builder(context)
                .setRenderersFactory(
                    new DefaultRenderersFactory(context) {
                      @Override
                      protected void buildVideoRenderers(
                          Context context,
                          @ExtensionRendererMode int extensionRendererMode,
                          MediaCodecSelector mediaCodecSelector,
                          boolean enableDecoderFallback,
                          Handler eventHandler,
                          VideoRendererEventListener eventListener,
                          long allowedVideoJoiningTimeMs,
                          ArrayList<Renderer> builtVideoRenderers) {
                        builtVideoRenderers.add(videoRenderer);
                      }
                    })
                .build();
        checkStateNotNull(outputImageReader);
        outputImageReader.setOnImageAvailableListener(
            imageReader -> {
              try (Image image = imageReader.acquireNextImage()) {
                if (renderedFramesCount.getAndIncrement() < 2) {
                  // Record only the first and replayed frames.
                  readImageBuffers.add(
                      BitmapPixelTestUtil.copyByteBufferFromRbga8888Image(image));
                } else {
                  readAllOutputFrames.open();
                }
              }
            },
            Util.createHandlerForCurrentOrMainLooper());
        setOutputSurfaceAndSizeOnPlayer(
            player,
            videoRenderer,
            outputImageReader.getSurface(),
            new Size(MP4_ASSET.videoFormat.width, MP4_ASSET.videoFormat.height));
        // Pause so only the first frame is rendered until play() is called below.
        player.setPlayWhenReady(false);
        AdjustableContrast contrast = new AdjustableContrast();
        player.setVideoEffects(ImmutableList.of(createTimestampOverlay(), contrast));
        // Adding an EventLogger to use its log output in case the test fails.
        player.addAnalyticsListener(new EventLogger());
        player.addListener(
            new Player.Listener() {
              @Override
              public void onPlaybackStateChanged(@Player.State int playbackState) {
                if (playbackState == STATE_ENDED) {
                  playerEnded.open();
                }
              }
            });
        player.setVideoFrameMetadataListener(
            (bufferPresentationTimeUs, releaseTimeNs, format, mediaFormat) -> {
              // The buffer presentation time is offset with rendererOffset.
              if (bufferPresentationTimeUs != 1_000_000_000_000L) {
                return;
              }
              if (firstFrameRenderedCount.get() == 0) {
                // Render the current frame, and redraw a frame with some delay. This is to ensure
                // that the first frame is rendered with the original effect, and the second
                // frame is rendered with the new effect. Following this call, the first frame
                // will be rendered twice.
                mainHandler.postDelayed(
                    () -> {
                      contrast.changeContrast(-0.8f);
                      player.setVideoEffects(VideoFrameProcessor.REDRAW);
                    },
                    /* delayMillis= */ 500);
              } else if (firstFrameRenderedCount.get() == 1) {
                // Redraw another frame. This renders the first frame for the third time.
                instrumentation.runOnMainSync(
                    () -> player.setVideoEffects(VideoFrameProcessor.REDRAW));
              } else {
                instrumentation.runOnMainSync(player::play);
              }
              firstFrameRenderedCount.getAndIncrement();
            });
        player.setMediaItem(MediaItem.fromUri(MP4_ASSET.uri));
        player.prepare();
      });
  if (!playerEnded.block(TEST_TIMEOUT_MS)) {
    throw new TimeoutException(
        Util.formatInvariant("Playback not ended in %d ms.", TEST_TIMEOUT_MS));
  }
  if (!readAllOutputFrames.block(TEST_TIMEOUT_MS)) {
    throw new TimeoutException(
        Util.formatInvariant(
            "Haven't received all frames in %d ms after playback ends.", TEST_TIMEOUT_MS));
  }
  // Compare each captured frame against its golden image.
  ArrayList<Float> averagePixelDifferences =
      new ArrayList<>(/* initialCapacity= */ readImageBuffers.size());
  for (int i = 0; i < readImageBuffers.size(); i++) {
    Bitmap actualBitmap = createArgb8888BitmapFromRgba8888ImageBuffer(readImageBuffers.get(i));
    float averagePixelAbsoluteDifference =
        getBitmapAveragePixelAbsoluteDifferenceArgb8888(
            /* expected= */ readBitmap(
                Util.formatInvariant("%s/%s/frame_%d.png", TEST_DIRECTORY, testId, i)),
            /* actual= */ actualBitmap,
            /* testId= */ Util.formatInvariant("%s_frame_%d", testId, i));
    averagePixelDifferences.add(averagePixelAbsoluteDifference);
  }
  for (int i = 0; i < averagePixelDifferences.size(); i++) {
    float averagePixelDifference = averagePixelDifferences.get(i);
    assertWithMessage(
            Util.formatInvariant(
                "Frame %d with average pixel difference %f. ", i, averagePixelDifference))
        .that(averagePixelDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
  }
  // Played once, replayed twice.
  assertThat(firstFrameRenderedCount.get()).isEqualTo(3);
}
@Test
public void exoplayerEffectsPreview_withTimestampWrapper_ensuresAllFramesRendered()
throws Exception {
@ -428,6 +573,22 @@ public class EffectPlaybackPixelTest {
}
}
/**
 * A {@link MediaCodecVideoRenderer} that creates its {@link PlaybackVideoGraphWrapper} with the
 * replayable cache enabled, so that frames can be redrawn with updated effects.
 */
private static class ReplayVideoRenderer extends MediaCodecVideoRenderer {
  public ReplayVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
    super(new Builder(context).setMediaCodecSelector(mediaCodecSelector));
  }
  @Override
  protected PlaybackVideoGraphWrapper createPlaybackVideoGraphWrapper(
      Context context, VideoFrameReleaseControl videoFrameReleaseControl) {
    // Enable the replayable cache, which is what makes setVideoEffects(REDRAW) possible.
    return new PlaybackVideoGraphWrapper.Builder(context, videoFrameReleaseControl)
        .setClock(getClock())
        .setEnableReplayableCache(true)
        .build();
  }
}
private static class NoFrameDroppedVideoRenderer extends MediaCodecVideoRenderer {
public NoFrameDroppedVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
@ -446,4 +607,35 @@ public class EffectPlaybackPixelTest {
return false;
}
}
/**
 * An {@link RgbMatrix} effect whose contrast can be changed between frames.
 *
 * <p>A contrast of 0 leaves the frame unchanged; presumably the useful range is (-1, 1) given
 * the {@code 1.0001f - contrast} denominator guarding against division by zero — TODO confirm.
 */
private static final class AdjustableContrast implements RgbMatrix {
  // Written from the main thread (via changeContrast) and read during frame processing
  // (getMatrix), so the field must be volatile for the update to be visible across threads.
  private volatile float contrast;

  /** Sets the contrast to apply to subsequent frames. */
  public void changeContrast(float contrast) {
    this.contrast = contrast;
  }

  @Override
  public float[] getMatrix(long presentationTimeUs, boolean useHdr) {
    float contrastFactor = (1 + contrast) / (1.0001f - contrast);
    // Column-major 4x4 matrix: scale R, G and B by contrastFactor and shift each channel by
    // (1 - contrastFactor) / 2 so the adjustment is centered around mid-gray.
    return new float[] {
      contrastFactor,
      0.0f,
      0.0f,
      0.0f,
      0.0f,
      contrastFactor,
      0.0f,
      0.0f,
      0.0f,
      0.0f,
      contrastFactor,
      0.0f,
      (1.0f - contrastFactor) * 0.5f,
      (1.0f - contrastFactor) * 0.5f,
      (1.0f - contrastFactor) * 0.5f,
      1.0f
    };
  }
}
}

View File

@ -23,7 +23,6 @@ import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.video.PlaceholderSurface;
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
import androidx.media3.exoplayer.video.VideoSink;
@ -263,11 +262,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
@Override
public void setWakeupListener(Renderer.WakeupListener wakeupListener) {
executeOrDelay(videoSink -> videoSink.setWakeupListener(wakeupListener));
}
@Override
public void join(boolean renderNextFrameImmediately) {
executeOrDelay(videoSink -> videoSink.join(renderNextFrameImmediately));

View File

@ -321,6 +321,9 @@ import java.util.Locale;
if (SDK_INT >= 34) {
supportedMimeTypes.add(MimeTypes.VIDEO_AV1);
}
if (SDK_INT >= 36) {
supportedMimeTypes.add(MimeTypes.VIDEO_APV);
}
return supportedMimeTypes.build();
}

View File

@ -27,6 +27,7 @@ import static androidx.media3.exoplayer.DefaultRenderersFactory.MAX_DROPPED_VIDE
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_IMMEDIATELY;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_WHEN_STARTED;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import android.content.Context;
import android.graphics.Bitmap;
@ -500,6 +501,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private boolean mayRenderStartOfStream;
private @VideoSink.FirstFrameReleaseInstruction int nextFirstFrameReleaseInstruction;
private long offsetToCompositionTimeUs;
private @MonotonicNonNull WakeupListener wakeupListener;
public SequenceImageRenderer(
EditedMediaItemSequence sequence,
@ -523,9 +525,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
mayRenderStartOfStream
? RELEASE_FIRST_FRAME_IMMEDIATELY
: RELEASE_FIRST_FRAME_WHEN_STARTED;
// TODO: b/328444280 - Do not set a listener on VideoSink, but MediaCodecVideoRenderer must
// unregister itself as a listener too.
videoSink.setListener(VideoSink.Listener.NO_OP, /* executor= */ (runnable) -> {});
// TODO: b/328444280 - Unregister as a listener when the renderer is not used anymore
videoSink.setListener(
new VideoSink.Listener() {
@Override
public void onFrameAvailableForRendering() {
if (wakeupListener != null) {
wakeupListener.onWakeup();
}
}
},
directExecutor());
}
@Override
@ -658,7 +668,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void handleMessage(@MessageType int messageType, @Nullable Object message)
throws ExoPlaybackException {
if (messageType == MSG_SET_WAKEUP_LISTENER) {
videoSink.setWakeupListener((WakeupListener) checkNotNull(message));
this.wakeupListener = (WakeupListener) checkNotNull(message);
} else {
super.handleMessage(messageType, message);
}