Add VideoFrameMetadataListener calls to FakeVideoRenderer

This change also tightens `FakeVideoRenderer` to only 'handle' buffers
that are close to the current playback position.

This condition controls whether the renderer fires `onVideoSizeChanged`
and `onRenderedFirstFrame`, both of which should only fire once the
frame has been 'released' to the screen. In a real renderer that happens
much closer to the current position than the 250ms readahead allowed by
`FakeRenderer.SOURCE_READAHEAD_US`.

A later change uses `VideoFrameMetadataListener` to test ExoPlayer
behaviour in scrubbing mode.

PiperOrigin-RevId: 740763283
Author: ibaker
Committed by: Copybara-Service
Date: 2025-03-26 07:42:49 -07:00
Parent: 95fbecd076
Commit: 96bb777484
4 changed files with 154 additions and 22 deletions
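For orientation, a minimal sketch (not part of this commit) of the `VideoFrameMetadataListener` hook that this change wires into `FakeVideoRenderer`; the logger class, tag and `attachTo` helper are invented for illustration:

import android.media.MediaFormat;
import android.util.Log;
import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;

/** Logs each video frame shortly before the renderer releases it to the output surface. */
final class FrameTimestampLogger implements VideoFrameMetadataListener {

  @Override
  public void onVideoFrameAboutToBeRendered(
      long presentationTimeUs,
      long releaseTimeNs,
      Format format,
      @Nullable MediaFormat mediaFormat) {
    Log.d("FrameTimestampLogger", "frame=" + presentationTimeUs + "us release=" + releaseTimeNs + "ns");
  }

  static void attachTo(ExoPlayer player) {
    // ExoPlayer forwards the listener to the video renderer via
    // Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER, which FakeVideoRenderer now handles.
    player.setVideoFrameMetadataListener(new FrameTimestampLogger());
  }
}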

File: build.gradle

@@ -41,6 +41,7 @@ dependencies {
implementation 'com.squareup.okhttp3:mockwebserver:' + okhttpVersion
api project(modulePrefix + 'lib-exoplayer')
api project(modulePrefix + 'lib-transformer')
testImplementation project(modulePrefix + 'test-utils-robolectric')
testImplementation 'androidx.test.espresso:espresso-core:' + androidxTestEspressoVersion
testImplementation 'org.robolectric:robolectric:' + robolectricVersion
}

File: FakeMediaPeriod.java

@@ -47,7 +47,9 @@ import androidx.media3.exoplayer.upstream.Allocator;
import androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.math.DoubleMath;
import java.io.IOException;
import java.math.RoundingMode;
import java.util.Collections;
import java.util.List;
import java.util.Set;
@@ -80,6 +82,33 @@ public class FakeMediaPeriod implements MediaPeriod {
ImmutableList.of(
oneByteSample(sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME), END_OF_STREAM_ITEM);
}
/**
* Creates a {@code TrackDataFactory} which generates samples at the given rate to cover the
* provided duration, with a specified key frame interval.
*
* @param initialSampleTimeUs The time of the initial sample, in microseconds.
* @param sampleRate The number of samples per second.
* @param durationUs The duration of samples to generate, in microseconds.
     * @param keyFrameInterval The key frame interval: every {@code keyFrameInterval}-th sample,
     *     starting with the first, is marked as a key frame.
* @return The {@code TrackDataFactory}.
*/
static TrackDataFactory samplesWithRateDurationAndKeyframeInterval(
long initialSampleTimeUs, float sampleRate, long durationUs, int keyFrameInterval) {
return (unusedFormat, unusedMediaPeriodId) -> {
ImmutableList.Builder<FakeSampleStreamItem> samples = ImmutableList.builder();
        // Number of samples needed to cover durationUs at sampleRate.
        long frameCount =
            DoubleMath.roundToLong(durationUs * sampleRate / C.MICROS_PER_SECOND, RoundingMode.DOWN);
        for (int frameIndex = 0; frameIndex < frameCount; frameIndex++) {
long frameTimeUs =
initialSampleTimeUs
+ DoubleMath.roundToLong(
(frameIndex * C.MICROS_PER_SECOND) / sampleRate, RoundingMode.DOWN);
samples.add(
FakeSampleStreamItem.oneByteSample(
frameTimeUs, frameIndex % keyFrameInterval == 0 ? C.BUFFER_FLAG_KEY_FRAME : 0));
}
return samples.add(END_OF_STREAM_ITEM).build();
};
}
}
private final TrackGroupArray trackGroupArray;
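For reference (not part of the diff), a sketch of how the new factory is wired into a `FakeMediaSource`, mirroring the test added at the end of this change; the 30 fps rate, 10s duration and 60-sample key frame interval are just the values that test uses:

// Inside a test in the androidx.media3.test.utils package (the factory method is
// package-private): 30 fps samples covering 10 seconds, with a key frame every 60 samples.
FakeMediaSource mediaSource =
    new FakeMediaSource(
        new FakeTimeline(),
        DrmSessionManager.DRM_UNSUPPORTED,
        TrackDataFactory.samplesWithRateDurationAndKeyframeInterval(
            /* initialSampleTimeUs= */ 0,
            /* sampleRate= */ 30,
            /* durationUs= */ 10_000_000,
            /* keyFrameInterval= */ 60),
        ExoPlayerTestRunner.VIDEO_FORMAT);
// Sample times are 0, 33_333, 66_666, 100_000, ..., with BUFFER_FLAG_KEY_FRAME set on
// samples 0, 60 (2_000_000us), 120 (4_000_000us), and so on.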

File: FakeVideoRenderer.java

@@ -16,18 +16,20 @@
package androidx.media3.test.utils;
import static androidx.media3.common.util.Assertions.checkNotNull;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.VideoSize;
import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.DecoderCounters;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
import androidx.media3.exoplayer.video.VideoRendererEventListener;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
@@ -43,6 +45,7 @@ public class FakeVideoRenderer extends FakeRenderer {
private final AtomicReference<VideoSize> videoSizeRef = new AtomicReference<>();
private @MonotonicNonNull Format format;
@Nullable private Object output;
@Nullable private VideoFrameMetadataListener videoFrameMetadataListener;
private boolean renderedFirstFrameAfterReset;
private boolean mayRenderFirstFrameAfterEnableIfNotStarted;
private boolean renderedFirstFrameAfterEnable;
@@ -123,7 +126,9 @@ public class FakeVideoRenderer extends FakeRenderer {
output = message;
renderedFirstFrameAfterReset = false;
break;
+      case Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER:
+        this.videoFrameMetadataListener = (VideoFrameMetadataListener) message;
+        break;
case Renderer.MSG_SET_AUDIO_ATTRIBUTES:
case Renderer.MSG_SET_AUDIO_SESSION_ID:
case Renderer.MSG_SET_AUX_EFFECT_INFO:
@@ -131,7 +136,6 @@ public class FakeVideoRenderer extends FakeRenderer {
case Renderer.MSG_SET_CHANGE_FRAME_RATE_STRATEGY:
case Renderer.MSG_SET_SCALING_MODE:
case Renderer.MSG_SET_SKIP_SILENCE_ENABLED:
-      case Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER:
case Renderer.MSG_SET_VOLUME:
case Renderer.MSG_SET_WAKEUP_LISTENER:
default:
@@ -141,32 +145,44 @@
@Override
protected boolean shouldProcessBuffer(long bufferTimeUs, long playbackPositionUs) {
-    boolean shouldProcess = super.shouldProcessBuffer(bufferTimeUs, playbackPositionUs);
boolean shouldRenderFirstFrame =
output != null
&& (!renderedFirstFrameAfterEnable
? (getState() == Renderer.STATE_STARTED
|| mayRenderFirstFrameAfterEnableIfNotStarted)
: !renderedFirstFrameAfterReset);
-    shouldProcess |= shouldRenderFirstFrame && playbackPositionUs >= getStreamOffsetUs();
+    // Process a buffer if it's due within one 60Hz vsync, or it's the first frame.
+    boolean shouldProcess =
+        bufferTimeUs < playbackPositionUs + 16_666
+            || (shouldRenderFirstFrame && playbackPositionUs >= getStreamOffsetUs());
@Nullable Object output = this.output;
-    if (shouldProcess && !renderedFirstFrameAfterReset && output != null) {
-      @MonotonicNonNull Format format = Assertions.checkNotNull(this.format);
-      handler.post(
-          () -> {
-            VideoSize videoSize =
-                new VideoSize(format.width, format.height, format.pixelWidthHeightRatio);
-            if (!Objects.equals(videoSize, videoSizeRef.get())) {
-              eventListener.onVideoSizeChanged(videoSize);
-              videoSizeRef.set(videoSize);
-            }
-          });
-      handler.post(
-          () ->
-              eventListener.onRenderedFirstFrame(
-                  output, /* renderTimeMs= */ SystemClock.elapsedRealtime()));
-      renderedFirstFrameAfterReset = true;
-      renderedFirstFrameAfterEnable = true;
+    if (shouldProcess && bufferTimeUs >= getLastResetPositionUs()) {
+      checkNotNull(format);
+      if (videoFrameMetadataListener != null) {
+        videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
+            bufferTimeUs - getStreamOffsetUs(),
+            /* releaseTimeNs= */ System.nanoTime(),
+            format,
+            /* mediaFormat= */ null);
+      }
+      if (!renderedFirstFrameAfterReset && output != null) {
+        Format format = this.format;
+        handler.post(
+            () -> {
+              VideoSize videoSize =
+                  new VideoSize(format.width, format.height, format.pixelWidthHeightRatio);
+              if (!Objects.equals(videoSize, videoSizeRef.get())) {
+                eventListener.onVideoSizeChanged(videoSize);
+                videoSizeRef.set(videoSize);
+              }
+            });
+        handler.post(
+            () ->
+                eventListener.onRenderedFirstFrame(
+                    output, /* renderTimeMs= */ SystemClock.elapsedRealtime()));
+        renderedFirstFrameAfterReset = true;
+        renderedFirstFrameAfterEnable = true;
+      }
}
return shouldProcess;
}
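To make the new gating concrete, a self-contained sketch (illustrative only, with invented numbers) of the one-vsync comparison that `shouldProcessBuffer` now performs:

/** Standalone illustration of the one-vsync gate used by FakeVideoRenderer above. */
final class VsyncGateSketch {
  private static final long VSYNC_60HZ_US = 16_666;

  static boolean shouldProcess(long bufferTimeUs, long playbackPositionUs, boolean firstFrameDue) {
    // Mirrors the new condition: process a buffer only if it is due within one 60Hz vsync
    // of the playback position, or if the first frame still needs to be rendered.
    return bufferTimeUs < playbackPositionUs + VSYNC_60HZ_US || firstFrameDue;
  }

  public static void main(String[] args) {
    // 10ms ahead of the position: inside the old 250ms SOURCE_READAHEAD_US window,
    // but more than one vsync away, so no longer 'handled'.
    System.out.println(shouldProcess(1_010_000, 1_000_000, false)); // false
    // 5ms ahead: due within one vsync, so it is processed (and, if it is at or after the
    // last reset position, reported to the VideoFrameMetadataListener).
    System.out.println(shouldProcess(1_005_000, 1_000_000, false)); // true
  }
}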

File: FakeVideoRendererTest.java (new)

@@ -0,0 +1,86 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.test.utils;
import static androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition.DEFAULT_WINDOW_DURATION_US;
import static androidx.media3.test.utils.robolectric.TestPlayerRunHelper.advance;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import androidx.media3.common.Timeline;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.drm.DrmSessionManager;
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
import androidx.media3.test.utils.FakeMediaPeriod.TrackDataFactory;
import androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
/** Tests for {@link FakeVideoRenderer}. */
@RunWith(AndroidJUnit4.class)
public final class FakeVideoRendererTest {
@Test
public void videoFrameMetadataListener_skipsDecodeOnlySamples() throws Exception {
Timeline timeline =
new FakeTimeline(
new TimelineWindowDefinition.Builder().setWindowPositionInFirstPeriodUs(0).build());
ExoPlayer player =
new TestExoPlayerBuilder(ApplicationProvider.getApplicationContext()).build();
Surface surface = new Surface(new SurfaceTexture(/* texName= */ 1));
player.setVideoSurface(surface);
VideoFrameMetadataListener mockVideoFrameMetadataListener =
mock(VideoFrameMetadataListener.class);
player.setVideoFrameMetadataListener(mockVideoFrameMetadataListener);
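    // The mock listener records every onVideoFrameAboutToBeRendered call for the assertions below.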
player.setMediaSource(
new FakeMediaSource(
timeline,
DrmSessionManager.DRM_UNSUPPORTED,
TrackDataFactory.samplesWithRateDurationAndKeyframeInterval(
/* initialSampleTimeUs= */ 0,
/* sampleRate= */ 30,
/* durationUs= */ DEFAULT_WINDOW_DURATION_US,
/* keyFrameInterval= */ 60),
ExoPlayerTestRunner.VIDEO_FORMAT));
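    // The factory above produces 30 fps samples for the whole window, with a key frame
    // every 60 samples, i.e. at 0s, 2s, 4s, ...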
player.prepare();
player.play();
advance(player).untilPosition(0, 100);
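    // Seek to a position between key frames: the renderer must decode forward from the key
    // frame at 2s, but those decode-only samples must not be reported to the listener.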
player.seekTo(2500);
advance(player).untilPosition(0, 2600);
player.stop();
player.release();
surface.release();
ArgumentCaptor<Long> presentationTimeUsCaptor = ArgumentCaptor.forClass(Long.class);
verify(mockVideoFrameMetadataListener, atLeastOnce())
.onVideoFrameAboutToBeRendered(presentationTimeUsCaptor.capture(), anyLong(), any(), any());
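    // Expect only the frames rendered near the start (0..100ms) and at or after the seek
    // position (2.5s..2.6s); the decode-only samples between the 2s key frame and the 2.5s
    // seek position are skipped.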
assertThat(presentationTimeUsCaptor.getAllValues())
.containsExactly(
0L, 33_333L, 66_666L, 100_000L, 2_500_000L, 2_533_333L, 2_566_666L, 2_600_000L)
.inOrder();
}
}