Only use first frame stream start position check at stream transitions

We currently only force the first frame if the frame timestamp is
greater than the stream *offset*.

This is wrong for two reasons:
 1. The timestamp and the offset are not comparable and it should be
    the stream start position.
 2. The check should only be applied at stream transitions where we
    need to make sure that a new first frame isn't rendered until we
    passed the transition point.

We have to fix both issues together, because fixing just issue (1)
causes seeks to before the start position to no longer render the
frame (and playback will be stuck). A new test covers this case.

We also amend the stream transition test case so that it actually tests
what it promises to test, and add a test for prerolling samples at the
beginning of the stream, to ensure the first frame is still rendered.

Issue: androidx/media#291
PiperOrigin-RevId: 552858967
This commit is contained in:
tonihei 2023-08-01 18:09:20 +00:00 committed by Tianyi Feng
parent 79e05ad049
commit 69656774f9
5 changed files with 104 additions and 13 deletions

View File

@ -95,6 +95,9 @@
* Allow `MediaCodecVideoRenderer` to use a custom * Allow `MediaCodecVideoRenderer` to use a custom
`VideoFrameProcessor.Factory`. `VideoFrameProcessor.Factory`.
* H.265/HEVC: Fix parsing SPS short and long term reference picture info. * H.265/HEVC: Fix parsing SPS short and long term reference picture info.
* Fix bug where the first frame couldn't be rendered if the audio stream
starts with negative timestamps
([#291](https://github.com/androidx/media/issues/291)).
* Text: * Text:
* CEA-608: Change cue truncation logic to only consider visible text. * CEA-608: Change cue truncation logic to only consider visible text.
Previously indent and tab offset were included when limiting the cue Previously indent and tab offset were included when limiting the cue

View File

@ -1510,8 +1510,9 @@ public final class C {
* State of the first frame in a video renderer. * State of the first frame in a video renderer.
* *
* <p>One of {@link #FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED}, {@link * <p>One of {@link #FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED}, {@link
* #FIRST_FRAME_NOT_RENDERED} or {@link #FIRST_FRAME_RENDERED}. The stages are ordered and * #FIRST_FRAME_NOT_RENDERED}, {@link #FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE} or {@link
* comparable, i.e., a value implies that all stages with higher values are not reached yet. * #FIRST_FRAME_RENDERED}. The stages are ordered and comparable, i.e., a value implies that all
* stages with higher values are not reached yet.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -1520,6 +1521,7 @@ public final class C {
@IntDef({ @IntDef({
FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED, FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED,
FIRST_FRAME_NOT_RENDERED, FIRST_FRAME_NOT_RENDERED,
FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE,
FIRST_FRAME_RENDERED FIRST_FRAME_RENDERED
}) })
public @interface FirstFrameState {} public @interface FirstFrameState {}
@ -1533,8 +1535,11 @@ public final class C {
/** The first frame was not rendered after the last reset, output surface or stream change. */ /** The first frame was not rendered after the last reset, output surface or stream change. */
@UnstableApi public static final int FIRST_FRAME_NOT_RENDERED = 1; @UnstableApi public static final int FIRST_FRAME_NOT_RENDERED = 1;
/** The first frame was not rendered after the last stream change. */
@UnstableApi public static final int FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE = 2;
/** The first frame was rendered. */ /** The first frame was rendered. */
@UnstableApi public static final int FIRST_FRAME_RENDERED = 2; @UnstableApi public static final int FIRST_FRAME_RENDERED = 3;
/** /**
* @deprecated Use {@link Util#usToMs(long)}. * @deprecated Use {@link Util#usToMs(long)}.

View File

@ -2102,6 +2102,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
return outputStreamInfo.streamOffsetUs; return outputStreamInfo.streamOffsetUs;
} }
/** Returns the start position of the current output stream in microseconds. */
protected final long getOutputStreamStartPositionUs() {
return outputStreamInfo.startPositionUs;
}
private void setOutputStreamInfo(OutputStreamInfo outputStreamInfo) { private void setOutputStreamInfo(OutputStreamInfo outputStreamInfo) {
this.outputStreamInfo = outputStreamInfo; this.outputStreamInfo = outputStreamInfo;
if (outputStreamInfo.streamOffsetUs != C.TIME_UNSET) { if (outputStreamInfo.streamOffsetUs != C.TIME_UNSET) {

View File

@ -1341,18 +1341,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// No force rendering during joining. // No force rendering during joining.
return false; return false;
} }
if (positionUs < getOutputStreamOffsetUs()) {
// No force rendering if we haven't reached the stream start position.
// TODO: b/160461756 - This is a bug because it compares against the offset and not the start
// position and also should only be applied when transitioning streams, not after every reset.
return false;
}
boolean isStarted = getState() == STATE_STARTED; boolean isStarted = getState() == STATE_STARTED;
switch (firstFrameState) { switch (firstFrameState) {
case C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED: case C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED:
return isStarted; return isStarted;
case C.FIRST_FRAME_NOT_RENDERED: case C.FIRST_FRAME_NOT_RENDERED:
return true; return true;
case C.FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE:
return positionUs >= getOutputStreamStartPositionUs();
case C.FIRST_FRAME_RENDERED: case C.FIRST_FRAME_RENDERED:
long elapsedSinceLastRenderUs = msToUs(getClock().elapsedRealtime()) - lastRenderRealtimeUs; long elapsedSinceLastRenderUs = msToUs(getClock().elapsedRealtime()) - lastRenderRealtimeUs;
return isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs); return isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs);
@ -1431,7 +1427,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
protected void onProcessedStreamChange() { protected void onProcessedStreamChange() {
super.onProcessedStreamChange(); super.onProcessedStreamChange();
lowerFirstFrameState(C.FIRST_FRAME_NOT_RENDERED); lowerFirstFrameState(C.FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE);
} }
/** /**

View File

@ -509,6 +509,38 @@ public class MediaCodecVideoRendererTest {
verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong()); verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
} }
@Test
public void enable_withPrerollSamplesLessThanStartPosition_rendersFirstFrame() throws Exception {
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ VIDEO_H264,
ImmutableList.of(
oneByteSample(/* timeUs= */ -500, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 500),
oneByteSample(/* timeUs= */ 1500)));
fakeSampleStream.writeData(/* startPositionUs= */ -500);
mediaCodecVideoRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {VIDEO_H264},
fakeSampleStream,
/* positionUs= */ 2000,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 2000,
/* offsetUs= */ 1000);
for (int i = 0; i < 10; i++) {
mediaCodecVideoRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
shadowOf(testMainLooper).idle();
verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}
@Test @Test
public void replaceStream_rendersFirstFrameOnlyAfterStartPosition() throws Exception { public void replaceStream_rendersFirstFrameOnlyAfterStartPosition() throws Exception {
ShadowLooper shadowLooper = shadowOf(testMainLooper); ShadowLooper shadowLooper = shadowOf(testMainLooper);
@ -545,7 +577,8 @@ public class MediaCodecVideoRendererTest {
mediaCodecVideoRenderer.start(); mediaCodecVideoRenderer.start();
boolean replacedStream = false; boolean replacedStream = false;
for (int i = 0; i <= 10; i++) { // Render to just before the specified start position.
for (int i = 0; i < 10; i++) {
mediaCodecVideoRenderer.render( mediaCodecVideoRenderer.render(
/* positionUs= */ i * 10, SystemClock.elapsedRealtime() * 1000); /* positionUs= */ i * 10, SystemClock.elapsedRealtime() * 1000);
if (!replacedStream && mediaCodecVideoRenderer.hasReadStreamToEnd()) { if (!replacedStream && mediaCodecVideoRenderer.hasReadStreamToEnd()) {
@ -553,12 +586,19 @@ public class MediaCodecVideoRendererTest {
new Format[] {VIDEO_H264}, new Format[] {VIDEO_H264},
fakeSampleStream2, fakeSampleStream2,
/* startPositionUs= */ 100, /* startPositionUs= */ 100,
/* offsetUs= */ 100); /* offsetUs= */ 50);
replacedStream = true; replacedStream = true;
} }
} }
// Expect only the first frame of the first stream to have been rendered. // Assert that only one first frame was rendered so far.
shadowLooper.idle();
verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
// Render at start position.
mediaCodecVideoRenderer.render(/* positionUs= */ 100, SystemClock.elapsedRealtime() * 1000);
// Assert the new first frame was rendered.
shadowLooper.idle(); shadowLooper.idle();
verify(eventListener, times(2)) verify(eventListener, times(2))
.onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong()); .onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
@ -622,6 +662,48 @@ public class MediaCodecVideoRendererTest {
.onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong()); .onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
} }
@Test
public void resetPosition_toBeforeOriginalStartPosition_rendersFirstFrame() throws Exception {
ShadowLooper shadowLooper = shadowOf(testMainLooper);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ VIDEO_H264,
ImmutableList.of(oneByteSample(/* timeUs= */ 1000, C.BUFFER_FLAG_KEY_FRAME)));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecVideoRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {VIDEO_H264},
fakeSampleStream,
/* positionUs= */ 1000,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 1000,
/* offsetUs= */ 0);
mediaCodecVideoRenderer.start();
// Render at the original start position.
for (int i = 0; i < 10; i++) {
mediaCodecVideoRenderer.render(/* positionUs= */ 1000, SystemClock.elapsedRealtime() * 1000);
}
// Reset the position to before the original start position and render at this position.
mediaCodecVideoRenderer.resetPosition(500);
fakeSampleStream.append(
ImmutableList.of(oneByteSample(/* timeUs= */ 500, C.BUFFER_FLAG_KEY_FRAME)));
fakeSampleStream.writeData(/* startPositionUs= */ 500);
for (int i = 0; i < 10; i++) {
mediaCodecVideoRenderer.render(/* positionUs= */ 500, SystemClock.elapsedRealtime() * 1000);
}
// Assert that we rendered the first frame after the reset.
shadowLooper.idle();
verify(eventListener, times(2))
.onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}
@Test @Test
public void supportsFormat_withDolbyVisionMedia_returnsTrueWhenFallbackToH265orH264Allowed() public void supportsFormat_withDolbyVisionMedia_returnsTrueWhenFallbackToH265orH264Allowed()
throws Exception { throws Exception {