Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)

Keep stream offset alive in ImageRenderer until stream transition
Fix modeled after OutputStreamInfo usage for stream offset in `MediaCodecRenderer`.

PiperOrigin-RevId: 601109900
(cherry picked from commit 688622eb47ac707affa824d3d68f44755f947380)
This commit is contained in:
parent a6756c6d40
commit db0262efdb
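For readers unfamiliar with the pattern the fix borrows from MediaCodecRenderer, the sketch below illustrates the idea in isolation. It is a simplified, hypothetical example (StreamOffsetTracker and its method names are illustrative, not part of androidx/media): a new stream's offset is not applied immediately; it is queued together with the last buffer timestamp of the previous stream and only becomes current once that buffer has been output. This mirrors the pendingOutputStreamChanges / OutputStreamInfo logic added to ImageRenderer in the diff below.

// Hypothetical, simplified sketch of the pattern; not part of androidx/media.
import java.util.ArrayDeque;

final class StreamOffsetTracker {

  private static final long TIME_UNSET = Long.MIN_VALUE + 1; // stand-in for C.TIME_UNSET

  private static final class OutputStreamInfo {
    final long previousStreamLastBufferTimeUs;
    final long streamOffsetUs;

    OutputStreamInfo(long previousStreamLastBufferTimeUs, long streamOffsetUs) {
      this.previousStreamLastBufferTimeUs = previousStreamLastBufferTimeUs;
      this.streamOffsetUs = streamOffsetUs;
    }
  }

  private final ArrayDeque<OutputStreamInfo> pendingChanges = new ArrayDeque<>();
  private OutputStreamInfo current = new OutputStreamInfo(TIME_UNSET, TIME_UNSET);
  private long largestQueuedPresentationTimeUs = TIME_UNSET;
  private long lastProcessedOutputBufferTimeUs = TIME_UNSET;

  /** Called when the renderer is given a new stream with the given offset. */
  void onStreamChanged(long offsetUs) {
    boolean previousStreamsFullyOutput =
        pendingChanges.isEmpty()
            && (largestQueuedPresentationTimeUs == TIME_UNSET
                || (lastProcessedOutputBufferTimeUs != TIME_UNSET
                    && lastProcessedOutputBufferTimeUs >= largestQueuedPresentationTimeUs));
    if (current.streamOffsetUs == TIME_UNSET || previousStreamsFullyOutput) {
      // First stream, or nothing from earlier streams is still in flight: switch immediately.
      current = new OutputStreamInfo(TIME_UNSET, offsetUs);
    } else {
      // Buffers of the previous stream are still pending: defer the switch until the last
      // queued buffer of that stream has been output.
      pendingChanges.add(new OutputStreamInfo(largestQueuedPresentationTimeUs, offsetUs));
    }
  }

  /** Called when an input buffer with the given (offset) timestamp is queued for decoding. */
  void onInputQueued(long bufferTimeUs) {
    largestQueuedPresentationTimeUs = Math.max(largestQueuedPresentationTimeUs, bufferTimeUs);
  }

  /** Called when an output buffer with the given timestamp has been processed. */
  void onOutputProcessed(long bufferTimeUs) {
    lastProcessedOutputBufferTimeUs = bufferTimeUs;
    while (!pendingChanges.isEmpty()
        && bufferTimeUs >= pendingChanges.peek().previousStreamLastBufferTimeUs) {
      current = pendingChanges.removeFirst();
    }
  }

  /** Converts a renderer-timebase timestamp back to the media timebase of the current stream. */
  long toMediaTimeUs(long bufferTimeUs) {
    return bufferTimeUs - current.streamOffsetUs;
  }

  // Tiny demonstration: stream A (offset 100_000) still has a buffer in flight when stream B
  // (offset 450_000) arrives, so B's offset only takes effect after A's last buffer is output.
  public static void main(String[] args) {
    StreamOffsetTracker tracker = new StreamOffsetTracker();
    tracker.onStreamChanged(100_000); // stream A
    tracker.onInputQueued(100_000);   // sample at media time 0 of stream A
    tracker.onStreamChanged(450_000); // stream B arrives before A's sample was output
    System.out.println(tracker.toMediaTimeUs(100_000)); // 0: still using A's offset
    tracker.onOutputProcessed(100_000);                 // A's last buffer has been output
    tracker.onInputQueued(450_010);
    System.out.println(tracker.toMediaTimeUs(450_010)); // 10: now using B's offset
  }
}
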
ImageRenderer.java

@@ -21,6 +21,7 @@ import static androidx.media3.common.C.FIRST_FRAME_RENDERED;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;

@@ -30,7 +31,6 @@ import androidx.annotation.IntDef;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.LongArrayQueue;
import androidx.media3.common.util.TraceUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.DecoderInputBuffer;
@@ -45,6 +45,7 @@ import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayDeque;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -91,10 +92,20 @@ public class ImageRenderer extends BaseRenderer {

  private final ImageDecoder.Factory decoderFactory;
  private final DecoderInputBuffer flagsOnlyBuffer;
  private final LongArrayQueue offsetQueue;

  /**
   * Pending {@link OutputStreamInfo} for following streams. All {@code OutputStreamInfo} added to
   * this list will have {@linkplain OutputStreamInfo#previousStreamLastBufferTimeUs
   * previousStreamLastBufferTimeUs} and {@linkplain OutputStreamInfo#streamOffsetUs streamOffsetUs}
   * set.
   */
  private final ArrayDeque<OutputStreamInfo> pendingOutputStreamChanges;

  private boolean inputStreamEnded;
  private boolean outputStreamEnded;
  private OutputStreamInfo outputStreamInfo;
  private long lastProcessedOutputBufferTimeUs;
  private long largestQueuedPresentationTimeUs;
  private @ReinitializationState int decoderReinitializationState;
  private @C.FirstFrameState int firstFrameState;
  private @Nullable Format inputFormat;
@@ -120,7 +131,10 @@ public class ImageRenderer extends BaseRenderer {
    this.decoderFactory = decoderFactory;
    this.imageOutput = getImageOutput(imageOutput);
    flagsOnlyBuffer = DecoderInputBuffer.newNoDataInstance();
    offsetQueue = new LongArrayQueue();
    outputStreamInfo = OutputStreamInfo.UNSET;
    pendingOutputStreamChanges = new ArrayDeque<>();
    largestQueuedPresentationTimeUs = C.TIME_UNSET;
    lastProcessedOutputBufferTimeUs = C.TIME_UNSET;
    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
    firstFrameState = FIRST_FRAME_NOT_RENDERED;
  }
@@ -140,8 +154,7 @@ public class ImageRenderer extends BaseRenderer {
    if (outputStreamEnded) {
      return;
    }
    // If the offsetQueue is empty, we haven't been given a stream to render.
    checkState(!offsetQueue.isEmpty());

    if (inputFormat == null) {
      // We don't have a format yet, so try and read one.
      FormatHolder formatHolder = getFormatHolder();
@@ -203,9 +216,20 @@ public class ImageRenderer extends BaseRenderer {
      throws ExoPlaybackException {
    // TODO: b/319484746 - Take startPositionUs into account to not output images too early.
    super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
    offsetQueue.add(offsetUs);
    inputStreamEnded = false;
    outputStreamEnded = false;
    if (outputStreamInfo.streamOffsetUs == C.TIME_UNSET
        || (pendingOutputStreamChanges.isEmpty()
            && (largestQueuedPresentationTimeUs == C.TIME_UNSET
                || (lastProcessedOutputBufferTimeUs != C.TIME_UNSET
                    && lastProcessedOutputBufferTimeUs >= largestQueuedPresentationTimeUs)))) {
      // Either the first stream, or all previous streams have never queued any samples or have been
      // fully output already.
      outputStreamInfo =
          new OutputStreamInfo(/* previousStreamLastBufferTimeUs= */ C.TIME_UNSET, offsetUs);
    } else {
      pendingOutputStreamChanges.add(
          new OutputStreamInfo(
              /* previousStreamLastBufferTimeUs= */ largestQueuedPresentationTimeUs, offsetUs));
    }
  }

  @Override
@@ -221,26 +245,26 @@ public class ImageRenderer extends BaseRenderer {
    if (decoder != null) {
      decoder.flush();
    }
    pendingOutputStreamChanges.clear();
  }

  @Override
  protected void onDisabled() {
    offsetQueue.clear();
    inputFormat = null;
    outputStreamInfo = OutputStreamInfo.UNSET;
    pendingOutputStreamChanges.clear();
    releaseDecoderResources();
    imageOutput.onDisabled();
  }

  @Override
  protected void onReset() {
    offsetQueue.clear();
    releaseDecoderResources();
    lowerFirstFrameState(FIRST_FRAME_NOT_RENDERED);
  }

  @Override
  protected void onRelease() {
    offsetQueue.clear();
    releaseDecoderResources();
  }

@@ -286,7 +310,6 @@ public class ImageRenderer extends BaseRenderer {
      return false;
    }
    if (checkStateNotNull(outputBuffer).isEndOfStream()) {
      offsetQueue.remove();
      if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
        // We're waiting to re-initialize the decoder, and have now processed all final buffers.
        releaseDecoderResources();
@@ -294,7 +317,7 @@ public class ImageRenderer extends BaseRenderer {
        initDecoder();
      } else {
        checkStateNotNull(outputBuffer).release();
        if (offsetQueue.isEmpty()) {
        if (pendingOutputStreamChanges.isEmpty()) {
          outputStreamEnded = true;
        }
      }
@@ -327,6 +350,7 @@ public class ImageRenderer extends BaseRenderer {
            tileInfo.getPresentationTimeUs())) {
      return false;
    }
    onProcessedOutputBuffer(checkStateNotNull(tileInfo).getPresentationTimeUs());
    firstFrameState = FIRST_FRAME_RENDERED;
    if (!isThumbnailGrid
        || checkStateNotNull(tileInfo).getTileIndex()
@@ -375,12 +399,26 @@ public class ImageRenderer extends BaseRenderer {
    // image.
    long earlyUs = bufferPresentationTimeUs - positionUs;
    if (shouldForceRender() || earlyUs < IMAGE_PRESENTATION_WINDOW_THRESHOLD_US) {
      imageOutput.onImageAvailable(bufferPresentationTimeUs - offsetQueue.element(), outputBitmap);
      imageOutput.onImageAvailable(
          bufferPresentationTimeUs - outputStreamInfo.streamOffsetUs, outputBitmap);
      return true;
    }
    return false;
  }

  /**
   * Called when an output buffer is successfully processed.
   *
   * @param presentationTimeUs The timestamp associated with the output buffer.
   */
  private void onProcessedOutputBuffer(long presentationTimeUs) {
    lastProcessedOutputBufferTimeUs = presentationTimeUs;
    while (!pendingOutputStreamChanges.isEmpty()
        && presentationTimeUs >= pendingOutputStreamChanges.peek().previousStreamLastBufferTimeUs) {
      outputStreamInfo = pendingOutputStreamChanges.removeFirst();
    }
  }

  /**
   * @param positionUs The current playback position in microseconds, measured at the start of the
   *     current iteration of the rendering loop.
@@ -432,6 +470,9 @@ public class ImageRenderer extends BaseRenderer {
      inputStreamEnded = true;
      inputBuffer = null;
      return false;
    } else {
      largestQueuedPresentationTimeUs =
          max(largestQueuedPresentationTimeUs, checkStateNotNull(inputBuffer).timeUs);
    }
    // If inputBuffer was queued, the decoder already cleared it. Otherwise, inputBuffer is
    // cleared here.
@@ -479,6 +520,7 @@ public class ImageRenderer extends BaseRenderer {
  private void releaseDecoderResources() {
    inputBuffer = null;
    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
    largestQueuedPresentationTimeUs = C.TIME_UNSET;
    if (decoder != null) {
      decoder.release();
      decoder = null;
@@ -557,4 +599,19 @@ public class ImageRenderer extends BaseRenderer {
      return tileBitmap != null;
    }
  }

  private static final class OutputStreamInfo {

    public static final OutputStreamInfo UNSET =
        new OutputStreamInfo(
            /* previousStreamLastBufferTimeUs= */ C.TIME_UNSET, /* streamOffsetUs= */ C.TIME_UNSET);

    public final long previousStreamLastBufferTimeUs;
    public final long streamOffsetUs;

    public OutputStreamInfo(long previousStreamLastBufferTimeUs, long streamOffsetUs) {
      this.previousStreamLastBufferTimeUs = previousStreamLastBufferTimeUs;
      this.streamOffsetUs = streamOffsetUs;
    }
  }
}
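
For context on the callback that consumes these timestamps: ImageRenderer forwards decoded images to the ImageOutput passed into its constructor (see getImageOutput(imageOutput) above), and with this change the presentationTimeUs it reports is relative to the media item the image belongs to, because the owning stream's offset (outputStreamInfo.streamOffsetUs) is subtracted. A minimal, hypothetical ImageOutput implementation, assuming only the two callbacks visible in the diff (onImageAvailable and onDisabled), might look like this:

// Illustrative only; LoggingImageOutput is not part of the commit.
import android.graphics.Bitmap;
import android.util.Log;
import androidx.media3.exoplayer.image.ImageOutput;

final class LoggingImageOutput implements ImageOutput {

  private static final String TAG = "LoggingImageOutput";

  @Override
  public void onImageAvailable(long presentationTimeUs, Bitmap bitmap) {
    // presentationTimeUs is in the media-item timebase (the renderer's stream offset has
    // already been removed by ImageRenderer).
    Log.d(TAG, "image at " + presentationTimeUs + "us: " + bitmap.getWidth() + "x" + bitmap.getHeight());
  }

  @Override
  public void onDisabled() {
    Log.d(TAG, "image renderer disabled");
  }
}
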
ImageRendererTest.java

@@ -217,6 +217,136 @@ public class ImageRendererTest {
    assertThat(renderedBitmaps.get(1).second).isSameInstanceAs(fakeDecodedBitmap2);
  }

  @Test
  public void
      renderTwoStreams_withReplaceStreamPriorToFinishingFirstStreamOutput_rendersWithCorrectPosition()
          throws Exception {
    FakeSampleStream fakeSampleStream1 =
        createSampleStream(
            JPEG_FORMAT_WITH_FOUR_TILES,
            ImmutableList.of(
                oneByteSample(/* timeUs= */ 0L, /* flags= */ C.BUFFER_FLAG_KEY_FRAME),
                emptySample(/* timeUs= */ 100_000L, /* flags= */ 0),
                emptySample(/* timeUs= */ 200_000L, /* flags= */ 0),
                emptySample(/* timeUs= */ 300_000L, /* flags= */ 0)));
    fakeSampleStream1.writeData(/* startPositionUs= */ 0);
    FakeSampleStream fakeSampleStream2 =
        createSampleStream(
            JPEG_FORMAT_WITH_FOUR_TILES,
            ImmutableList.of(
                oneByteSample(/* timeUs= */ 10L, /* flags= */ C.BUFFER_FLAG_KEY_FRAME),
                END_OF_STREAM_ITEM));
    fakeSampleStream2.writeData(/* startPositionUs= */ 10L);
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream1,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 100_000L,
        new MediaSource.MediaPeriodId(new Object()));
    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 100_000L, /* elapsedRealtimeUs= */ 0);
    }
    renderer.start();
    renderer.render(/* positionUs= */ 200_000L, /* elapsedRealtimeUs= */ 0);
    renderer.render(/* positionUs= */ 300_000L, /* elapsedRealtimeUs= */ 0);

    renderer.replaceStream(
        new Format[] {PNG_FORMAT},
        fakeSampleStream2,
        /* startPositionUs= */ 10,
        /* offsetUs= */ 450_000L,
        new MediaSource.MediaPeriodId(new Object()));
    renderer.setCurrentStreamFinal();
    // Render last sample of first stream
    renderer.render(/* positionUs= */ 400_000L, /* elapsedRealtimeUs= */ 0);
    StopWatch hasReadStreamToEndStopWatch = new StopWatch(HAS_READ_STREAM_TO_END_TIMEOUT_MESSAGE);
    while (!renderer.hasReadStreamToEnd() && hasReadStreamToEndStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 450_010L, /* elapsedRealtimeUs= */ 0L);
    }
    renderer.stop();

    assertThat(renderedBitmaps).hasSize(5);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0);
    assertThat(renderedBitmaps.get(4).first).isEqualTo(10L);
  }

  @Test
  public void renderTwoStreams_withDisableandEnablePostReplaceStream_rendersWithCorrectPosition()
      throws Exception {
    FakeSampleStream fakeSampleStream1 =
        createSampleStream(
            JPEG_FORMAT_WITH_FOUR_TILES,
            ImmutableList.of(
                oneByteSample(/* timeUs= */ 0L, /* flags= */ C.BUFFER_FLAG_KEY_FRAME),
                emptySample(/* timeUs= */ 100_000L, /* flags= */ 0),
                emptySample(/* timeUs= */ 200_000L, /* flags= */ 0),
                emptySample(/* timeUs= */ 300_000L, /* flags= */ 0)));
    fakeSampleStream1.writeData(/* startPositionUs= */ 0);
    FakeSampleStream fakeSampleStream2 =
        createSampleStream(
            JPEG_FORMAT_WITH_FOUR_TILES,
            ImmutableList.of(
                oneByteSample(/* timeUs= */ 10L, /* flags= */ C.BUFFER_FLAG_KEY_FRAME),
                END_OF_STREAM_ITEM));
    fakeSampleStream2.writeData(/* startPositionUs= */ 10L);
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream1,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 100_000L,
        new MediaSource.MediaPeriodId(new Object()));
    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 100_000L, /* elapsedRealtimeUs= */ 0);
    }
    renderer.start();
    renderer.render(/* positionUs= */ 200_000L, /* elapsedRealtimeUs= */ 0);
    renderer.render(/* positionUs= */ 300_000L, /* elapsedRealtimeUs= */ 0);
    renderer.replaceStream(
        new Format[] {PNG_FORMAT},
        fakeSampleStream2,
        /* startPositionUs= */ 10,
        /* offsetUs= */ 400_000L,
        new MediaSource.MediaPeriodId(new Object()));

    // Reset and enable renderer as if application changed playlist to just the second stream.
    renderer.stop();
    renderer.disable();
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream2,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));
    isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0L, /* elapsedRealtimeUs= */ 0);
    }
    renderer.start();
    StopWatch hasReadStreamToEndStopWatch = new StopWatch(HAS_READ_STREAM_TO_END_TIMEOUT_MESSAGE);
    while (!renderer.hasReadStreamToEnd() && hasReadStreamToEndStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0L, /* elapsedRealtimeUs= */ 0L);
    }
    renderer.stop();

    assertThat(renderedBitmaps).hasSize(4);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0);
    assertThat(renderedBitmaps.get(3).first).isEqualTo(10L);
  }

  @Test
  public void renderTwoStreams_differentFormat_rendersToImageOutput() throws Exception {
    FakeSampleStream fakeSampleStream1 = createSampleStream(/* timeUs= */ 0);
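
The positions asserted in the two new tests above follow from the offset arithmetic the fix protects: sample timestamps are shifted into the renderer timebase by the stream's offsetUs (as BaseRenderer does when reading from the sample stream), and ImageRenderer subtracts the owning stream's offset again before calling ImageOutput. A worked example of the expected values, as a standalone sketch rather than code from the commit:

// Illustrative arithmetic only; the class name is hypothetical.
public final class OffsetArithmeticExample {
  public static void main(String[] args) {
    // First stream: enabled with offsetUs = 100_000 and a sample at media time 0.
    long stream1OffsetUs = 100_000;
    long bufferTimeUs = 0 + stream1OffsetUs; // 100_000 in the renderer timebase
    System.out.println(bufferTimeUs - stream1OffsetUs); // 0 -> renderedBitmaps.get(0).first

    // Second stream: replaceStream with offsetUs = 450_000 and a sample at media time 10.
    long stream2OffsetUs = 450_000;
    bufferTimeUs = 10 + stream2OffsetUs; // 450_010, matching render(/* positionUs= */ 450_010L, ...)
    System.out.println(bufferTimeUs - stream2OffsetUs); // 10 -> renderedBitmaps.get(4).first
  }
}

The first stream's images therefore have to keep being reported against the first stream's offset even after replaceStream has already supplied the next offset; that is what the OutputStreamInfo change in ImageRenderer guarantees.
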
DashPlaybackTest.java

@@ -347,4 +347,28 @@ public final class DashPlaybackTest {
    DumpFileAsserts.assertOutput(
        applicationContext, playbackOutput, "playbackdumps/dash/loadimage.dump");
  }

  @Test
  public void playThumbnailGrid_withSeekAfterEoS() throws Exception {
    Context applicationContext = ApplicationProvider.getApplicationContext();
    CapturingRenderersFactory capturingRenderersFactory =
        new CapturingRenderersFactory(applicationContext);
    ExoPlayer player =
        new ExoPlayer.Builder(applicationContext, capturingRenderersFactory)
            .setClock(new FakeClock(/* isAutoAdvancing= */ true))
            .build();
    PlaybackOutput playbackOutput = PlaybackOutput.register(player, capturingRenderersFactory);
    player.setMediaItem(MediaItem.fromUri("asset:///media/dash/thumbnails/sample.mpd"));
    player.seekTo(55_000L);
    player.prepare();
    player.play();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);

    player.seekTo(55_000L);
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
    player.release();

    DumpFileAsserts.assertOutput(
        applicationContext, playbackOutput, "playbackdumps/dash/image_with_seek_after_eos.dump");
  }
}
playbackdumps/dash/image_with_seek_after_eos.dump (new file)

@@ -0,0 +1,38 @@
ImageOutput:
  rendered image count = 12
  image output #1:
    presentationTimeUs = 54375000
    bitmap hash = 1407821609
  image output #2:
    presentationTimeUs = 55312500
    bitmap hash = -1744072926
  image output #3:
    presentationTimeUs = 56250000
    bitmap hash = -1355216794
  image output #4:
    presentationTimeUs = 57187500
    bitmap hash = -7610058
  image output #5:
    presentationTimeUs = 58125000
    bitmap hash = 1362483058
  image output #6:
    presentationTimeUs = 59062500
    bitmap hash = 442567684
  image output #7:
    presentationTimeUs = 54375000
    bitmap hash = 1407821609
  image output #8:
    presentationTimeUs = 55312500
    bitmap hash = -1744072926
  image output #9:
    presentationTimeUs = 56250000
    bitmap hash = -1355216794
  image output #10:
    presentationTimeUs = 57187500
    bitmap hash = -7610058
  image output #11:
    presentationTimeUs = 58125000
    bitmap hash = 1362483058
  image output #12:
    presentationTimeUs = 59062500
    bitmap hash = 442567684