Support multiple streams in the ImageRenderer
PiperOrigin-RevId: 565410924
This commit is contained in: parent 5ef5d46708, commit 16b0ea850f
@@ -31,4 +31,7 @@ public interface ImageOutput {
   * @param bitmap The new image available.
   */
  void onImageAvailable(long presentationTimeUs, Bitmap bitmap);

  /** Called when the renderer is disabled. */
  void onDisabled();
}
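For context, a caller receives decoded images by supplying an ImageOutput to the renderer. A minimal sketch of an implementation (class and field names here are illustrative, not part of this change) that simply records every delivered bitmap, mirroring the queuingImageOutput used in ImageRendererTest further down, could look like this:

import android.graphics.Bitmap;
import android.util.Pair;
import java.util.ArrayList;
import java.util.List;

/** Illustrative ImageOutput that records every bitmap the renderer delivers. */
final class CollectingImageOutput implements ImageOutput {
  final List<Pair<Long, Bitmap>> images = new ArrayList<>();

  @Override
  public void onImageAvailable(long presentationTimeUs, Bitmap bitmap) {
    images.add(Pair.create(presentationTimeUs, bitmap));
  }

  @Override
  public void onDisabled() {
    // Nothing to release in this sketch; a real output might clear caches here.
  }
}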
@@ -15,16 +15,22 @@
 */
package androidx.media3.exoplayer.image;

import static androidx.media3.common.PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK;
import static androidx.media3.common.C.FIRST_FRAME_NOT_RENDERED;
import static androidx.media3.common.C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED;
import static androidx.media3.common.C.FIRST_FRAME_RENDERED;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;

import android.graphics.Bitmap;
import androidx.annotation.IntDef;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.LongArrayQueue;
import androidx.media3.common.util.TraceUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.DecoderInputBuffer;
@@ -35,31 +41,61 @@ import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.source.SampleStream;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;

// TODO(b/289989736): Currently works for one stream only. Refactor so that it works for multiple
// inputs streams.
/** A {@link Renderer} implementation for images. */
@UnstableApi
public final class ImageRenderer extends BaseRenderer {

  private static final String TAG = "ImageRenderer";

  private final DecoderInputBuffer flagsOnlyBuffer;
  /** Decoder reinitialization states. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @Target(TYPE_USE)
  @IntDef({
    REINITIALIZATION_STATE_NONE,
    REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM_THEN_WAIT,
    REINITIALIZATION_STATE_WAIT_END_OF_STREAM
  })
  private @interface ReinitializationState {}

  /** The decoder does not need to be re-initialized. */
  private static final int REINITIALIZATION_STATE_NONE = 0;

  /**
   * The input format has changed in a way that requires the decoder to be re-initialized, but we
   * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to
   * ensure that it outputs any remaining buffers before we release it.
   */
  private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM_THEN_WAIT = 2;

  /**
   * The input format has changed in a way that requires the decoder to be re-initialized, and we've
   * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an
   * end of stream signal to indicate that it has output any remaining buffers before we release it.
   */
  private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 3;

  private final ImageDecoder.Factory decoderFactory;
  private final ImageOutput imageOutput;
  private final DecoderInputBuffer flagsOnlyBuffer;
  private final LongArrayQueue offsetQueue;

  private @C.FirstFrameState int firstFrameState;
  private boolean inputStreamEnded;
  private boolean outputStreamEnded;
  private long durationUs;
  private long offsetUs;
  private @ReinitializationState int decoderReinitializationState;
  private @C.FirstFrameState int firstFrameState;
  private @Nullable Format inputFormat;
  private @Nullable ImageDecoder decoder;
  private @Nullable DecoderInputBuffer inputBuffer;
  private @Nullable ImageOutputBuffer outputBuffer;
  private @MonotonicNonNull Format inputFormat;

  /**
   * Creates an instance.
@@ -71,11 +107,12 @@ public final class ImageRenderer extends BaseRenderer {
   */
  public ImageRenderer(ImageDecoder.Factory decoderFactory, ImageOutput imageOutput) {
    super(C.TRACK_TYPE_IMAGE);
    flagsOnlyBuffer = DecoderInputBuffer.newNoDataInstance();
    this.decoderFactory = decoderFactory;
    this.imageOutput = imageOutput;
    durationUs = C.TIME_UNSET;
    firstFrameState = C.FIRST_FRAME_NOT_RENDERED;
    flagsOnlyBuffer = DecoderInputBuffer.newNoDataInstance();
    offsetQueue = new LongArrayQueue();
    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
    firstFrameState = FIRST_FRAME_NOT_RENDERED;
  }

  @Override
@@ -90,11 +127,11 @@ public final class ImageRenderer extends BaseRenderer {

  @Override
  public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    checkState(durationUs != C.TIME_UNSET);
    if (outputStreamEnded) {
      return;
    }

    // If the offsetQueue is empty, we haven't been given a stream to render.
    checkState(!offsetQueue.isEmpty());
    if (inputFormat == null) {
      // We don't have a format yet, so try and read one.
      FormatHolder formatHolder = getFormatHolder();
@@ -102,9 +139,9 @@ public final class ImageRenderer extends BaseRenderer {
      @SampleStream.ReadDataResult
      int result = readSource(formatHolder, flagsOnlyBuffer, FLAG_REQUIRE_FORMAT);
      if (result == C.RESULT_FORMAT_READ) {
        // Note that this works because we only expect to enter this if-condition once per playback
        // for now.
        maybeInitDecoder(checkNotNull(formatHolder.format));
        // Note that this works because we only expect to enter this if-condition once per playback.
        inputFormat = checkNotNull(formatHolder.format);
        initDecoder();
      } else if (result == C.RESULT_BUFFER_READ) {
        // End of stream read having not read a format.
        checkState(flagsOnlyBuffer.isEndOfStream());
@@ -116,7 +153,6 @@ public final class ImageRenderer extends BaseRenderer {
        return;
      }
    }

    try {
      // Rendering loop.
      TraceUtil.beginSection("drainAndFeedDecoder");
@@ -130,7 +166,9 @@ public final class ImageRenderer extends BaseRenderer {

  @Override
  public boolean isReady() {
    return firstFrameState == C.FIRST_FRAME_RENDERED;
    return firstFrameState == FIRST_FRAME_RENDERED
        || (firstFrameState == FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED
            && outputBuffer != null);
  }

  @Override
@@ -138,6 +176,14 @@ public final class ImageRenderer extends BaseRenderer {
    return outputStreamEnded;
  }

  @Override
  protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) {
    firstFrameState =
        mayRenderStartOfStream
            ? C.FIRST_FRAME_NOT_RENDERED
            : C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED;
  }

  @Override
  protected void onStreamChanged(
      Format[] formats,
@@ -145,36 +191,41 @@ public final class ImageRenderer extends BaseRenderer {
      long offsetUs,
      MediaSource.MediaPeriodId mediaPeriodId)
      throws ExoPlaybackException {
    // TODO(b/289989736): when the mediaPeriodId is signalled to the renders, collect and set
    // durationUs here.
    durationUs = 2 * C.MICROS_PER_SECOND;
    this.offsetUs = offsetUs;
    super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
    offsetQueue.add(offsetUs);
    inputStreamEnded = false;
    outputStreamEnded = false;
  }

  @Override
  protected void onPositionReset(long positionUs, boolean joining) {
    // Since the renderer only supports playing one image from, this is currently a no-op (don't
    // need to consider a new stream because it will be the same as the last one).
    lowerFirstFrameState(FIRST_FRAME_NOT_RENDERED);
  }

  @Override
  protected void onDisabled() {
    releaseResources();
    offsetQueue.clear();
    inputFormat = null;
    releaseDecoderResources();
    imageOutput.onDisabled();
  }

  @Override
  protected void onReset() {
    releaseResources();
    offsetQueue.clear();
    releaseDecoderResources();
    lowerFirstFrameState(FIRST_FRAME_NOT_RENDERED);
  }

  @Override
  protected void onRelease() {
    releaseResources();
    offsetQueue.clear();
    releaseDecoderResources();
  }

  /**
   * Attempts to dequeue an output buffer from the decoder and, if successful, renders it.
   * Attempts to dequeue an output buffer from the decoder and, if successful and permitted to,
   * renders it.
   *
   * @param positionUs The player's current position.
   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
@@ -183,7 +234,7 @@ public final class ImageRenderer extends BaseRenderer {
   * @throws ImageDecoderException If an error occurs draining the output buffer.
   */
  private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
      throws ImageDecoderException {
      throws ImageDecoderException, ExoPlaybackException {
    if (outputBuffer == null) {
      checkStateNotNull(decoder);
      outputBuffer = decoder.dequeueOutputBuffer();
@@ -191,27 +242,45 @@ public final class ImageRenderer extends BaseRenderer {
        return false;
      }
    }
    if (outputBuffer.isEndOfStream()) {
      outputBuffer.release();
      outputBuffer = null;
      outputStreamEnded = true;
    if (firstFrameState == FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED
        && getState() != STATE_STARTED) {
      return false;
    }

    if (checkNotNull(outputBuffer).isEndOfStream()) {
      offsetQueue.remove();
      if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
        // We're waiting to re-initialize the decoder, and have now processed all final buffers.
        releaseDecoderResources();
        checkStateNotNull(inputFormat);
        initDecoder();
      } else {
        checkNotNull(outputBuffer).release();
        outputBuffer = null;
        if (offsetQueue.isEmpty()) {
          outputStreamEnded = true;
        }
      }
      return false;
    }
    checkStateNotNull(outputBuffer);
    if (!processOutputBuffer(positionUs, elapsedRealtimeUs)) {
      return false;
    }

    firstFrameState = C.FIRST_FRAME_RENDERED;
    firstFrameState = FIRST_FRAME_RENDERED;
    return true;
  }

  @RequiresNonNull("outputBuffer")
  @SuppressWarnings("unused") // Will be used or removed when the integrated with the videoSink.
  @RequiresNonNull("outputBuffer")
  private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs) {
    checkStateNotNull(
        outputBuffer.bitmap, "Non-EOS buffer came back from the decoder without bitmap.");
    imageOutput.onImageAvailable(positionUs - offsetUs, outputBuffer.bitmap);
    Bitmap outputBitmap =
        checkNotNull(
            outputBuffer.bitmap, "Non-EOS buffer came back from the decoder without bitmap.");
    if (positionUs < outputBuffer.timeUs) {
      // It's too early to render the buffer.
      return false;
    }
    imageOutput.onImageAvailable(outputBuffer.timeUs - offsetQueue.element(), outputBitmap);
    checkNotNull(outputBuffer).release();
    outputBuffer = null;
    return true;
@@ -220,9 +289,12 @@ public final class ImageRenderer extends BaseRenderer {
  /**
   * @return Whether we can feed more input data to the decoder.
   */
  private boolean feedInputBuffer() throws ExoPlaybackException, ImageDecoderException {
  private boolean feedInputBuffer() throws ImageDecoderException {
    FormatHolder formatHolder = getFormatHolder();
    if (decoder == null || inputStreamEnded) {
    if (decoder == null
        || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM
        || inputStreamEnded) {
      // We need to reinitialize the decoder or the input stream has ended.
      return false;
    }
    if (inputBuffer == null) {
@@ -231,6 +303,14 @@ public final class ImageRenderer extends BaseRenderer {
        return false;
      }
    }
    if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM_THEN_WAIT) {
      checkStateNotNull(inputBuffer);
      inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
      checkNotNull(decoder).queueInputBuffer(inputBuffer);
      inputBuffer = null;
      decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM;
      return false;
    }
    switch (readSource(formatHolder, inputBuffer, /* readFlags= */ 0)) {
      case C.RESULT_NOTHING_READ:
        return false;
@@ -245,26 +325,18 @@ public final class ImageRenderer extends BaseRenderer {
        inputBuffer = null;
        return true;
      case C.RESULT_FORMAT_READ:
        if (checkNotNull(formatHolder.format).equals(inputFormat)) {
          return true;
        }
        throw createRendererException(
            new UnsupportedOperationException(
                "Changing format is not supported in the ImageRenderer."),
            formatHolder.format,
            ERROR_CODE_FAILED_RUNTIME_CHECK);
        inputFormat = checkNotNull(formatHolder.format);
        decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM_THEN_WAIT;
        return true;
      default:
        throw new IllegalStateException();
    }
  }

  @RequiresNonNull("inputFormat")
  @EnsuresNonNull("decoder")
  private void maybeInitDecoder(Format format) throws ExoPlaybackException {
    if (inputFormat != null && inputFormat.equals(format) && decoder != null) {
      return;
    }
    inputFormat = format;
    if (canCreateDecoderForFormat(format)) {
  private void initDecoder() throws ExoPlaybackException {
    if (canCreateDecoderForFormat(inputFormat)) {
      if (decoder != null) {
        decoder.release();
      }
@@ -272,7 +344,7 @@ public final class ImageRenderer extends BaseRenderer {
    } else {
      throw createRendererException(
          new ImageDecoderException("Provided decoder factory can't create decoder for format."),
          format,
          inputFormat,
          PlaybackException.ERROR_CODE_DECODING_FORMAT_UNSUPPORTED);
    }
  }
@@ -283,12 +355,17 @@ public final class ImageRenderer extends BaseRenderer {
        || supportsFormat == RendererCapabilities.create(C.FORMAT_EXCEEDS_CAPABILITIES);
  }

  private void releaseResources() {
  private void lowerFirstFrameState(@C.FirstFrameState int firstFrameState) {
    this.firstFrameState = min(this.firstFrameState, firstFrameState);
  }

  private void releaseDecoderResources() {
    inputBuffer = null;
    if (outputBuffer != null) {
      outputBuffer.release();
    }
    outputBuffer = null;
    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
    if (decoder != null) {
      decoder.release();
      decoder = null;
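Taken together, the multi-stream handling in ImageRenderer rests on the per-stream offset queue: onStreamChanged pushes the offset of every enabled or replaced sample stream, processOutputBuffer subtracts the offset at the head of the queue when reporting a bitmap, and the end-of-stream buffer of a stream pops that offset. A rough standalone walk-through of that bookkeeping, with illustrative values that are not part of the change and assuming buffer timestamps include the renderer offset, might be:

import androidx.media3.common.util.LongArrayQueue;

/** Hypothetical, standalone sketch of the offset bookkeeping used above. */
final class OffsetQueueSketch {
  public static void main(String[] args) {
    LongArrayQueue offsetQueue = new LongArrayQueue();

    // onStreamChanged adds the offset of each image stream as it is enabled/replaced.
    offsetQueue.add(/* offsetUs= */ 0);         // first image stream
    offsetQueue.add(/* offsetUs= */ 2_000_000); // second image stream

    // processOutputBuffer reports buffer time minus the head offset to ImageOutput.
    long bufferTimeUs = 0;
    long presentationTimeUs = bufferTimeUs - offsetQueue.element(); // 0 for the first stream

    // drainOutputBuffer pops the head offset on a stream's end-of-stream buffer, so
    // buffers of the second stream are then reported relative to 2_000_000.
    offsetQueue.remove();
    System.out.println(presentationTimeUs + ", next offset " + offsetQueue.element());
  }
}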
@@ -1,5 +1,5 @@
/*
 * Copyright (C) 2020 The Android Open Source Project
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -30,7 +30,13 @@ import androidx.media3.test.utils.FakeClock;
import androidx.media3.test.utils.robolectric.PlaybackOutput;
import androidx.media3.test.utils.robolectric.TestPlayerRunHelper;
import androidx.test.core.app.ApplicationProvider;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.ParameterizedRobolectricTestRunner;
@@ -42,17 +48,23 @@ import org.robolectric.annotation.GraphicsMode;
@RunWith(ParameterizedRobolectricTestRunner.class)
@GraphicsMode(value = NATIVE)
public class ImagePlaybackTest {

  @Parameter public String inputFile;
  @Parameter public Set<String> inputFiles;

  @Parameters(name = "{0}")
  public static ImmutableList<String> mediaSamples() {
    // TODO(b/289989736): When extraction for other types of images is implemented, add those image
    // types to this list.
    // Robolectric's NativeShadowBitmapFactory doesn't support decoding HEIF format, so we don't
    // test that format here.
    return ImmutableList.of(
        "png/non-motion-photo-shortened.png", "jpeg/non-motion-photo-shortened.jpg");
  public static List<Set<String>> mediaSamples() {
    // Robolectric's ShadowNativeBitmapFactory doesn't support decoding HEIF format, so we don't
    // test that here.
    // TODO b/300457060 - Find out why jpegs cause flaky failures in this test and then add jpegs to
    // this list if possible.
    return new ArrayList<>(
        Collections2.filter(
            Sets.powerSet(
                ImmutableSet.of(
                    "bitmap/input_images/media3test.png",
                    "bmp/non-motion-photo-shortened-cropped.bmp",
                    "png/non-motion-photo-shortened.png",
                    "webp/ic_launcher_round.webp")),
            /* predicate= */ input -> !input.isEmpty()));
  }

  @Test
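The parameters are every non-empty subset of the four sample images, built with Guava's power-set helper. A self-contained sketch of the same construction, using placeholder file names rather than the real assets, shows how many cases this yields:

import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

final class PowerSetSketch {
  public static void main(String[] args) {
    // Sets.powerSet returns all 2^n subsets; filtering out the empty set leaves 2^n - 1.
    List<Set<String>> cases =
        new ArrayList<>(
            Collections2.filter(
                Sets.powerSet(ImmutableSet.of("a.png", "b.bmp", "c.png", "d.webp")),
                /* predicate= */ input -> !input.isEmpty()));
    System.out.println(cases.size()); // 15 parameter combinations for 4 images
  }
}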
@@ -64,23 +76,43 @@ public class ImagePlaybackTest {
    ExoPlayer player =
        new ExoPlayer.Builder(applicationContext, renderersFactory).setClock(clock).build();
    PlaybackOutput playbackOutput = PlaybackOutput.register(player, renderersFactory);
    long durationMs = 5 * C.MILLIS_PER_SECOND;
    player.setMediaItem(
        new MediaItem.Builder()
            .setUri("asset:///media/" + inputFile)
            .setImageDurationMs(durationMs)
            .build());
    List<String> sortedInputFiles = new ArrayList<>(inputFiles);
    Collections.sort(sortedInputFiles);
    List<MediaItem> mediaItems = new ArrayList<>(inputFiles.size());
    long totalDurationMs = 0;
    long currentDurationMs = 3 * C.MILLIS_PER_SECOND;
    for (String inputFile : sortedInputFiles) {
      mediaItems.add(
          new MediaItem.Builder()
              .setUri("asset:///media/" + inputFile)
              .setImageDurationMs(currentDurationMs)
              .build());
      totalDurationMs += currentDurationMs;
      if (currentDurationMs < 5 * C.MILLIS_PER_SECOND) {
        currentDurationMs += C.MILLIS_PER_SECOND;
      }
    }
    player.setMediaItems(mediaItems);
    player.prepare();

    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
    long playerStartedMs = clock.elapsedRealtime();
    player.play();
    TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
    long playbackDurationMs = clock.elapsedRealtime() - playerStartedMs;
    player.release();

    assertThat(playbackDurationMs).isEqualTo(durationMs);
    assertThat(playbackDurationMs).isEqualTo(totalDurationMs);
    DumpFileAsserts.assertOutput(
        applicationContext, playbackOutput, "playbackdumps/" + inputFile + ".dump");
        applicationContext,
        playbackOutput,
        "playbackdumps/image/" + generateName(sortedInputFiles) + ".dump");
  }

  private static String generateName(List<String> sortedInputFiles) {
    StringBuilder name = new StringBuilder();
    for (String inputFile : sortedInputFiles) {
      name.append(inputFile, inputFile.lastIndexOf("/") + 1, inputFile.length()).append("+");
    }
    name.setLength(name.length() - 1);
    return name.toString();
  }
}
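As an illustration, generateName keeps only the file name of each sorted input and joins them with "+", so the pair bmp/non-motion-photo-shortened-cropped.bmp and png/non-motion-photo-shortened.png produces the dump name non-motion-photo-shortened-cropped.bmp+non-motion-photo-shortened.png, matching the per-combination dump files added further below.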
@@ -20,11 +20,12 @@ import static androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem.o
import static com.google.common.truth.Truth.assertThat;

import android.graphics.Bitmap;
import android.util.Pair;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.TimedValueQueue;
import androidx.media3.common.util.SystemClock;
import androidx.media3.exoplayer.RendererConfiguration;
import androidx.media3.exoplayer.analytics.PlayerId;
import androidx.media3.exoplayer.drm.DrmSessionEventListener;
@@ -34,34 +35,65 @@ import androidx.media3.exoplayer.upstream.DefaultAllocator;
import androidx.media3.test.utils.FakeSampleStream;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Unit test for {@link ImageRenderer}. */
/** Unit tests for {@link ImageRenderer}. */
@RunWith(AndroidJUnit4.class)
public class ImageRendererTest {

  private static final Format FORMAT =
  private static final long DEFAULT_LOOP_TIMEOUT_MS = 10 * C.MILLIS_PER_SECOND;
  private static final String IS_READY_TIMEOUT_MESSAGE =
      "Renderer not ready after " + DEFAULT_LOOP_TIMEOUT_MS + " milliseconds.";
  private static final String IS_ENDED_TIMEOUT_MESSAGE =
      "Renderer not ended after " + DEFAULT_LOOP_TIMEOUT_MS + " milliseconds.";
  private static final String HAS_READ_STREAM_TO_END_TIMEOUT_MESSAGE =
      "Renderer has not read stream to end after " + DEFAULT_LOOP_TIMEOUT_MS + " milliseconds.";
  private static final Format PNG_FORMAT =
      new Format.Builder()
          .setContainerMimeType(MimeTypes.IMAGE_PNG)
          .setTileCountVertical(1)
          .setTileCountHorizontal(1)
          .build();
  private static final Format JPEG_FORMAT =
      new Format.Builder()
          .setContainerMimeType(MimeTypes.IMAGE_JPEG)
          .setTileCountVertical(1)
          .setTileCountHorizontal(1)
          .build();

  private final TimedValueQueue<Bitmap> renderedBitmaps = new TimedValueQueue<>();
  private final Bitmap fakeDecodedBitmap =
  private final List<Pair<Long, Bitmap>> renderedBitmaps = new ArrayList<>();
  private final Bitmap fakeDecodedBitmap1 =
      Bitmap.createBitmap(/* width= */ 1, /* height= */ 1, Bitmap.Config.ARGB_8888);
  private final Bitmap fakeDecodedBitmap2 =
      Bitmap.createBitmap(/* width= */ 2, /* height= */ 2, Bitmap.Config.ARGB_8888);

  private ImageRenderer renderer;
  private int decodeCallCount;

  @Before
  public void setUp() throws Exception {
    decodeCallCount = 0;
    ImageDecoder.Factory fakeDecoderFactory =
        new DefaultImageDecoder.Factory((data, length) -> fakeDecodedBitmap);
    ImageOutput capturingImageOutput = renderedBitmaps::add;
    renderer = new ImageRenderer(fakeDecoderFactory, capturingImageOutput);
        new DefaultImageDecoder.Factory(
            (data, length) -> ++decodeCallCount == 1 ? fakeDecodedBitmap1 : fakeDecodedBitmap2);
    ImageOutput queuingImageOutput =
        new ImageOutput() {
          @Override
          public void onImageAvailable(long presentationTimeUs, Bitmap bitmap) {
            renderedBitmaps.add(Pair.create(presentationTimeUs, bitmap));
          }

          @Override
          public void onDisabled() {
            // Do nothing.
          }
        };
    renderer = new ImageRenderer(fakeDecoderFactory, queuingImageOutput);
    renderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
  }

@@ -73,22 +105,12 @@ public class ImageRendererTest {
  }

  @Test
  public void renderOneStream_rendersToImageOutput() throws Exception {
    FakeSampleStream fakeSampleStream =
        new FakeSampleStream(
            new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
            /* mediaSourceEventDispatcher= */ null,
            DrmSessionManager.DRM_UNSUPPORTED,
            new DrmSessionEventListener.EventDispatcher(),
            FORMAT,
            ImmutableList.of(
                oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME), END_OF_STREAM_ITEM));
  public void renderOneStream_withMayRenderStartOfStream_rendersToImageOutput() throws Exception {
    FakeSampleStream fakeSampleStream = createSampleStream(/* timeUs= */ 0);
    fakeSampleStream.writeData(/* startPositionUs= */ 0);
    // TODO(b/289989736): When the mediaPeriodId is signalled to the renders set durationUs here and
    // assert on it.
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {FORMAT},
        new Format[] {PNG_FORMAT},
        fakeSampleStream,
        /* positionUs= */ 0,
        /* joining= */ false,
@@ -98,15 +120,168 @@ public class ImageRendererTest {
        new MediaSource.MediaPeriodId(new Object()));
    renderer.setCurrentStreamFinal();

    while (!renderer.isReady()) {
    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }

    assertThat(renderedBitmaps.size()).isEqualTo(1);
    assertThat(renderedBitmaps.poll(0)).isSameInstanceAs(fakeDecodedBitmap);
    assertThat(renderedBitmaps).hasSize(1);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0L);
    assertThat(renderedBitmaps.get(0).second).isSameInstanceAs(fakeDecodedBitmap1);
  }

    renderer.render(
        /* positionUs= */ C.MICROS_PER_SECOND, /* elapsedRealtimeUs= */ C.MICROS_PER_SECOND);
    assertThat(renderer.isEnded()).isTrue();
  @Test
  public void renderOneStream_withoutMayRenderStartOfStream_rendersToImageOutput()
      throws Exception {
    FakeSampleStream fakeSampleStream = createSampleStream(/* timeUs= */ 0);
    fakeSampleStream.writeData(/* startPositionUs= */ 0);
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ false,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));
    renderer.setCurrentStreamFinal();

    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    assertThat(renderedBitmaps).isEmpty();
    renderer.start();
    StopWatch isEndedStopWatch = new StopWatch(IS_ENDED_TIMEOUT_MESSAGE);
    while (!renderer.isEnded() && isEndedStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    renderer.stop();

    assertThat(renderedBitmaps).hasSize(1);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0L);
    assertThat(renderedBitmaps.get(0).second).isSameInstanceAs(fakeDecodedBitmap1);
  }

  @Test
  public void renderTwoStreams_sameFormat_rendersToImageOutput() throws Exception {
    FakeSampleStream fakeSampleStream1 = createSampleStream(/* timeUs= */ 0);
    fakeSampleStream1.writeData(/* startPositionUs= */ 0);
    FakeSampleStream fakeSampleStream2 = createSampleStream(/* timeUs= */ 10);
    fakeSampleStream2.writeData(/* startPositionUs= */ 10);
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream1,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));

    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    renderer.start();
    StopWatch hasReadStreamToEndStopWatch = new StopWatch(HAS_READ_STREAM_TO_END_TIMEOUT_MESSAGE);
    while (!renderer.hasReadStreamToEnd() && hasReadStreamToEndStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    renderer.replaceStream(
        new Format[] {PNG_FORMAT},
        fakeSampleStream2,
        /* startPositionUs= */ 10,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));
    renderer.setCurrentStreamFinal();
    StopWatch isEndedStopWatch = new StopWatch(IS_ENDED_TIMEOUT_MESSAGE);
    while (!renderer.isEnded() && isEndedStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 10, /* elapsedRealtimeUs= */ 0);
    }
    renderer.stop();

    assertThat(renderedBitmaps).hasSize(2);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0L);
    assertThat(renderedBitmaps.get(0).second).isSameInstanceAs(fakeDecodedBitmap1);
    assertThat(renderedBitmaps.get(1).first).isEqualTo(10L);
    assertThat(renderedBitmaps.get(1).second).isSameInstanceAs(fakeDecodedBitmap2);
  }

  @Test
  public void renderTwoStreams_differentFormat_rendersToImageOutput() throws Exception {
    FakeSampleStream fakeSampleStream1 = createSampleStream(/* timeUs= */ 0);
    fakeSampleStream1.writeData(/* startPositionUs= */ 0);
    FakeSampleStream fakeSampleStream2 = createSampleStream(/* timeUs= */ 10);
    fakeSampleStream2.writeData(/* startPositionUs= */ 10);
    renderer.enable(
        RendererConfiguration.DEFAULT,
        new Format[] {PNG_FORMAT},
        fakeSampleStream1,
        /* positionUs= */ 0,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ 0,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));

    StopWatch isReadyStopWatch = new StopWatch(IS_READY_TIMEOUT_MESSAGE);
    while (!renderer.isReady() && isReadyStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    renderer.start();
    StopWatch hasReadStreamToEndStopWatch = new StopWatch(HAS_READ_STREAM_TO_END_TIMEOUT_MESSAGE);
    while (!renderer.hasReadStreamToEnd() && hasReadStreamToEndStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
    }
    renderer.replaceStream(
        new Format[] {JPEG_FORMAT},
        fakeSampleStream2,
        /* startPositionUs= */ 10,
        /* offsetUs= */ 0,
        new MediaSource.MediaPeriodId(new Object()));
    renderer.setCurrentStreamFinal();
    StopWatch isEndedStopWatch = new StopWatch(IS_ENDED_TIMEOUT_MESSAGE);
    while (!renderer.isEnded() && isEndedStopWatch.ensureNotExpired()) {
      renderer.render(/* positionUs= */ 10, /* elapsedRealtimeUs= */ 0);
    }
    renderer.stop();

    assertThat(renderedBitmaps).hasSize(2);
    assertThat(renderedBitmaps.get(0).first).isEqualTo(0L);
    assertThat(renderedBitmaps.get(0).second).isSameInstanceAs(fakeDecodedBitmap1);
    assertThat(renderedBitmaps.get(1).first).isEqualTo(10L);
    assertThat(renderedBitmaps.get(1).second).isSameInstanceAs(fakeDecodedBitmap2);
  }

  private static FakeSampleStream createSampleStream(long timeUs) {
    return new FakeSampleStream(
        new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
        /* mediaSourceEventDispatcher= */ null,
        DrmSessionManager.DRM_UNSUPPORTED,
        new DrmSessionEventListener.EventDispatcher(),
        PNG_FORMAT,
        ImmutableList.of(oneByteSample(timeUs, C.BUFFER_FLAG_KEY_FRAME), END_OF_STREAM_ITEM));
  }

  private static final class StopWatch {
    private final long startTimeMs;
    private final long timeOutMs;
    private final String timeoutMessage;

    public StopWatch(String timeoutMessage) {
      startTimeMs = SystemClock.DEFAULT.currentTimeMillis();
      timeOutMs = DEFAULT_LOOP_TIMEOUT_MS;
      this.timeoutMessage = timeoutMessage;
    }

    public boolean ensureNotExpired() throws TimeoutException {
      if (startTimeMs + timeOutMs < SystemClock.DEFAULT.currentTimeMillis()) {
        throw new TimeoutException(timeoutMessage);
      }
      return true;
    }
  }
}
@@ -0,0 +1,5 @@
ImageOutput:
  rendered image count = 1
  image output #1:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,11 @@
ImageOutput:
  rendered image count = 3
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #3:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,14 @@
ImageOutput:
  rendered image count = 4
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #3:
    presentationTimeUs = 0
    bitmap hash = 1367007828
  image output #4:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,11 @@
ImageOutput:
  rendered image count = 3
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #3:
    presentationTimeUs = 0
    bitmap hash = 1367007828
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = -1851534335
@@ -0,0 +1,11 @@
ImageOutput:
  rendered image count = 3
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 1367007828
  image output #3:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 1367007828
@@ -0,0 +1,5 @@
ImageOutput:
  rendered image count = 1
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -389047680
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,11 @@
ImageOutput:
  rendered image count = 3
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 1367007828
  image output #3:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -1851534335
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 1367007828
@@ -0,0 +1,5 @@
ImageOutput:
  rendered image count = 1
  image output #1:
    presentationTimeUs = 0
    bitmap hash = -1851534335
@@ -0,0 +1,8 @@
ImageOutput:
  rendered image count = 2
  image output #1:
    presentationTimeUs = 0
    bitmap hash = 1367007828
  image output #2:
    presentationTimeUs = 0
    bitmap hash = 443865884
@@ -0,0 +1,5 @@
ImageOutput:
  rendered image count = 1
  image output #1:
    presentationTimeUs = 0
    bitmap hash = 1367007828
@@ -57,6 +57,11 @@ public final class CapturingImageOutput implements Dumpable, ImageOutput {
        });
  }

  @Override
  public void onDisabled() {
    // Do nothing.
  }

  @Override
  public void dump(Dumper dumper) {
    dumper.startBlock("ImageOutput");