Use ExoPlayer in DecodeOneFrameUtil

The old code that uses MediaCodec directly has a race condition
that causes the decoder to incorrectly crop the decoded picture.
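
For readers skimming the diff below, here is the new approach distilled into a standalone sketch; it is not a verbatim excerpt. The variable names and the caller-supplied context, surface, and asset Uri are illustrative, and error propagation (opening the condition variable on a playback error and rethrowing) is omitted here although the real utility does it.

import android.os.Handler;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.analytics.AnalyticsListener;
import java.util.concurrent.TimeoutException;

// All player interaction is posted to the player's application looper, so there is no manual
// MediaCodec buffer sequencing for the test thread to race against. The test thread only blocks
// on a ConditionVariable until the first frame has been rendered.
// Assumes the enclosing method declares `throws Exception`; context, surface and assetUri are
// provided by the caller.
ConditionVariable firstFrameRendered = new ConditionVariable();
ExoPlayer player = new ExoPlayer.Builder(context).build();
Handler handler = new Handler(player.getApplicationLooper());
handler.post(
    () -> {
      player.addAnalyticsListener(
          new AnalyticsListener() {
            @Override
            public void onRenderedFirstFrame(
                EventTime eventTime, Object output, long renderTimeMs) {
              firstFrameRendered.open();
            }
          });
      player.setVideoSurface(surface);
      player.setMediaItem(MediaItem.fromUri(assetUri));
      player.setPlayWhenReady(false); // Decode and render the first frame without playing.
      player.prepare();
    });
if (!firstFrameRendered.block(/* timeoutMs= */ 5_000)) {
  throw new TimeoutException("First frame was not rendered within 5 seconds.");
}
handler.post(player::release);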

PiperOrigin-RevId: 690620868
commit fe14525a97
parent 9e088ac2b8
Authored by dancho on 2024-10-28 08:28:30 -07:00; committed by Copybara-Service
2 changed files with 88 additions and 252 deletions

File: androidx/media3/test/utils/DecodeOneFrameUtil.java

@@ -17,34 +17,31 @@
package androidx.media3.test.utils;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.MediaFormatUtil.createMediaFormatFromFormat;
import static androidx.media3.test.utils.TestUtil.buildAssetUri;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.round;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.MediaFormatUtil;
import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import java.io.IOException;
import java.nio.ByteBuffer;
import androidx.media3.exoplayer.DecoderReuseEvaluation;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.analytics.AnalyticsListener;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
/** Utilities for decoding a video frame for tests. */
@UnstableApi
public final class DecodeOneFrameUtil {
public static final String NO_DECODER_SUPPORT_ERROR_STRING =
"No MediaCodec decoders on this device support this value.";
/** Listener for decoding events. */
public interface Listener {
/** Called when the video {@link MediaFormat} is extracted from the container. */
@@ -57,174 +54,86 @@ public final class DecodeOneFrameUtil {
void onFrameDecoded(MediaFormat mediaFormat);
}
/** Timeout for dequeueing buffers from the codec, in microseconds. */
private static final int DEQUEUE_TIMEOUT_US = 5_000_000;
/** Timeout for reading, decoding and rendering a video frame, in milliseconds. */
private static final int TIMEOUT_MS = 5_000;
/**
* Reads and decodes one frame from the {@code assetFilePath} and renders it to the {@code
* surface}.
* Reads and decodes one frame synchronously from the {@code assetFilePath} and renders it to the
* {@code surface}.
*
* <p>This method blocks until the frame has been rendered to the {@code surface}.
*
* @param assetFilePath The path to the file in the asset directory.
* @param listener A {@link Listener} implementation.
* @param surface The {@link Surface} to render the decoded frame to, {@code null} if the decoded
* frame is not needed.
* @throws IOException If the {@link MediaExtractor} or {@link MediaCodec} cannot be created.
*/
public static void decodeOneAssetFileFrame(
String assetFilePath, Listener listener, @Nullable Surface surface) throws IOException {
MediaExtractor mediaExtractor = new MediaExtractor();
Context context = getApplicationContext();
try (AssetFileDescriptor afd = context.getAssets().openFd(assetFilePath)) {
mediaExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
if (surface == null) {
decodeOneVideoFrame(mediaExtractor, listener);
} else {
decodeOneVideoFrame(mediaExtractor, listener, surface);
}
} finally {
mediaExtractor.release();
}
}
/**
* Reads and decodes one video frame from the {@code mediaExtractor} and renders it to the {@code
* surface}.
*
* <p>A placeholder surface is used.
*
* @param mediaExtractor The {@link MediaExtractor} with a {@link
* MediaExtractor#setDataSource(String) data source set}.
* @param listener A {@link Listener} implementation.
* @throws UnsupportedOperationException If there is no supported {@linkplain MediaCodec decoders}
* available.
* @throws IOException If the {@link MediaExtractor} or {@link MediaCodec} cannot be created.
*/
private static void decodeOneVideoFrame(MediaExtractor mediaExtractor, Listener listener)
throws IOException {
@Nullable SurfaceTexture placeholderSurfaceTexture = null;
@Nullable Surface placeholderSurface = null;
try {
placeholderSurfaceTexture = new SurfaceTexture(/* texName= */ 0);
placeholderSurface = new Surface(placeholderSurfaceTexture);
decodeOneVideoFrame(mediaExtractor, listener, placeholderSurface);
} finally {
if (placeholderSurfaceTexture != null) {
placeholderSurfaceTexture.release();
}
if (placeholderSurface != null) {
placeholderSurface.release();
}
}
}
/**
* Reads and decodes one video frame from the {@code mediaExtractor} and renders it to the {@code
* surface}.
*
* @param mediaExtractor The {@link MediaExtractor} with a {@link
* MediaExtractor#setDataSource(String) data source set}.
* @param listener A {@link Listener} implementation.
* @param surface The {@link Surface} to render the decoded frame to.
* @throws IOException If the {@link MediaCodec} cannot be created.
* @throws UnsupportedOperationException If there is no supported {@linkplain MediaCodec decoders}
* available.
*/
private static void decodeOneVideoFrame(
MediaExtractor mediaExtractor, Listener listener, Surface surface) throws IOException {
@Nullable MediaFormat mediaFormat = null;
@Nullable MediaCodec mediaCodec = null;
@SuppressWarnings("CatchingUnchecked")
public static void decodeOneAssetFileFrame(
String assetFilePath, Listener listener, Surface surface) throws Exception {
Context context = getApplicationContext();
AtomicReference<@NullableType Exception> unexpectedExceptionReference = new AtomicReference<>();
AtomicReference<@NullableType PlaybackException> playbackExceptionReference =
new AtomicReference<>();
ConditionVariable firstFrameRenderedOrError = new ConditionVariable();
try {
for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {
if (MimeTypes.isVideo(mediaExtractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME))) {
mediaFormat = mediaExtractor.getTrackFormat(i);
listener.onContainerExtracted(checkNotNull(mediaFormat));
mediaExtractor.selectTrack(i);
break;
}
}
ExoPlayer exoPlayer = new ExoPlayer.Builder(context).build();
Handler handler = new Handler(exoPlayer.getApplicationLooper());
AnalyticsListener analyticsListener =
new AnalyticsListener() {
@Override
public void onVideoInputFormatChanged(
EventTime eventTime,
Format format,
@Nullable DecoderReuseEvaluation decoderReuseEvaluation) {
listener.onContainerExtracted(createMediaFormatFromFormat(format));
}
@Nullable String decoderName = getSupportedDecoderName(checkStateNotNull(mediaFormat));
if (decoderName == null) {
throw new UnsupportedOperationException(NO_DECODER_SUPPORT_ERROR_STRING);
}
mediaCodec = MediaCodec.createByCodecName(decoderName);
@Override
public void onRenderedFirstFrame(EventTime eventTime, Object output, long renderTimeMs) {
listener.onFrameDecoded(
createMediaFormatFromFormat(checkNotNull(exoPlayer.getVideoFormat())));
firstFrameRenderedOrError.open();
}
// Queue the first video frame from the extractor.
mediaCodec.configure(mediaFormat, surface, /* crypto= */ null, /* flags= */ 0);
mediaCodec.start();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
ByteBuffer inputBuffer = checkNotNull(mediaCodec.getInputBuffers()[inputBufferIndex]);
int sampleSize = mediaExtractor.readSampleData(inputBuffer, /* offset= */ 0);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
sampleSize,
mediaExtractor.getSampleTime(),
mediaExtractor.getSampleFlags());
@Override
public void onEvents(Player player, Events events) {
if (events.contains(EVENT_PLAYER_ERROR)) {
playbackExceptionReference.set(checkNotNull(player.getPlayerError()));
firstFrameRenderedOrError.open();
}
}
};
// Queue an end-of-stream buffer to force the codec to produce output.
inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
/* size= */ 0,
/* presentationTimeUs= */ 0,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
handler.post(
() -> {
try {
exoPlayer.setVideoSurface(surface);
exoPlayer.addAnalyticsListener(analyticsListener);
exoPlayer.setMediaItem(MediaItem.fromUri(buildAssetUri(assetFilePath)));
exoPlayer.setPlayWhenReady(false);
exoPlayer.prepare();
// Catch all exceptions to report. Exceptions thrown here and not caught will not
// propagate.
} catch (Exception e) {
unexpectedExceptionReference.set(e);
firstFrameRenderedOrError.open();
}
});
// Dequeue and render the output video frame.
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex;
boolean decoderFormatRead = false;
do {
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
if (!decoderFormatRead && outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
listener.onFrameDecoded(mediaCodec.getOutputFormat());
decoderFormatRead = true;
}
assertThat(outputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
} while (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
|| outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ true);
} finally {
if (mediaCodec != null) {
mediaCodec.release();
}
if (!firstFrameRenderedOrError.block(TIMEOUT_MS)) {
throw new TimeoutException(
"DecodeOneFrameUtil timed out after " + TIMEOUT_MS + " milliseconds.");
}
}
/**
* Returns the name of a decoder that supports this {@link MediaFormat}.
*
* <p>Capability check is similar to
* androidx.media3.transformer.EncoderUtil.java#findCodecForFormat().
*/
@Nullable
private static String getSupportedDecoderName(MediaFormat format) {
// TODO(b/266923205): De-duplicate logic from EncoderUtil.java#findCodecForFormat().
MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
// Format must not include KEY_FRAME_RATE on API21.
// https://developer.android.com/reference/android/media/MediaCodecList#findDecoderForFormat(android.media.MediaFormat)
float frameRate = Format.NO_VALUE;
if (Util.SDK_INT == 21 && format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
try {
frameRate = format.getFloat(MediaFormat.KEY_FRAME_RATE);
} catch (ClassCastException e) {
frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
}
// Clears the frame rate field.
format.setString(MediaFormat.KEY_FRAME_RATE, null);
handler.post(exoPlayer::release);
@Nullable PlaybackException playbackException = playbackExceptionReference.get();
if (playbackException != null) {
throw playbackException;
}
@Nullable String mediaCodecName = mediaCodecList.findDecoderForFormat(format);
if (Util.SDK_INT == 21) {
MediaFormatUtil.maybeSetInteger(format, MediaFormat.KEY_FRAME_RATE, round(frameRate));
@Nullable Exception unexpectedException = unexpectedExceptionReference.get();
if (unexpectedException != null) {
throw new IllegalStateException(
"Unexpected exception starting the player.", unexpectedException);
}
return mediaCodecName;
}
private DecodeOneFrameUtil() {}
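
For reference, a hedged sketch of how a test can call the updated decodeOneAssetFileFrame(String, Listener, Surface) API; the asset path, ImageReader configuration, and listener bodies are placeholder assumptions rather than part of this change.

import android.graphics.PixelFormat;
import android.media.ImageReader;
import android.media.MediaFormat;
import androidx.media3.test.utils.DecodeOneFrameUtil;

// Sketch of a caller; assumes the enclosing test method declares `throws Exception`,
// matching the new decodeOneAssetFileFrame signature.
ImageReader imageReader =
    ImageReader.newInstance(
        /* width= */ 320, /* height= */ 240, PixelFormat.RGBA_8888, /* maxImages= */ 1);
DecodeOneFrameUtil.decodeOneAssetFileFrame(
    /* assetFilePath= */ "media/mp4/sample.mp4", // Placeholder asset path.
    new DecodeOneFrameUtil.Listener() {
      @Override
      public void onContainerExtracted(MediaFormat mediaFormat) {
        // Container format of the video track, extracted from the asset.
      }

      @Override
      public void onFrameDecoded(MediaFormat mediaFormat) {
        // Decoder output format, reported once the first frame has been rendered.
      }
    },
    imageReader.getSurface());
imageReader.close();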

File: ToneMapHdrToSdrUsingOpenGlPixelTest.java

@@ -19,10 +19,8 @@ import static androidx.media3.common.MimeTypes.VIDEO_H265;
import static androidx.media3.effect.DefaultVideoFrameProcessor.WORKING_COLOR_SPACE_ORIGINAL;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
import static androidx.media3.transformer.mh.UnoptimizedGlEffect.NO_OP_EFFECT;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap;
@@ -32,7 +30,6 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.test.utils.DecodeOneFrameUtil;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.base.Ascii;
@@ -146,22 +143,8 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TONE_MAP_HLG_TO_SDR_PNG_ASSET_PATH);
Bitmap actualBitmap;
try {
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
recordTestSkipped(
getApplicationContext(),
testId,
/* reason= */ DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING);
return;
} else {
throw e;
}
}
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
Log.i(TAG, "Successfully tone mapped.");
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
@@ -182,22 +165,8 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TONE_MAP_HLG_TO_SDR_PNG_ASSET_PATH);
Bitmap actualBitmap;
try {
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
recordTestSkipped(
getApplicationContext(),
testId,
/* reason= */ DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING);
return;
} else {
throw e;
}
}
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
Log.i(TAG, "Successfully tone mapped.");
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
@@ -218,22 +187,8 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TONE_MAP_PQ_TO_SDR_PNG_ASSET_PATH);
Bitmap actualBitmap;
try {
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
recordTestSkipped(
getApplicationContext(),
testId,
/* reason= */ DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING);
return;
} else {
throw e;
}
}
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
Log.i(TAG, "Successfully tone mapped.");
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
@@ -255,22 +210,8 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TONE_MAP_PQ_TO_SDR_PNG_ASSET_PATH);
Bitmap actualBitmap;
try {
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
recordTestSkipped(
getApplicationContext(),
testId,
/* reason= */ DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING);
return;
} else {
throw e;
}
}
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
Log.i(TAG, "Successfully tone mapped.");
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
@@ -295,22 +236,8 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TONE_MAP_HLG_TO_SDR_PNG_ASSET_PATH);
Bitmap actualBitmap;
try {
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
recordTestSkipped(
getApplicationContext(),
testId,
/* reason= */ DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING);
return;
} else {
throw e;
}
}
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
Log.i(TAG, "Successfully tone mapped.");
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.