Split decoding out of the test

This method is also useful for other tests.

PiperOrigin-RevId: 473574282
claincly 2022-09-11 13:01:26 +00:00 committed by Marc Baechinger
parent 5c78444bf8
commit 7f42e19f09
2 changed files with 189 additions and 108 deletions
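
The new helper takes an asset path, a Listener that receives both the container's MediaFormat and the decoder's output MediaFormat, and an optional render target Surface. As a minimal sketch of the intended reuse (the test class and captured reference here are hypothetical, not part of this commit):

package androidx.media3.effect; // Listener is package-private, so callers live in this package.

import android.media.MediaFormat;
import java.util.concurrent.atomic.AtomicReference;

public final class SomeOtherDecoderTest {
  public void decodeFirstFrame() throws Exception {
    AtomicReference<MediaFormat> containerFormat = new AtomicReference<>();
    FrameProcessorTestUtil.decodeOneFrame(
        /* assetFilePath= */ "media/mp4/sample.mp4",
        new FrameProcessorTestUtil.Listener() {
          @Override
          public void onVideoMediaFormatExtracted(MediaFormat mediaFormat) {
            // Called with the format parsed from the container, before decoding starts.
            containerFormat.set(mediaFormat);
          }

          @Override
          public void onVideoMediaFormatRead(MediaFormat mediaFormat) {
            // Called once the decoder reports its output format; unused in this sketch.
          }
        },
        /* surface= */ null); // With null, the helper renders to a placeholder surface.
  }
}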

androidx/media3/effect/FrameProcessorTestUtil.java (new file)

@@ -0,0 +1,132 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.view.Surface;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MimeTypes;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.Nullable;
/** Utilities for instrumentation tests for {@link FrameProcessor}. */
public class FrameProcessorTestUtil {
/** Listener for decoding events. */
interface Listener {
/** Called when the video {@link MediaFormat} is extracted from the container. */
void onVideoMediaFormatExtracted(MediaFormat mediaFormat);
/** Called when the video {@link MediaFormat} is read by the decoder from the byte stream. */
void onVideoMediaFormatRead(MediaFormat mediaFormat);
}
/** Timeout for dequeueing buffers from the codec, in microseconds. */
private static final int DEQUEUE_TIMEOUT_US = 5_000_000;
/**
* Decodes one frame from the {@code assetFilePath} and renders it to the {@code surface}.
*
* @param assetFilePath The path to the file in the asset directory.
* @param listener A {@link Listener} implementation.
* @param surface The {@link Surface} to render the decoded frame to, or {@code null} if the
* decoded frame does not need to be rendered.
*/
public static void decodeOneFrame(
String assetFilePath, Listener listener, @Nullable Surface surface) throws Exception {
if (surface == null) {
// Create a placeholder surface to decode into, as the caller does not need the frame.
surface = new Surface(new SurfaceTexture(/* texName= */ 0));
}
// Set up the extractor to read the first video frame and get its format.
MediaExtractor mediaExtractor = new MediaExtractor();
@Nullable MediaCodec mediaCodec = null;
@Nullable MediaFormat mediaFormat = null;
Context context = getApplicationContext();
try (AssetFileDescriptor afd = context.getAssets().openFd(assetFilePath)) {
mediaExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {
if (MimeTypes.isVideo(mediaExtractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME))) {
mediaFormat = mediaExtractor.getTrackFormat(i);
listener.onVideoMediaFormatExtracted(checkNotNull(mediaFormat));
mediaExtractor.selectTrack(i);
break;
}
}
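// The container must contain at least one video track; fail fast otherwise.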
checkStateNotNull(mediaFormat);
// Queue the first video frame from the extractor.
String mimeType = checkNotNull(mediaFormat.getString(MediaFormat.KEY_MIME));
mediaCodec = MediaCodec.createDecoderByType(mimeType);
mediaCodec.configure(mediaFormat, surface, /* crypto= */ null, /* flags= */ 0);
mediaCodec.start();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
ByteBuffer inputBuffer = checkNotNull(mediaCodec.getInputBuffers()[inputBufferIndex]);
int sampleSize = mediaExtractor.readSampleData(inputBuffer, /* offset= */ 0);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
sampleSize,
mediaExtractor.getSampleTime(),
mediaExtractor.getSampleFlags());
// Queue an end-of-stream buffer to force the codec to produce output.
inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
/* size= */ 0,
/* presentationTimeUs= */ 0,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
// Dequeue and render the output video frame.
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex;
boolean decoderFormatRead = false;
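// Loop past the transitional INFO_OUTPUT_BUFFERS_CHANGED / INFO_OUTPUT_FORMAT_CHANGED
// results until a real output buffer index is returned; a timeout surfaces as
// INFO_TRY_AGAIN_LATER and fails the assertion below.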
do {
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
if (!decoderFormatRead && outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
listener.onVideoMediaFormatRead(mediaCodec.getOutputFormat());
decoderFormatRead = true;
}
assertThat(outputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
} while (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
|| outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ true);
} finally {
mediaExtractor.release();
if (mediaCodec != null) {
mediaCodec.release();
}
}
}
private FrameProcessorTestUtil() {}
}

androidx/media3/effect/GlEffectsFrameProcessorPixelTest.java

@@ -18,33 +18,28 @@ package androidx.media3.effect;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
 import static androidx.media3.effect.BitmapTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
+import static androidx.media3.effect.FrameProcessorTestUtil.decodeOneFrame;
 import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
 import static com.google.common.truth.Truth.assertThat;
 import static java.util.Arrays.asList;
 
 import android.content.Context;
-import android.content.res.AssetFileDescriptor;
 import android.graphics.Bitmap;
 import android.graphics.Matrix;
 import android.graphics.PixelFormat;
 import android.media.Image;
 import android.media.ImageReader;
-import android.media.MediaCodec;
-import android.media.MediaExtractor;
 import android.media.MediaFormat;
 import android.util.Pair;
-import androidx.annotation.Nullable;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
 import androidx.media3.common.FrameInfo;
 import androidx.media3.common.FrameProcessingException;
 import androidx.media3.common.FrameProcessor;
-import androidx.media3.common.MimeTypes;
 import androidx.media3.common.SurfaceInfo;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.common.collect.ImmutableList;
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -87,8 +82,6 @@ public final class GlEffectsFrameProcessorPixelTest {
   /** Input video of which we only use the first frame. */
   private static final String INPUT_MP4_ASSET_STRING = "media/mp4/sample.mp4";
 
-  /** Timeout for dequeueing buffers from the codec, in microseconds. */
-  private static final int DEQUEUE_TIMEOUT_US = 5_000_000;
   /**
    * Time to wait for the decoded frame to populate the {@link GlEffectsFrameProcessor} instance's
    * input surface and the {@link GlEffectsFrameProcessor} to finish processing the frame, in
@@ -101,7 +94,6 @@ public final class GlEffectsFrameProcessorPixelTest {
   private final AtomicReference<FrameProcessingException> frameProcessingException =
       new AtomicReference<>();
 
-  private @MonotonicNonNull MediaFormat mediaFormat;
   private @MonotonicNonNull GlEffectsFrameProcessor glEffectsFrameProcessor;
   private volatile @MonotonicNonNull ImageReader outputImageReader;
   private volatile boolean frameProcessingEnded;
@@ -422,9 +414,11 @@ public final class GlEffectsFrameProcessorPixelTest {
   // GlEffectWrapper to ensure usage of intermediate textures.
 
   /**
-   * Set up and prepare the first frame from an input video, as well as relevant test
-   * infrastructure. The frame will be sent towards the {@link GlEffectsFrameProcessor}, and output
-   * may be accessed on the {@code outputImageReader}.
+   * Sets up and prepares the first frame from an input video, as well as the relevant test
+   * infrastructure.
+   *
+   * <p>The frame will be sent towards {@link #glEffectsFrameProcessor}, and output may be accessed
+   * on the {@code outputImageReader}.
    *
    * @param pixelWidthHeightRatio The ratio of width over height for each pixel.
    * @param effects The {@link GlEffect GlEffects} to apply to the input frame.
@ -436,106 +430,61 @@ public final class GlEffectsFrameProcessorPixelTest {
private void setUpAndPrepareFirstFrame(float pixelWidthHeightRatio, List<Effect> effects) private void setUpAndPrepareFirstFrame(float pixelWidthHeightRatio, List<Effect> effects)
throws Exception { throws Exception {
// Set up the extractor to read the first video frame and get its format. glEffectsFrameProcessor =
MediaExtractor mediaExtractor = new MediaExtractor(); checkNotNull(
@Nullable MediaCodec mediaCodec = null; new GlEffectsFrameProcessor.Factory()
Context context = getApplicationContext(); .create(
try (AssetFileDescriptor afd = context.getAssets().openFd(INPUT_MP4_ASSET_STRING)) { getApplicationContext(),
mediaExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength()); new FrameProcessor.Listener() {
for (int i = 0; i < mediaExtractor.getTrackCount(); i++) { @Override
if (MimeTypes.isVideo(mediaExtractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME))) { public void onOutputSizeChanged(int width, int height) {
mediaFormat = mediaExtractor.getTrackFormat(i); outputImageReader =
mediaExtractor.selectTrack(i); ImageReader.newInstance(
break; width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
} checkNotNull(glEffectsFrameProcessor)
} .setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height));
}
int inputWidth = checkNotNull(mediaFormat).getInteger(MediaFormat.KEY_WIDTH); @Override
int inputHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT); public void onOutputFrameAvailable(long presentationTimeUs) {
glEffectsFrameProcessor = // Do nothing as frames are released automatically.
checkNotNull( }
new GlEffectsFrameProcessor.Factory()
.create(
context,
new FrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
outputImageReader =
ImageReader.newInstance(
width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
checkNotNull(glEffectsFrameProcessor)
.setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height));
}
@Override @Override
public void onOutputFrameAvailable(long presentationTimeUs) { public void onFrameProcessingError(FrameProcessingException exception) {
// Do nothing as frames are released automatically. frameProcessingException.set(exception);
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onFrameProcessingEnded() {
frameProcessingException.set(exception); frameProcessingEnded = true;
} }
},
effects,
DebugViewProvider.NONE,
ColorInfo.SDR_BT709_LIMITED,
/* releaseFramesAutomatically= */ true));
decodeOneFrame(
INPUT_MP4_ASSET_STRING,
new FrameProcessorTestUtil.Listener() {
@Override
public void onVideoMediaFormatExtracted(MediaFormat mediaFormat) {
glEffectsFrameProcessor.setInputFrameInfo(
new FrameInfo(
mediaFormat.getInteger(MediaFormat.KEY_WIDTH),
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT),
pixelWidthHeightRatio,
/* streamOffsetUs= */ 0));
glEffectsFrameProcessor.registerInputFrame();
}
@Override @Override
public void onFrameProcessingEnded() { public void onVideoMediaFormatRead(MediaFormat mediaFormat) {
frameProcessingEnded = true; // Do nothing.
} }
}, },
effects, glEffectsFrameProcessor.getInputSurface());
DebugViewProvider.NONE,
ColorInfo.SDR_BT709_LIMITED,
/* releaseFramesAutomatically= */ true));
glEffectsFrameProcessor.setInputFrameInfo(
new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio, /* streamOffsetUs= */ 0));
glEffectsFrameProcessor.registerInputFrame();
// Queue the first video frame from the extractor.
String mimeType = checkNotNull(mediaFormat.getString(MediaFormat.KEY_MIME));
mediaCodec = MediaCodec.createDecoderByType(mimeType);
mediaCodec.configure(
mediaFormat,
glEffectsFrameProcessor.getInputSurface(),
/* crypto= */ null,
/* flags= */ 0);
mediaCodec.start();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
ByteBuffer inputBuffer = checkNotNull(mediaCodec.getInputBuffers()[inputBufferIndex]);
int sampleSize = mediaExtractor.readSampleData(inputBuffer, /* offset= */ 0);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
sampleSize,
mediaExtractor.getSampleTime(),
mediaExtractor.getSampleFlags());
// Queue an end-of-stream buffer to force the codec to produce output.
inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
mediaCodec.queueInputBuffer(
inputBufferIndex,
/* offset= */ 0,
/* size= */ 0,
/* presentationTimeUs= */ 0,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
// Dequeue and render the output video frame.
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex;
do {
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
assertThat(outputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
} while (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
|| outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ true);
} finally {
mediaExtractor.release();
if (mediaCodec != null) {
mediaCodec.release();
}
}
} }
 
   private Bitmap processFirstFrameAndEnd() throws InterruptedException {
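
Note on the refactor above: instead of the test reading the track's width and height synchronously from the extractor, setUpAndPrepareFirstFrame now reacts to the helper's callback, building the FrameInfo and registering the input frame inside onVideoMediaFormatExtracted, which decodeOneFrame invokes before any frame reaches the frame processor's input surface.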