Add multi-image input FrameProcessor tests

PiperOrigin-RevId: 510441777
This commit is contained in:
tofunmi 2023-02-17 16:47:17 +00:00 committed by Andrew Lewis
parent bd4a5ecf6a
commit 8aa1638fe4
4 changed files with 164 additions and 20 deletions

View File

@ -0,0 +1,122 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import java.util.concurrent.atomic.AtomicInteger;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests for frame queuing and output in {@link DefaultVideoFrameProcessor} given image input. */
@RunWith(AndroidJUnit4.class)
public class DefaultVideoFrameProcessorImageFrameOutputTest {
public static final String ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
public static final String WRAPPED_CROP_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/image_input_with_wrapped_crop.png";
public static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
private @MonotonicNonNull AtomicInteger framesProduced;
@EnsuresNonNull("framesProduced")
@Before
public void setUp() {
framesProduced = new AtomicInteger();
}
@After
public void release() {
checkNotNull(videoFrameProcessorTestRunner).release();
}
@RequiresNonNull("framesProduced")
@Test
public void imageInput_queueThreeBitmaps_outputsAllFrames() throws Exception {
String testId = "imageInput_withThreeBitmaps_outputsAllFrames";
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* frameRate= */ 2);
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(WRAPPED_CROP_PNG_ASSET_PATH), /* durationUs= */ 2 * C.MICROS_PER_SECOND, /* frameRate= */ 3);
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH), /* durationUs= */ 3 * C.MICROS_PER_SECOND, /* frameRate= */ 4);
videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
int actualFrameCount = framesProduced.get();
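// 1 second at 2 fps, 2 seconds at 3 fps, and 3 seconds at 4 fps: 2 + 6 + 12 = 20 frames.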
assertThat(actualFrameCount).isEqualTo(/* expected= */ 20);
}
@RequiresNonNull("framesProduced")
@Test
public void imageInput_queueTwentyBitmaps_outputsAllFrames() throws Exception {
String testId = "imageInput_queueTwentyBitmaps_outputsAllFrames";
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
for (int i = 0; i < 20; i++) {
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* frameRate= */ 1);
}
videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
int actualFrameCount = framesProduced.get();
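// 20 bitmaps, each spanning 1 second at 1 fps, produce one frame apiece: 20 frames in total.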
assertThat(actualFrameCount).isEqualTo(/* expected= */ 20);
}
@RequiresNonNull("framesProduced")
@Test
public void imageInput_queueEndAndQueueAgain_outputsFirstSetOfFramesOnly() throws Exception {
String testId = "imageInput_queueEndAndQueueAgain_outputsFirstSetOfFramesOnly";
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* frameRate= */ 2);
videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ 2 * C.MICROS_PER_SECOND,
/* frameRate= */ 3);
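// Only the first bitmap contributes output frames: 1 second at 2 fps = 2 frames. The bitmap
// queued after the end of input is not expected to add to the count.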
int actualFrameCount = framesProduced.get();
assertThat(actualFrameCount).isEqualTo(/* expected= */ 2);
}
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) {
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory())
.setIsInputTextureExternal(false)
.setOnFrameAvailableListener((unused) -> checkNotNull(framesProduced).incrementAndGet());
}
}

View File

@ -27,6 +27,7 @@ import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import androidx.test.ext.junit.runners.AndroidJUnit4;
+ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.Effect;
@ -109,7 +110,9 @@ public final class DefaultVideoFrameProcessorPixelTest {
getDefaultFrameProcessorTestRunnerBuilder(testId).setIsInputTextureExternal(false).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
- Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap);
+ videoFrameProcessorTestRunner.queueInputBitmap(
+ expectedBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
+ Bitmap actualBitmap = videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -134,16 +137,15 @@ public final class DefaultVideoFrameProcessorPixelTest {
Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH);
- Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(originalBitmap);
+ videoFrameProcessorTestRunner.queueInputBitmap(
+ originalBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
+ Bitmap actualBitmap = videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
- // TODO(b/262693274): Once texture deletion is added to InternalTextureManager.java, add a test
- // queuing multiple input bitmaps to ensure the operation successfully completes and that the
- // correct number of frames has been queued.
@Test
public void noEffects_withFrameCache_matchesGoldenFile() throws Exception {

View File

@ -273,14 +273,14 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
assertThat(actualReleaseTimesNs).isEqualTo(releaseTimesNs);
}
- private interface OnFrameAvailableListener {
+ private interface OnOutputFrameAvailableListener {
void onFrameAvailable(long presentationTimeUs);
}
@EnsuresNonNull("defaultVideoFrameProcessor")
private void processFramesToEndOfStream(
long[] inputPresentationTimesUs,
- OnFrameAvailableListener onFrameAvailableListener,
+ OnOutputFrameAvailableListener onFrameAvailableListener,
boolean releaseFramesAutomatically)
throws Exception {
AtomicReference<@NullableType VideoFrameProcessingException>

View File

@ -27,8 +27,8 @@ import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaFormat;
+ import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
- import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo;
@ -61,11 +61,13 @@ public final class VideoFrameProcessorTestRunner {
private @MonotonicNonNull ColorInfo inputColorInfo;
private @MonotonicNonNull ColorInfo outputColorInfo;
private boolean isInputTextureExternal;
+ private OnOutputFrameAvailableListener onOutputFrameAvailableListener;
/** Creates a new instance with default values. */
public Builder() {
pixelWidthHeightRatio = DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO;
isInputTextureExternal = true;
+ onOutputFrameAvailableListener = (unused) -> {};
}
/**
@ -94,7 +96,7 @@ public final class VideoFrameProcessorTestRunner {
/**
* Sets the input video asset path.
*
* <p>This is a required value.
* <p>No default value is set. Must be set when the input is a video file.
*/
@CanIgnoreReturnValue
public Builder setVideoAssetPath(String videoAssetPath) {
@ -170,7 +172,8 @@ public final class VideoFrameProcessorTestRunner {
return this;
}
/**
- * Sets the input track type. See {@link VideoFrameProcessor.Factory#create}.
+ * Sets whether input comes from an external texture. See {@link
+ * VideoFrameProcessor.Factory#create}.
*
* <p>The default value is {@code true}.
*/
@ -180,10 +183,21 @@ public final class VideoFrameProcessorTestRunner {
return this;
}
+ /**
+ * Sets the method to be called in {@link VideoFrameProcessor.Listener#onOutputFrameAvailable}.
+ *
+ * <p>The default value is a no-op.
+ */
+ @CanIgnoreReturnValue
+ public Builder setOnFrameAvailableListener(
+ OnOutputFrameAvailableListener onOutputFrameAvailableListener) {
+ this.onOutputFrameAvailableListener = onOutputFrameAvailableListener;
+ return this;
+ }
public VideoFrameProcessorTestRunner build() throws VideoFrameProcessingException {
checkStateNotNull(testId, "testId must be set.");
checkStateNotNull(videoFrameProcessorFactory, "videoFrameProcessorFactory must be set.");
- checkStateNotNull(videoAssetPath, "videoAssetPath must be set.");
return new VideoFrameProcessorTestRunner(
testId,
@ -194,7 +208,8 @@ public final class VideoFrameProcessorTestRunner {
pixelWidthHeightRatio,
inputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : inputColorInfo,
outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo,
- isInputTextureExternal);
+ isInputTextureExternal,
+ onOutputFrameAvailableListener);
}
}
@ -205,7 +220,7 @@ public final class VideoFrameProcessorTestRunner {
private static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5000;
private final String testId;
- private final String videoAssetPath;
+ private final @MonotonicNonNull String videoAssetPath;
private final String outputFileLabel;
private final float pixelWidthHeightRatio;
private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
@ -218,13 +233,14 @@ public final class VideoFrameProcessorTestRunner {
private VideoFrameProcessorTestRunner(
String testId,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
- String videoAssetPath,
+ @Nullable String videoAssetPath,
String outputFileLabel,
ImmutableList<Effect> effects,
float pixelWidthHeightRatio,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
- boolean isInputTextureExternal)
+ boolean isInputTextureExternal,
+ OnOutputFrameAvailableListener onOutputFrameAvailableListener)
throws VideoFrameProcessingException {
this.testId = testId;
this.videoAssetPath = videoAssetPath;
@ -256,6 +272,7 @@ public final class VideoFrameProcessorTestRunner {
@Override
public void onOutputFrameAvailable(long presentationTimeUs) {
// Do nothing as frames are released automatically.
+ onOutputFrameAvailableListener.onFrameAvailable(presentationTimeUs);
}
@Override
@ -272,7 +289,7 @@ public final class VideoFrameProcessorTestRunner {
public Bitmap processFirstFrameAndEnd() throws Exception {
DecodeOneFrameUtil.decodeOneAssetFileFrame(
- videoAssetPath,
+ checkNotNull(videoAssetPath),
new DecodeOneFrameUtil.Listener() {
@Override
public void onContainerExtracted(MediaFormat mediaFormat) {
@ -294,16 +311,15 @@ public final class VideoFrameProcessorTestRunner {
return endFrameProcessingAndGetImage();
}
- public Bitmap processImageFrameAndEnd(Bitmap inputBitmap) throws Exception {
+ public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
- videoFrameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
- return endFrameProcessingAndGetImage();
+ videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
- private Bitmap endFrameProcessingAndGetImage() throws Exception {
+ public Bitmap endFrameProcessingAndGetImage() throws Exception {
videoFrameProcessor.signalEndOfInput();
Thread.sleep(VIDEO_FRAME_PROCESSING_WAIT_MS);
@ -322,4 +338,8 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor.release();
}
}
+ public interface OnOutputFrameAvailableListener {
+ void onFrameAvailable(long presentationTimeUs);
+ }
}