Remove setInputFrameInfo

After this change, every queued bitmap is treated as an individual input stream
(like a new MediaItem).

This change merges the FrameDropTest and FrameDropPixelTest into one (while maintaining all the test cases)

- This is accomplished by generating bitmaps with timestamps on them in FrameDropTest and comparing them with goldens (one may call this a pixel test, please lmk if you want this to be renamed)
- Most of the change comes from DefaultVideoFrameProcessorVideoFrameRenderingTest. The overall workflow is
  - We bypass the input manager
  - The TestFrameGenerator generates frames based on timestamps. In this case, we generate frames with timestamps on them
  - The generated frame is sent to texture output and in turn saved to bitmaps
  - We then compare the generated bitmap with the goldens

PiperOrigin-RevId: 551795770
This commit is contained in:
claincly 2023-07-28 10:52:28 +01:00 committed by Rohit Singh
parent ebbcec9a20
commit c221958889
25 changed files with 371 additions and 382 deletions

View File

@ -168,8 +168,8 @@ public interface VideoFrameProcessor {
/** /**
* Provides an input texture ID to the {@code VideoFrameProcessor}. * Provides an input texture ID to the {@code VideoFrameProcessor}.
* *
* <p>It must be called after the {@link #setOnInputFrameProcessedListener * <p>It must be only called after {@link #setOnInputFrameProcessedListener} and {@link
* onInputFrameProcessedListener} and the {@link #setInputFrameInfo frameInfo} have been set. * #registerInputStream} have been called.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
@ -191,6 +191,10 @@ public interface VideoFrameProcessor {
* Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames * Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames
* from. * from.
* *
* <p>The frames arriving on the {@link Surface} will not be consumed by the {@code
* VideoFrameProcessor} until {@link #registerInputStream} is called with {@link
* #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept * @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
@ -202,27 +206,11 @@ public interface VideoFrameProcessor {
* Informs the {@code VideoFrameProcessor} that a new input stream will be queued with the list of * Informs the {@code VideoFrameProcessor} that a new input stream will be queued with the list of
* {@link Effect Effects} to apply to the new input stream. * {@link Effect Effects} to apply to the new input stream.
* *
* <p>Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input
* stream differs from that of the current input stream.
*
* @param inputType The {@link InputType} of the new input stream. * @param inputType The {@link InputType} of the new input stream.
* @param effects The list of {@link Effect effects} to apply to the new input stream. * @param effects The list of {@link Effect effects} to apply to the new input stream.
* @param frameInfo The {@link FrameInfo} of the new input stream.
*/ */
// TODO(b/286032822): Merge this and setInputFrameInfo. void registerInputStream(@InputType int inputType, List<Effect> effects, FrameInfo frameInfo);
void registerInputStream(@InputType int inputType, List<Effect> effects);
/**
* Sets information about the input frames.
*
* <p>The new input information is applied from the next frame {@linkplain #registerInputFrame()
* registered} or {@linkplain #queueInputTexture} queued} onwards.
*
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.
*
* <p>Can be called on any thread.
*/
void setInputFrameInfo(FrameInfo inputFrameInfo);
/** /**
* Informs the {@code VideoFrameProcessor} that a frame will be queued to its {@linkplain * Informs the {@code VideoFrameProcessor} that a frame will be queued to its {@linkplain
@ -235,7 +223,7 @@ public interface VideoFrameProcessor {
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept * @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}. * {@linkplain #INPUT_TYPE_SURFACE surface input}.
* @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
* #setInputFrameInfo(FrameInfo)}. * #registerInputStream}.
*/ */
void registerInputFrame(); void registerInputFrame();

View File

@ -0,0 +1,97 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import androidx.media3.common.C;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import java.util.List;
import java.util.concurrent.Executor;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
 * A {@link GlShaderProgram} that emits blank frames at caller-supplied timestamps.
 *
 * <p>Rather than sampling an upstream texture, this program owns a single cleared texture and
 * reports it as the output frame once per requested presentation time.
 */
/* package */ final class BlankFrameProducer implements GlShaderProgram {
  // Dimensions, in pixels, of the blank texture produced by this program.
  private final int width;
  private final int height;

  private @MonotonicNonNull GlTextureInfo blankTexture;
  private @MonotonicNonNull OutputListener outputListener;

  /**
   * Creates an instance.
   *
   * @param width The width of the blank frames, in pixels.
   * @param height The height of the blank frames, in pixels.
   */
  public BlankFrameProducer(int width, int height) {
    this.width = width;
    this.height = height;
  }

  /**
   * Allocates and clears the blank texture.
   *
   * @throws VideoFrameProcessingException If a GL error occurs while creating or clearing the
   *     texture.
   */
  public void configureGlObjects() throws VideoFrameProcessingException {
    try {
      int textureId =
          GlUtil.createTexture(width, height, /* useHighPrecisionColorComponents= */ false);
      int framebufferId = GlUtil.createFboForTexture(textureId);
      blankTexture =
          new GlTextureInfo(textureId, framebufferId, /* rboId= */ C.INDEX_UNSET, width, height);
      // Focus and clear the framebuffer so the texture contents are a known blank value.
      GlUtil.focusFramebufferUsingCurrentContext(framebufferId, width, height);
      GlUtil.clearFocusedBuffers();
    } catch (GlUtil.GlException e) {
      throw new VideoFrameProcessingException(e);
    }
  }

  /**
   * Reports the blank texture as an available output frame once per entry in {@code
   * presentationTimesUs}.
   *
   * <p>Must only be called after an {@link OutputListener} has been set and {@link
   * #configureGlObjects()} has run.
   */
  public void produceBlankFrames(List<Long> presentationTimesUs) {
    OutputListener listener = checkNotNull(outputListener);
    GlTextureInfo texture = checkNotNull(blankTexture);
    for (int i = 0; i < presentationTimesUs.size(); i++) {
      listener.onOutputFrameAvailable(texture, presentationTimesUs.get(i));
    }
  }

  @Override
  public void setInputListener(InputListener inputListener) {}

  @Override
  public void setOutputListener(OutputListener outputListener) {
    this.outputListener = outputListener;
  }

  @Override
  public void setErrorListener(Executor executor, ErrorListener errorListener) {}

  @Override
  public void queueInputFrame(
      GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
    // No input is queued in these tests. The BlankFrameProducer is used to produce frames.
    throw new UnsupportedOperationException();
  }

  @Override
  public void releaseOutputFrame(GlTextureInfo outputTexture) {}

  @Override
  public void signalEndOfCurrentInputStream() {
    checkNotNull(outputListener).onCurrentOutputStreamEnded();
  }

  @Override
  public void flush() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void release() {
    // Do nothing as destroying the OpenGL context destroys the texture.
  }
}

View File

@ -15,7 +15,6 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
@ -195,7 +194,6 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
return new VideoFrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build()) .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setOnOutputFrameAvailableForRenderingListener( .setOnOutputFrameAvailableForRenderingListener(
unused -> checkNotNull(framesProduced).incrementAndGet()); unused -> checkNotNull(framesProduced).incrementAndGet());

View File

@ -15,7 +15,6 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE; import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
@ -158,7 +157,6 @@ public final class DefaultVideoFrameProcessorPixelTest {
String testId = "noEffects_withImageInput_matchesGoldenFile"; String testId = "noEffects_withImageInput_matchesGoldenFile";
videoFrameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.build(); .build();
Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
@ -180,7 +178,6 @@ public final class DefaultVideoFrameProcessorPixelTest {
String testId = "wrappedCrop_withImageInput_matchesGoldenFile"; String testId = "wrappedCrop_withImageInput_matchesGoldenFile";
videoFrameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setEffects( .setEffects(
new GlEffectWrapper( new GlEffectWrapper(
@ -214,7 +211,6 @@ public final class DefaultVideoFrameProcessorPixelTest {
new DefaultVideoFrameProcessor.Factory.Builder() new DefaultVideoFrameProcessor.Factory.Builder()
.setEnableColorTransfers(false) .setEnableColorTransfers(false)
.build()) .build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setEffects(NO_OP_EFFECT) .setEffects(NO_OP_EFFECT)
.build(); .build();

View File

@ -350,8 +350,8 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
checkNotNull(defaultVideoFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.registerInputStream( .registerInputStream(
INPUT_TYPE_SURFACE, INPUT_TYPE_SURFACE,
/* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer)); /* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
defaultVideoFrameProcessor.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build()); new FrameInfo.Builder(WIDTH, HEIGHT).build());
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs); blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput(); defaultVideoFrameProcessor.signalEndOfInput();
videoFrameProcessingEndedCountDownLatch.await(); videoFrameProcessingEndedCountDownLatch.await();

View File

@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * https://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
@ -15,18 +15,38 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP; import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import androidx.media3.common.C; import android.graphics.Bitmap;
import android.graphics.Color;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.style.AbsoluteSizeSpan;
import android.text.style.ForegroundColorSpan;
import android.text.style.TypefaceSpan;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.Util;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.Queue; import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.ConcurrentLinkedQueue; import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@ -38,108 +58,184 @@ import org.junit.runner.RunWith;
/** Tests for {@link FrameDropEffect}. */ /** Tests for {@link FrameDropEffect}. */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class FrameDropTest { public class FrameDropTest {
private static final String ORIGINAL_PNG_ASSET_PATH = private static final String ASSET_PATH = "media/bitmap/FrameDropTest";
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png"; private static final int BLANK_FRAME_WIDTH = 100;
private static final int BLANK_FRAME_HEIGHT = 50;
private static final String SCALE_WIDE_PNG_ASSET_PATH = private @MonotonicNonNull TextureBitmapReader textureBitmapReader;
"media/bitmap/sample_mp4_first_frame/electrical_colors/scale_wide.png"; private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner; @EnsuresNonNull("textureBitmapReader")
private @MonotonicNonNull Queue<Long> actualPresentationTimesUs;
@EnsuresNonNull("actualPresentationTimesUs")
@Before @Before
public void setUp() { public void setUp() {
actualPresentationTimesUs = new ConcurrentLinkedQueue<>(); textureBitmapReader = new TextureBitmapReader();
} }
@After @After
public void release() { public void tearDown() {
checkNotNull(videoFrameProcessorTestRunner).release(); checkNotNull(defaultVideoFrameProcessor).release();
} }
@RequiresNonNull("actualPresentationTimesUs") @RequiresNonNull("textureBitmapReader")
@Test @Test
public void frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs() public void frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs()
throws Exception { throws Exception {
String testId = "frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs"; String testId = "frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs";
videoFrameProcessorTestRunner = ImmutableList<Long> frameTimesUs =
getDefaultFrameProcessorTestRunnerBuilder( ImmutableList.of(0L, 16_000L, 32_000L, 48_000L, 58_000L, 71_000L, 86_000L);
testId, FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30))
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build();
ImmutableList<Integer> timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86); ImmutableList<Long> actualPresentationTimesUs =
for (int timestampMs : timestampsMs) { processFramesToEndOfStream(
videoFrameProcessorTestRunner.queueInputBitmap( frameTimesUs, FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30));
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ timestampMs * 1000L,
/* frameRate= */ 1);
}
videoFrameProcessorTestRunner.endFrameProcessing();
assertThat(actualPresentationTimesUs).containsExactly(0L, 32_000L, 71_000L).inOrder(); assertThat(actualPresentationTimesUs).containsExactly(0L, 32_000L, 71_000L).inOrder();
getAndAssertOutputBitmaps(textureBitmapReader, actualPresentationTimesUs, testId);
} }
@RequiresNonNull("actualPresentationTimesUs") @RequiresNonNull("textureBitmapReader")
@Test @Test
public void frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs() public void frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs()
throws Exception { throws Exception {
String testId = "frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs"; String testId = "frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs";
videoFrameProcessorTestRunner = ImmutableList<Long> frameTimesUs =
getDefaultFrameProcessorTestRunnerBuilder( ImmutableList.of(0L, 250_000L, 500_000L, 750_000L, 1_000_000L, 1_500_000L);
testId,
ImmutableList<Long> actualPresentationTimesUs =
processFramesToEndOfStream(
frameTimesUs,
FrameDropEffect.createSimpleFrameDropEffect( FrameDropEffect.createSimpleFrameDropEffect(
/* expectedFrameRate= */ 6, /* targetFrameRate= */ 2)) /* expectedFrameRate= */ 6, /* targetFrameRate= */ 2));
.build();
videoFrameProcessorTestRunner.queueInputBitmap( assertThat(actualPresentationTimesUs).containsExactly(0L, 750_000L).inOrder();
readBitmap(ORIGINAL_PNG_ASSET_PATH), getAndAssertOutputBitmaps(textureBitmapReader, actualPresentationTimesUs, testId);
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L,
/* frameRate= */ 4);
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(SCALE_WIDE_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ C.MICROS_PER_SECOND,
/* frameRate= */ 2);
videoFrameProcessorTestRunner.endFrameProcessing();
assertThat(actualPresentationTimesUs).containsExactly(500_000L, 1_500_000L).inOrder();
} }
@RequiresNonNull("actualPresentationTimesUs") @RequiresNonNull("textureBitmapReader")
@Test @Test
public void frameDrop_withSimpleStrategy_outputsAllFrames() throws Exception { public void frameDrop_withSimpleStrategy_outputsAllFrames() throws Exception {
String testId = "frameDrop_withSimpleStrategy_outputsCorrectNumberOfFrames"; String testId = "frameDrop_withSimpleStrategy_outputsAllFrames";
videoFrameProcessorTestRunner = ImmutableList<Long> frameTimesUs = ImmutableList.of(0L, 333_333L, 666_667L);
getDefaultFrameProcessorTestRunnerBuilder(
testId,
FrameDropEffect.createSimpleFrameDropEffect(
/* expectedFrameRate= */ 3, /* targetFrameRate= */ 3))
.build();
videoFrameProcessorTestRunner.queueInputBitmap( ImmutableList<Long> actualPresentationTimesUs =
readBitmap(ORIGINAL_PNG_ASSET_PATH), processFramesToEndOfStream(
/* durationUs= */ C.MICROS_PER_SECOND, frameTimesUs,
/* offsetToAddUs= */ 0L, FrameDropEffect.createSimpleFrameDropEffect(
/* frameRate= */ 3); /* expectedFrameRate= */ 3, /* targetFrameRate= */ 3));
videoFrameProcessorTestRunner.endFrameProcessing();
assertThat(actualPresentationTimesUs).containsExactly(0L, 333_333L, 666_667L).inOrder(); assertThat(actualPresentationTimesUs).containsExactly(0L, 333_333L, 666_667L).inOrder();
getAndAssertOutputBitmaps(textureBitmapReader, actualPresentationTimesUs, testId);
} }
@RequiresNonNull("actualPresentationTimesUs") private static void getAndAssertOutputBitmaps(
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( TextureBitmapReader textureBitmapReader, List<Long> presentationTimesUs, String testId)
String testId, FrameDropEffect frameDropEffect) { throws IOException {
return new VideoFrameProcessorTestRunner.Builder() for (int i = 0; i < presentationTimesUs.size(); i++) {
.setTestId(testId) long presentationTimeUs = presentationTimesUs.get(i);
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build()) Bitmap actualBitmap = textureBitmapReader.getBitmap(presentationTimeUs);
.setInputType(INPUT_TYPE_BITMAP) Bitmap expectedBitmap =
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) readBitmap(Util.formatInvariant("%s/pts_%d.png", ASSET_PATH, presentationTimeUs));
.setEffects(frameDropEffect) maybeSaveTestBitmap(
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add); testId, String.valueOf(presentationTimeUs), actualBitmap, /* path= */ null);
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
}
@EnsuresNonNull("defaultVideoFrameProcessor")
private ImmutableList<Long> processFramesToEndOfStream(
List<Long> inputPresentationTimesUs, FrameDropEffect frameDropEffect) throws Exception {
AtomicReference<@NullableType VideoFrameProcessingException>
videoFrameProcessingExceptionReference = new AtomicReference<>();
BlankFrameProducer blankFrameProducer =
new BlankFrameProducer(BLANK_FRAME_WIDTH, BLANK_FRAME_HEIGHT);
CountDownLatch videoFrameProcessingEndedCountDownLatch = new CountDownLatch(1);
ImmutableList.Builder<Long> actualPresentationTimesUs = new ImmutableList.Builder<>();
defaultVideoFrameProcessor =
checkNotNull(
new DefaultVideoFrameProcessor.Factory.Builder()
.setTextureOutput(
(outputTexture, presentationTimeUs, releaseOutputTextureCallback, token) -> {
checkNotNull(textureBitmapReader)
.readBitmap(outputTexture, presentationTimeUs);
releaseOutputTextureCallback.release(presentationTimeUs);
},
/* textureOutputCapacity= */ 1)
.build()
.create(
getApplicationContext(),
DebugViewProvider.NONE,
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
actualPresentationTimesUs.add(presentationTimeUs);
}
@Override
public void onError(VideoFrameProcessingException exception) {
videoFrameProcessingExceptionReference.set(exception);
videoFrameProcessingEndedCountDownLatch.countDown();
}
@Override
public void onEnded() {
videoFrameProcessingEndedCountDownLatch.countDown();
}
}));
defaultVideoFrameProcessor.getTaskExecutor().submit(blankFrameProducer::configureGlObjects);
// A frame needs to be registered despite not queuing any external input to ensure
// that the video frame processor knows about the stream offset.
checkNotNull(defaultVideoFrameProcessor)
.registerInputStream(
INPUT_TYPE_SURFACE,
/* effects= */ ImmutableList.of(
(GlEffect) (context, useHdr) -> blankFrameProducer,
// Use an overlay effect to generate bitmaps with timestamps on it.
new OverlayEffect(
ImmutableList.of(
new TextOverlay() {
@Override
public SpannableString getText(long presentationTimeUs) {
SpannableString text =
new SpannableString(String.valueOf(presentationTimeUs));
text.setSpan(
new ForegroundColorSpan(Color.BLACK),
/* start= */ 0,
text.length(),
Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
text.setSpan(
new AbsoluteSizeSpan(/* size= */ 24),
/* start= */ 0,
text.length(),
Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
text.setSpan(
new TypefaceSpan(/* family= */ "sans-serif"),
/* start= */ 0,
text.length(),
Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
return text;
}
})),
frameDropEffect),
new FrameInfo.Builder(BLANK_FRAME_WIDTH, BLANK_FRAME_HEIGHT).build());
blankFrameProducer.produceBlankFrames(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput();
videoFrameProcessingEndedCountDownLatch.await();
@Nullable
Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get();
if (videoFrameProcessingException != null) {
throw videoFrameProcessingException;
}
return actualPresentationTimesUs.build();
} }
} }

View File

@ -88,8 +88,10 @@ import androidx.media3.common.util.Size;
copyTextureToPreviousFrame(glObjectsProvider, inputTexture, presentationTimeUs); copyTextureToPreviousFrame(glObjectsProvider, inputTexture, presentationTimeUs);
getInputListener().onInputFrameProcessed(inputTexture); getInputListener().onInputFrameProcessed(inputTexture);
if (outputTexturePool.freeTextureCount() > 0) {
getInputListener().onReadyToAcceptInputFrame(); getInputListener().onReadyToAcceptInputFrame();
} }
}
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {

View File

@ -418,7 +418,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) { public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
checkState( checkState(
hasRefreshedNextInputFrameInfo, hasRefreshedNextInputFrameInfo,
"setInputFrameInfo must be called before queueing another bitmap"); "registerInputStream must be called before queueing another bitmap");
inputSwitcher inputSwitcher
.activeTextureManager() .activeTextureManager()
.queueInputBitmap( .queueInputBitmap(
@ -442,15 +442,19 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
return inputSwitcher.activeTextureManager().getInputSurface(); return inputSwitcher.getInputSurface();
} }
@Override @Override
public void registerInputStream(@InputType int inputType, List<Effect> effects) { public void registerInputStream(
@InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
hasRefreshedNextInputFrameInfo = true;
synchronized (lock) { synchronized (lock) {
if (!processingInput) { if (!processingInput) {
videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects)); videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
inputSwitcher.switchToInput(inputType); inputSwitcher.switchToInput(inputType, nextInputFrameInfo);
inputSwitcher.activeTextureManager().setInputFrameInfo(nextInputFrameInfo);
processingInput = true; processingInput = true;
return; return;
} }
@ -477,21 +481,14 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
// a new frame from the new input stream prematurely. // a new frame from the new input stream prematurely.
videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects)); videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
} }
inputSwitcher.switchToInput(inputType); inputSwitcher.switchToInput(inputType, nextInputFrameInfo);
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
inputSwitcher.activeTextureManager().setInputFrameInfo(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = true;
} }
@Override @Override
public void registerInputFrame() { public void registerInputFrame() {
checkState(!inputStreamEnded); checkState(!inputStreamEnded);
checkStateNotNull( checkStateNotNull(
nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames"); nextInputFrameInfo, "registerInputStream must be called before registering input frames");
inputSwitcher.activeTextureManager().registerInputFrame(nextInputFrameInfo); inputSwitcher.activeTextureManager().registerInputFrame(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = false; hasRefreshedNextInputFrameInfo = false;

View File

@ -17,6 +17,9 @@
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
@ -24,7 +27,9 @@ import static androidx.media3.common.util.Util.containsKey;
import android.content.Context; import android.content.Context;
import android.util.SparseArray; import android.util.SparseArray;
import android.view.Surface;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlObjectsProvider; import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
@ -84,7 +89,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
TextureManager textureManager; TextureManager textureManager;
// TODO(b/274109008): Refactor DefaultShaderProgram to create a class just for sampling. // TODO(b/274109008): Refactor DefaultShaderProgram to create a class just for sampling.
switch (inputType) { switch (inputType) {
case VideoFrameProcessor.INPUT_TYPE_SURFACE: case INPUT_TYPE_SURFACE:
samplingShaderProgram = samplingShaderProgram =
DefaultShaderProgram.createWithExternalSampler( DefaultShaderProgram.createWithExternalSampler(
context, context,
@ -98,7 +103,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
glObjectsProvider, samplingShaderProgram, videoFrameProcessingTaskExecutor); glObjectsProvider, samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram)); inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break; break;
case VideoFrameProcessor.INPUT_TYPE_BITMAP: case INPUT_TYPE_BITMAP:
samplingShaderProgram = samplingShaderProgram =
DefaultShaderProgram.createWithInternalSampler( DefaultShaderProgram.createWithInternalSampler(
context, context,
@ -113,7 +118,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
glObjectsProvider, samplingShaderProgram, videoFrameProcessingTaskExecutor); glObjectsProvider, samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram)); inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break; break;
case VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID: case INPUT_TYPE_TEXTURE_ID:
samplingShaderProgram = samplingShaderProgram =
DefaultShaderProgram.createWithInternalSampler( DefaultShaderProgram.createWithInternalSampler(
context, context,
@ -145,8 +150,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* registered}. * registered}.
* *
* @param newInputType The new {@link VideoFrameProcessor.InputType} to switch to. * @param newInputType The new {@link VideoFrameProcessor.InputType} to switch to.
* @param inputFrameInfo The {@link FrameInfo} associated with the new input.
*/ */
public void switchToInput(@VideoFrameProcessor.InputType int newInputType) { public void switchToInput(
@VideoFrameProcessor.InputType int newInputType, FrameInfo inputFrameInfo) {
checkStateNotNull(downstreamShaderProgram); checkStateNotNull(downstreamShaderProgram);
checkState(containsKey(inputs, newInputType), "Input type not registered: " + newInputType); checkState(containsKey(inputs, newInputType), "Input type not registered: " + newInputType);
@ -167,6 +174,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
input.setActive(false); input.setActive(false);
} }
} }
checkNotNull(activeTextureManager).setInputFrameInfo(inputFrameInfo);
} }
/** /**
@ -186,6 +194,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkNotNull(activeTextureManager).signalEndOfCurrentInputStream(); checkNotNull(activeTextureManager).signalEndOfCurrentInputStream();
} }
/**
* Returns the input {@link Surface}.
*
* @return The input {@link Surface}, regardless if the current input is {@linkplain
* #switchToInput set} to {@link VideoFrameProcessor#INPUT_TYPE_SURFACE}.
* @throws IllegalStateException If {@link VideoFrameProcessor#INPUT_TYPE_SURFACE} is not
* {@linkplain #registerInput registered}.
*/
public Surface getInputSurface() {
checkState(containsKey(inputs, INPUT_TYPE_SURFACE));
return inputs.get(INPUT_TYPE_SURFACE).textureManager.getInputSurface();
}
/** Releases the resources. */ /** Releases the resources. */
public void release() throws VideoFrameProcessingException { public void release() throws VideoFrameProcessingException {
for (int i = 0; i < inputs.size(); i++) { for (int i = 0; i < inputs.size(); i++) {

View File

@ -57,13 +57,13 @@ import androidx.media3.common.VideoFrameProcessingException;
@Override @Override
public void queueInputFrame( public void queueInputFrame(
GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) { GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
framesReceived++;
if (framesReceived % n == 0) { if (framesReceived % n == 0) {
super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs); super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
} else { } else {
getInputListener().onInputFrameProcessed(inputTexture); getInputListener().onInputFrameProcessed(inputTexture);
getInputListener().onReadyToAcceptInputFrame(); getInputListener().onReadyToAcceptInputFrame();
} }
framesReceived++;
} }
@Override @Override

View File

@ -73,7 +73,11 @@ import androidx.media3.common.VideoFrameProcessor;
/** /**
* Sets information about the input frames. * Sets information about the input frames.
* *
* @see VideoFrameProcessor#setInputFrameInfo * <p>The new input information is applied from the next frame {@linkplain #registerInputFrame
* registered} or {@linkplain #queueInputTexture queued} onwards.
*
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.
*/ */
default void setInputFrameInfo(FrameInfo inputFrameInfo) { default void setInputFrameInfo(FrameInfo inputFrameInfo) {
// Do nothing. // Do nothing.

View File

@ -2142,8 +2142,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
}); });
videoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_SURFACE, videoEffects);
this.initialStreamOffsetUs = initialStreamOffsetUs; this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) { } catch (Exception e) {
throw renderer.createRendererException( throw renderer.createRendererException(
@ -2226,7 +2224,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
*/ */
public void setInputFormat(Format inputFormat) { public void setInputFormat(Format inputFormat) {
checkNotNull(videoFrameProcessor) checkNotNull(videoFrameProcessor)
.setInputFrameInfo( .registerInputStream(
VideoFrameProcessor.INPUT_TYPE_SURFACE,
checkNotNull(videoEffects),
new FrameInfo.Builder(inputFormat.width, inputFormat.height) new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
.build()); .build());

Binary file not shown.

After

Width:  |  Height:  |  Size: 591 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -13,32 +13,33 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package androidx.media3.transformer;
package androidx.media3.test.utils;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
* <p>Reads from an OpenGL texture. Only for use on physical devices. * <p>Reads from an OpenGL texture. Only for use on physical devices.
*/ */
@UnstableApi
public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.BitmapReader { public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.BitmapReader {
// TODO(b/239172735): This outputs an incorrect black output image on emulators. // TODO(b/239172735): This outputs an incorrect black output image on emulators.
private final Map<Long, Bitmap> outputTimestampsToBitmaps; private final Map<Long, Bitmap> outputTimestampsToBitmaps;
private boolean useHighPrecisionColorComponents; private boolean useHighPrecisionColorComponents;
@ -60,6 +61,10 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
return checkStateNotNull(outputBitmap); return checkStateNotNull(outputBitmap);
} }
/**
* @return The output {@link Bitmap} at a given {@code presentationTimeUs}.
* @throws IllegalStateException If no such bitmap is produced.
*/
public Bitmap getBitmap(long presentationTimeUs) { public Bitmap getBitmap(long presentationTimeUs) {
return checkStateNotNull(outputTimestampsToBitmaps.get(presentationTimeUs)); return checkStateNotNull(outputTimestampsToBitmaps.get(presentationTimeUs));
} }
@ -69,6 +74,11 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
return outputTimestampsToBitmaps.keySet(); return outputTimestampsToBitmaps.keySet();
} }
/**
* Reads the given {@code outputTexture}.
*
* <p>The read result can be fetched by calling one of me {@link #getBitmap} methods.
*/
public void readBitmap(GlTextureInfo outputTexture, long presentationTimeUs) public void readBitmap(GlTextureInfo outputTexture, long presentationTimeUs)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
try { try {
@ -83,15 +93,6 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
} }
} }
public void readBitmapAndReleaseTexture(
GlTextureInfo outputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
throws VideoFrameProcessingException {
readBitmap(outputTexture, presentationTimeUs);
releaseOutputTextureCallback.release(presentationTimeUs);
}
private static Bitmap createBitmapFromCurrentGlFrameBuffer( private static Bitmap createBitmapFromCurrentGlFrameBuffer(
int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException { int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException {
if (!useHighPrecisionColorComponents) { if (!useHighPrecisionColorComponents) {

View File

@ -15,7 +15,9 @@
*/ */
package androidx.media3.test.utils; package androidx.media3.test.utils;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE; import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image; import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image;
@ -41,7 +43,6 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoFrameProcessor.InputType;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -71,13 +72,11 @@ public final class VideoFrameProcessorTestRunner {
private float pixelWidthHeightRatio; private float pixelWidthHeightRatio;
private @MonotonicNonNull ColorInfo inputColorInfo; private @MonotonicNonNull ColorInfo inputColorInfo;
private @MonotonicNonNull ColorInfo outputColorInfo; private @MonotonicNonNull ColorInfo outputColorInfo;
private @InputType int inputType;
private OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener; private OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener;
/** Creates a new instance with default values. */ /** Creates a new instance with default values. */
public Builder() { public Builder() {
pixelWidthHeightRatio = DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO; pixelWidthHeightRatio = DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO;
inputType = INPUT_TYPE_SURFACE;
onOutputFrameAvailableListener = unused -> {}; onOutputFrameAvailableListener = unused -> {};
} }
@ -194,18 +193,6 @@ public final class VideoFrameProcessorTestRunner {
return this; return this;
} }
/**
* Sets whether input comes from an external texture. See {@link
* VideoFrameProcessor.Factory#create}.
*
* <p>The default value is {@link VideoFrameProcessor#INPUT_TYPE_SURFACE}.
*/
@CanIgnoreReturnValue
public Builder setInputType(@InputType int inputType) {
this.inputType = inputType;
return this;
}
/** /**
* Sets the method to be called in {@link * Sets the method to be called in {@link
* VideoFrameProcessor.Listener#onOutputFrameAvailableForRendering}. * VideoFrameProcessor.Listener#onOutputFrameAvailableForRendering}.
@ -233,7 +220,6 @@ public final class VideoFrameProcessorTestRunner {
pixelWidthHeightRatio, pixelWidthHeightRatio,
inputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : inputColorInfo, inputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : inputColorInfo,
outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo, outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo,
inputType,
onOutputFrameAvailableListener); onOutputFrameAvailableListener);
} }
} }
@ -251,6 +237,7 @@ public final class VideoFrameProcessorTestRunner {
private final @MonotonicNonNull CountDownLatch videoFrameProcessingEndedLatch; private final @MonotonicNonNull CountDownLatch videoFrameProcessingEndedLatch;
private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException; private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
private final VideoFrameProcessor videoFrameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final ImmutableList<Effect> effects;
private @MonotonicNonNull BitmapReader bitmapReader; private @MonotonicNonNull BitmapReader bitmapReader;
@ -264,7 +251,6 @@ public final class VideoFrameProcessorTestRunner {
float pixelWidthHeightRatio, float pixelWidthHeightRatio,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@InputType int inputType,
OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableForRenderingListener) OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableForRenderingListener)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
this.testId = testId; this.testId = testId;
@ -314,7 +300,7 @@ public final class VideoFrameProcessorTestRunner {
checkNotNull(videoFrameProcessingEndedLatch).countDown(); checkNotNull(videoFrameProcessingEndedLatch).countDown();
} }
}); });
videoFrameProcessor.registerInputStream(inputType, effects); this.effects = effects;
} }
public void processFirstFrameAndEnd() throws Exception { public void processFirstFrameAndEnd() throws Exception {
@ -323,7 +309,9 @@ public final class VideoFrameProcessorTestRunner {
new DecodeOneFrameUtil.Listener() { new DecodeOneFrameUtil.Listener() {
@Override @Override
public void onContainerExtracted(MediaFormat mediaFormat) { public void onContainerExtracted(MediaFormat mediaFormat) {
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.registerInputStream(
INPUT_TYPE_SURFACE,
effects,
new FrameInfo.Builder( new FrameInfo.Builder(
mediaFormat.getInteger(MediaFormat.KEY_WIDTH), mediaFormat.getInteger(MediaFormat.KEY_WIDTH),
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
@ -343,7 +331,9 @@ public final class VideoFrameProcessorTestRunner {
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) { Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) {
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.registerInputStream(
INPUT_TYPE_BITMAP,
effects,
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight()) new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.setOffsetToAddUs(offsetToAddUs) .setOffsetToAddUs(offsetToAddUs)
@ -352,7 +342,9 @@ public final class VideoFrameProcessorTestRunner {
} }
public void queueInputTexture(GlTextureInfo inputTexture, long pts) { public void queueInputTexture(GlTextureInfo inputTexture, long pts) {
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.registerInputStream(
INPUT_TYPE_TEXTURE_ID,
effects,
new FrameInfo.Builder(inputTexture.width, inputTexture.height) new FrameInfo.Builder(inputTexture.width, inputTexture.height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());

View File

@ -15,7 +15,6 @@
*/ */
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE; import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
@ -39,6 +38,7 @@ import androidx.media3.effect.RgbFilter;
import androidx.media3.effect.ScaleAndRotateTransformation; import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.effect.VideoCompositor; import androidx.media3.effect.VideoCompositor;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.io.IOException; import java.io.IOException;
@ -334,7 +334,6 @@ public final class VideoCompositorPixelTest {
return new VideoFrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build()) .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setBitmapReader(textureBitmapReader); .setBitmapReader(textureBitmapReader);
} }

View File

@ -15,7 +15,6 @@
*/ */
package androidx.media3.transformer.mh; package androidx.media3.transformer.mh;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE; import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
@ -28,8 +27,8 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.effect.DefaultVideoFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.transformer.TextureBitmapReader;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.util.Set; import java.util.Set;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -142,16 +141,15 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest {
(outputTexture, (outputTexture,
presentationTimeUs, presentationTimeUs,
releaseOutputTextureCallback, releaseOutputTextureCallback,
unusedSyncObject) -> unusedSyncObject) -> {
checkNotNull(textureBitmapReader) checkNotNull(textureBitmapReader).readBitmap(outputTexture, presentationTimeUs);
.readBitmapAndReleaseTexture( releaseOutputTextureCallback.release(presentationTimeUs);
outputTexture, presentationTimeUs, releaseOutputTextureCallback), },
/* textureOutputCapacity= */ 1) /* textureOutputCapacity= */ 1)
.build(); .build();
return new VideoFrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory) .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL) .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setBitmapReader(textureBitmapReader); .setBitmapReader(textureBitmapReader);
} }

View File

@ -38,17 +38,16 @@ import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider; import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.effect.BitmapOverlay; import androidx.media3.effect.BitmapOverlay;
import androidx.media3.effect.DefaultGlObjectsProvider; import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.OverlayEffect; import androidx.media3.effect.OverlayEffect;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.transformer.AndroidTestUtil; import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.EncoderUtil; import androidx.media3.transformer.EncoderUtil;
import androidx.media3.transformer.TextureBitmapReader;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
@ -537,9 +536,10 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
(outputTexture, (outputTexture,
presentationTimeUs1, presentationTimeUs1,
releaseOutputTextureCallback1, releaseOutputTextureCallback1,
unusedSyncObject) -> unusedSyncObject) -> {
bitmapReader.readBitmapAndReleaseTexture( bitmapReader.readBitmap(outputTexture, presentationTimeUs1);
outputTexture, presentationTimeUs1, releaseOutputTextureCallback1), releaseOutputTextureCallback1.release(presentationTimeUs1);
},
/* textureOutputCapacity= */ 1) /* textureOutputCapacity= */ 1)
.setGlObjectsProvider(contextSharingGlObjectsProvider) .setGlObjectsProvider(contextSharingGlObjectsProvider)
.build(); .build();
@ -550,7 +550,6 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.setInputColorInfo(colorInfo) .setInputColorInfo(colorInfo)
.setOutputColorInfo(colorInfo) .setOutputColorInfo(colorInfo)
.setBitmapReader(bitmapReader) .setBitmapReader(bitmapReader)
.setInputType(VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID)
.setEffects(effects) .setEffects(effects)
.build(); .build();
GlUtil.awaitSyncObject(syncObject); GlUtil.awaitSyncObject(syncObject);
@ -573,9 +572,10 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
(outputTexture, (outputTexture,
presentationTimeUs, presentationTimeUs,
releaseOutputTextureCallback, releaseOutputTextureCallback,
unusedSyncObject) -> unusedSyncObject) -> {
textureBitmapReader.readBitmapAndReleaseTexture( textureBitmapReader.readBitmap(outputTexture, presentationTimeUs);
outputTexture, presentationTimeUs, releaseOutputTextureCallback), releaseOutputTextureCallback.release(presentationTimeUs);
},
/* textureOutputCapacity= */ 1) /* textureOutputCapacity= */ 1)
.build(); .build();
return new VideoFrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()

View File

@ -1,199 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.FrameDropEffect;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.transformer.TextureBitmapReader;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests to ensure {@link FrameDropEffect} outputs the correct frame associated with a chosen
* timestamp.
*/
@RunWith(AndroidJUnit4.class)
public class FrameDropPixelTest {
private static final String ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
private static final String MEDIA3_TEST_PNG_ASSET_PATH =
"media/bitmap/input_images/media3test.png";
private static final String ROTATE_90_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/rotate90.png";
private static final String SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_original.png";
private static final String SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_media3test.png";
private @MonotonicNonNull TextureBitmapReader textureBitmapReader;
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@EnsuresNonNull("textureBitmapReader")
@Before
public void setUp() {
textureBitmapReader = new TextureBitmapReader();
}
@After
public void tearDown() {
checkNotNull(videoFrameProcessorTestRunner).release();
}
@RequiresNonNull("textureBitmapReader")
@Test
public void frameDrop_withDefaultStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs()
throws Exception {
String testId =
"frameDrop_withDefaultStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs";
videoFrameProcessorTestRunner =
createDefaultFrameProcessorTestRunnerBuilder(
testId, FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30));
long expectedPresentationTimeUs1 = 0;
long expectedPresentationTimeUs2 = 32_000;
long expectedPresentationTimeUs3 = 71_000;
Bitmap chosenBitmap1 = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap chosenBitmap2 = readBitmap(MEDIA3_TEST_PNG_ASSET_PATH);
Bitmap droppedFrameBitmap = readBitmap(ROTATE_90_PNG_ASSET_PATH);
queueOneFrameAt(chosenBitmap1, expectedPresentationTimeUs1);
queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 16_000L);
queueOneFrameAt(chosenBitmap2, expectedPresentationTimeUs2);
queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 48_000L);
queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 58_000L);
queueOneFrameAt(chosenBitmap1, expectedPresentationTimeUs3);
queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 86_000L);
videoFrameProcessorTestRunner.endFrameProcessing();
assertThat(textureBitmapReader.getOutputTimestamps())
.containsExactly(
expectedPresentationTimeUs1, expectedPresentationTimeUs2, expectedPresentationTimeUs3)
.inOrder();
assertThat(
getBitmapAveragePixelAbsoluteDifferenceArgb8888(
readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH),
textureBitmapReader.getBitmap(expectedPresentationTimeUs1),
testId))
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
assertThat(
getBitmapAveragePixelAbsoluteDifferenceArgb8888(
readBitmap(SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH),
textureBitmapReader.getBitmap(expectedPresentationTimeUs2),
testId))
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
assertThat(
getBitmapAveragePixelAbsoluteDifferenceArgb8888(
readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH),
textureBitmapReader.getBitmap(expectedPresentationTimeUs3),
testId))
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@RequiresNonNull("textureBitmapReader")
@Test
public void frameDrop_withSimpleStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs()
    throws Exception {
  String testId =
      "frameDrop_withSimpleStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs";
  videoFrameProcessorTestRunner =
      createDefaultFrameProcessorTestRunnerBuilder(
          testId,
          FrameDropEffect.createSimpleFrameDropEffect(
              /* expectedFrameRate= */ 6, /* targetFrameRate= */ 2));
  // With the simple strategy reducing 6 fps to 2 fps, one frame from each queued bitmap is
  // expected to survive, at these timestamps.
  long firstKeptTimeUs = 500_000;
  long secondKeptTimeUs = 1_500_000;

  // First second of input: 4 frames of the original bitmap.
  videoFrameProcessorTestRunner.queueInputBitmap(
      readBitmap(ORIGINAL_PNG_ASSET_PATH),
      /* durationUs= */ C.MICROS_PER_SECOND,
      /* offsetToAddUs= */ 0L,
      /* frameRate= */ 4);
  // Second second of input: 2 frames of the media3 test bitmap.
  videoFrameProcessorTestRunner.queueInputBitmap(
      readBitmap(MEDIA3_TEST_PNG_ASSET_PATH),
      /* durationUs= */ C.MICROS_PER_SECOND,
      /* offsetToAddUs= */ C.MICROS_PER_SECOND,
      /* frameRate= */ 2);
  videoFrameProcessorTestRunner.endFrameProcessing();

  // Exactly the two surviving frames must come out, in presentation order.
  assertThat(textureBitmapReader.getOutputTimestamps())
      .containsExactly(firstKeptTimeUs, secondKeptTimeUs)
      .inOrder();

  // Save the rendered frames for debugging, then compare each against its golden.
  Bitmap firstActualBitmap = textureBitmapReader.getBitmap(firstKeptTimeUs);
  Bitmap secondActualBitmap = textureBitmapReader.getBitmap(secondKeptTimeUs);
  maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual1", firstActualBitmap, /* path= */ null);
  maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual2", secondActualBitmap, /* path= */ null);
  assertThat(
          getBitmapAveragePixelAbsoluteDifferenceArgb8888(
              readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH), firstActualBitmap, testId))
      .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
  assertThat(
          getBitmapAveragePixelAbsoluteDifferenceArgb8888(
              readBitmap(SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH),
              secondActualBitmap,
              testId))
      .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@RequiresNonNull("textureBitmapReader")
private VideoFrameProcessorTestRunner createDefaultFrameProcessorTestRunnerBuilder(
    String testId, FrameDropEffect frameDropEffect) throws VideoFrameProcessingException {
  // Route every rendered frame into the texture bitmap reader instead of a surface, so the
  // test can inspect output timestamps and pixels.
  VideoFrameProcessor.Factory factory =
      new DefaultVideoFrameProcessor.Factory.Builder()
          .setTextureOutput(
              (outputTexture, presentationTimeUs, releaseOutputTextureCallback, token) ->
                  checkNotNull(textureBitmapReader)
                      .readBitmapAndReleaseTexture(
                          outputTexture, presentationTimeUs, releaseOutputTextureCallback),
              /* textureOutputCapacity= */ 1)
          .build();
  // Bitmap input in sRGB, with the frame-drop effect under test as the only effect.
  return new VideoFrameProcessorTestRunner.Builder()
      .setTestId(testId)
      .setVideoFrameProcessorFactory(factory)
      .setInputType(INPUT_TYPE_BITMAP)
      .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
      .setEffects(frameDropEffect)
      .build();
}
/**
 * Queues {@code bitmap} into the {@link VideoFrameProcessor} so that exactly one frame is
 * produced, at {@code presentationTimeUs}.
 */
private void queueOneFrameAt(Bitmap bitmap, long presentationTimeUs) {
  VideoFrameProcessorTestRunner testRunner = checkNotNull(videoFrameProcessorTestRunner);
  // One second of input at one frame per second yields a single frame; the offset places that
  // frame at the requested presentation time.
  testRunner.queueInputBitmap(
      bitmap,
      /* durationUs= */ C.MICROS_PER_SECOND,
      /* offsetToAddUs= */ presentationTimeUs,
      /* frameRate= */ 1);
}
}

View File

@@ -158,8 +158,7 @@ import java.util.concurrent.atomic.AtomicLong;
     Size decodedSize = getDecodedSize(trackFormat);
     videoFrameProcessor.registerInputStream(
         getInputType(checkNotNull(trackFormat.sampleMimeType)),
-        createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation));
-    videoFrameProcessor.setInputFrameInfo(
+        createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
         new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
             .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
             .setOffsetToAddUs(mediaItemOffsetUs.get())