Correct the DefaultFrameDroppingShaderProgram logic

Copies the previous texture to a temporary texture to ensure the correct frame is queued.

#minor-release
PiperOrigin-RevId: 541972349
commit 09fe0d7390
parent 34f23451e6
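The pattern behind the one-line summary, as the diff below implements it: the program no longer holds a caller-owned input texture across calls (the old code stashed inputTexture in previousTexture and queued or released it one call later, by which point the producer may have reused it). Instead, each input is drawn into a temporary texture the program owns and is handed back to the producer immediately. A hedged sketch of that copy step, using the GlUtil calls that appear in the diff; ownedTexture and snapshotInput are illustrative names, and a current GL context is assumed:

private void snapshotInput(GlTextureInfo inputTexture, long presentationTimeUs)
    throws VideoFrameProcessingException, GlUtil.GlException {
  // Render the input into a framebuffer attached to a texture this class owns...
  GlUtil.focusFramebufferUsingCurrentContext(
      ownedTexture.getFboId(), ownedTexture.getWidth(), ownedTexture.getHeight());
  GlUtil.clearFocusedBuffers();
  drawFrame(inputTexture.getTexId(), presentationTimeUs);
  // ...so the caller-owned input can be released back right away, rather than
  // being held until the strategy decides whether to queue it.
  getInputListener().onInputFrameProcessed(inputTexture);
}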
FrameDropTest.java

@@ -59,8 +59,6 @@ public class FrameDropTest {
     checkNotNull(videoFrameProcessorTestRunner).release();
   }
 
-  // TODO: b/536973872 - When support for testing multiple frames in the output, test whether the
-  // correct frames comes out.
   @RequiresNonNull("actualPresentationTimesUs")
   @Test
   public void frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs()
@@ -72,9 +70,10 @@ public class FrameDropTest {
             .setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
             .build();
 
+    videoFrameProcessorTestRunner.registerInputStream(INPUT_TYPE_BITMAP);
     ImmutableList<Integer> timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86);
     for (int timestampMs : timestampsMs) {
-      videoFrameProcessorTestRunner.registerAndQueueInputBitmap(
+      videoFrameProcessorTestRunner.queueInputBitmap(
           readBitmap(ORIGINAL_PNG_ASSET_PATH),
           /* durationUs= */ C.MICROS_PER_SECOND,
           /* offsetToAddUs= */ timestampMs * 1000L,
BaseGlShaderProgram.java

@@ -44,8 +44,8 @@ import java.util.concurrent.Executor;
  */
 @UnstableApi
 public abstract class BaseGlShaderProgram implements GlShaderProgram {
-  private final TexturePool outputTexturePool;
-  protected InputListener inputListener;
+  protected final TexturePool outputTexturePool;
+  private InputListener inputListener;
   private OutputListener outputListener;
   private ErrorListener errorListener;
   private Executor errorListenerExecutor;
@@ -184,4 +184,17 @@ public abstract class BaseGlShaderProgram implements GlShaderProgram {
       throw new VideoFrameProcessingException(e);
     }
   }
+
+  protected final InputListener getInputListener() {
+    return inputListener;
+  }
+
+  protected final OutputListener getOutputListener() {
+    return outputListener;
+  }
+
+  protected final void onError(Exception e) {
+    errorListenerExecutor.execute(
+        () -> errorListener.onError(VideoFrameProcessingException.from(e)));
+  }
 }
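These accessors exist so same-package subclasses can keep driving the listeners after inputListener became private. A minimal hypothetical subclass; the class name and every-other-frame rule are illustrative, not part of this commit, and it assumes FrameCacheGlShaderProgram's (Context, capacity, useHdr) constructor shown elsewhere in this diff:

package androidx.media3.effect; // where FrameCacheGlShaderProgram lives

import android.content.Context;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;

/* package */ final class EveryOtherFrameShaderProgram extends FrameCacheGlShaderProgram {
  private int framesReceived;

  EveryOtherFrameShaderProgram(Context context, boolean useHdr)
      throws VideoFrameProcessingException {
    super(context, /* capacity= */ 1, useHdr);
  }

  @Override
  public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
    framesReceived++;
    if (framesReceived % 2 == 1) {
      // Keep this frame: the superclass copies it to an output texture.
      super.queueInputFrame(inputTexture, presentationTimeUs);
    } else {
      // Drop this frame, returning it through the new accessors instead of
      // touching the now-private inputListener field directly.
      getInputListener().onInputFrameProcessed(inputTexture);
      getInputListener().onReadyToAcceptInputFrame();
    }
  }
}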
DefaultFrameDroppingShaderProgram.java

@@ -22,14 +22,17 @@ import static java.lang.Math.abs;
 import android.content.Context;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
+import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.util.GlUtil;
+import androidx.media3.common.util.Size;
 
 /**
- * Drops frames by only queuing input frames that are chosen by the frame dropping strategy.
+ * Drops frames by only making selected frames available to the {@link OutputListener}.
  *
- * <p>The strategy used is to queue the current frame, x, with timestamp T_x if and only if one of
- * the following is true:
+ * <p>The current frame, x, with timestamp T_x is {@linkplain OutputListener#onOutputFrameAvailable
+ * made available to the output listener} if and only if one of the following is true:
  *
  * <ul>
  *   <li>x is the first frame,
@@ -40,12 +43,16 @@ import androidx.media3.common.VideoFrameProcessingException;
  * the next frame. The target frame interval is determined from {@code targetFps}.
  */
 /* package */ final class DefaultFrameDroppingShaderProgram extends FrameCacheGlShaderProgram {
+
+  private final GlObjectsProvider glObjectsProvider;
+  private final boolean useHdr;
   private final long targetFrameDeltaUs;
 
-  @Nullable private GlTextureInfo previousTexture;
   private long previousPresentationTimeUs;
   private long lastQueuedPresentationTimeUs;
-  private boolean isPreviousFrameFirstFrame;
+  private int framesReceived;
+  // A temporary texture owned by this class, separate from the outputTexturePool.
+  @Nullable private GlTextureInfo previousTexture;
 
   /**
    * Creates a new instance.
@@ -53,44 +60,107 @@ import androidx.media3.common.VideoFrameProcessingException;
   * @param context The {@link Context}.
   * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
   *     in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
-  * @param targetFps The number of frames per second the output video should roughly have.
+  * @param targetFrameRate The number of frames per second the output video should roughly have.
   */
-  public DefaultFrameDroppingShaderProgram(Context context, boolean useHdr, float targetFps)
+  public DefaultFrameDroppingShaderProgram(Context context, boolean useHdr, float targetFrameRate)
       throws VideoFrameProcessingException {
     super(context, /* capacity= */ 1, useHdr);
-    this.targetFrameDeltaUs = (long) (C.MICROS_PER_SECOND / targetFps);
+    this.useHdr = useHdr;
+    this.targetFrameDeltaUs = (long) (C.MICROS_PER_SECOND / targetFrameRate);
     lastQueuedPresentationTimeUs = C.TIME_UNSET;
     previousPresentationTimeUs = C.TIME_UNSET;
+    glObjectsProvider = new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
   }
 
   @Override
   public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
-    if (previousTexture == null) {
-      super.queueInputFrame(inputTexture, presentationTimeUs);
-      lastQueuedPresentationTimeUs = presentationTimeUs;
-      isPreviousFrameFirstFrame = true;
-    } else if (shouldQueuePreviousFrame(presentationTimeUs)) {
-      super.queueInputFrame(checkNotNull(previousTexture), previousPresentationTimeUs);
-      lastQueuedPresentationTimeUs = previousPresentationTimeUs;
-    } else {
-      inputListener.onInputFrameProcessed(checkNotNull(previousTexture));
-      inputListener.onReadyToAcceptInputFrame();
+    framesReceived++;
+    if (framesReceived == 1) {
+      copyTextureToPreviousFrame(inputTexture, presentationTimeUs);
+      queuePreviousFrame();
+      getInputListener().onInputFrameProcessed(inputTexture);
+      getInputListener().onReadyToAcceptInputFrame();
+      return;
     }
-    previousTexture = inputTexture;
-    previousPresentationTimeUs = presentationTimeUs;
+
+    if (shouldQueuePreviousFrame(presentationTimeUs)) {
+      queuePreviousFrame();
+    }
+
+    copyTextureToPreviousFrame(inputTexture, presentationTimeUs);
+    getInputListener().onInputFrameProcessed(inputTexture);
+    getInputListener().onReadyToAcceptInputFrame();
   }
 
   @Override
   public void signalEndOfCurrentInputStream() {
     super.signalEndOfCurrentInputStream();
     reset();
   }
 
   @Override
   public void flush() {
     super.flush();
     reset();
   }
 
+  @Override
+  public void release() throws VideoFrameProcessingException {
+    super.release();
+    try {
+      if (previousTexture != null) {
+        previousTexture.release();
+      }
+    } catch (GlUtil.GlException e) {
+      throw new VideoFrameProcessingException(e);
+    }
+  }
+
   private void reset() {
+    try {
+      if (previousTexture != null) {
+        previousTexture.release();
+      }
+    } catch (GlUtil.GlException e) {
+      onError(e);
+    }
     lastQueuedPresentationTimeUs = C.TIME_UNSET;
     previousPresentationTimeUs = C.TIME_UNSET;
     previousTexture = null;
+    framesReceived = 0;
   }
 
+  private void copyTextureToPreviousFrame(GlTextureInfo newTexture, long presentationTimeUs) {
+    try {
+      if (previousTexture == null) {
+        int texId = GlUtil.createTexture(newTexture.getWidth(), newTexture.getHeight(), useHdr);
+        previousTexture =
+            glObjectsProvider.createBuffersForTexture(
+                texId, newTexture.getWidth(), newTexture.getHeight());
+      }
+      GlTextureInfo previousTexture = checkNotNull(this.previousTexture);
+      if (previousTexture.getHeight() != newTexture.getHeight()
+          || previousTexture.getWidth() != newTexture.getWidth()) {
+        previousTexture.release();
+        int texId = GlUtil.createTexture(newTexture.getWidth(), newTexture.getHeight(), useHdr);
+        previousTexture =
+            glObjectsProvider.createBuffersForTexture(
+                texId, newTexture.getWidth(), newTexture.getHeight());
+      }
+
+      GlUtil.focusFramebufferUsingCurrentContext(
+          previousTexture.getFboId(), previousTexture.getWidth(), previousTexture.getHeight());
+      GlUtil.clearFocusedBuffers();
+      drawFrame(newTexture.getTexId(), presentationTimeUs);
+      previousPresentationTimeUs = presentationTimeUs;
+      this.previousTexture = previousTexture;
+    } catch (VideoFrameProcessingException | GlUtil.GlException e) {
+      onError(e);
+    }
+  }
+
   private boolean shouldQueuePreviousFrame(long currentPresentationTimeUs) {
-    if (isPreviousFrameFirstFrame) {
-      isPreviousFrameFirstFrame = false;
+    if (framesReceived == 2) {
+      // The previous texture has already been queued when it's the first texture.
       return false;
     }
@@ -100,4 +170,27 @@ import androidx.media3.common.VideoFrameProcessingException;
     return abs(previousFrameTimeDeltaUs - targetFrameDeltaUs)
         < abs(currentFrameTimeDeltaUs - targetFrameDeltaUs);
   }
+
+  private void queuePreviousFrame() {
+    try {
+      GlTextureInfo previousTexture = checkNotNull(this.previousTexture);
+      Size outputTextureSize = configure(previousTexture.getWidth(), previousTexture.getHeight());
+      outputTexturePool.ensureConfigured(
+          outputTextureSize.getWidth(), outputTextureSize.getHeight());
+
+      // Focus on the next free buffer.
+      GlTextureInfo outputTexture = outputTexturePool.useTexture();
+
+      // Copy frame to fbo.
+      GlUtil.focusFramebufferUsingCurrentContext(
+          outputTexture.getFboId(), outputTexture.getWidth(), outputTexture.getHeight());
+      GlUtil.clearFocusedBuffers();
+
+      drawFrame(previousTexture.getTexId(), previousPresentationTimeUs);
+      getOutputListener().onOutputFrameAvailable(outputTexture, previousPresentationTimeUs);
+      lastQueuedPresentationTimeUs = previousPresentationTimeUs;
+    } catch (VideoFrameProcessingException | GlUtil.GlException e) {
+      onError(e);
+    }
+  }
 }
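To sanity-check the corrected rule end to end, the selection logic above can be simulated without GL. This sketch mirrors shouldQueuePreviousFrame() on the timestamps the new FrameDropPixelTest feeds in, at a 30 fps target; it is an illustration, not media3 code:

public final class FrameDropSimulation {
  public static void main(String[] args) {
    long targetFrameDeltaUs = 1_000_000L / 30; // 33_333 us between output frames
    long[] inputTimesUs = {0, 16_000, 32_000, 48_000, 58_000, 71_000, 86_000};

    // The first frame is always made available to the output listener.
    long lastQueuedUs = inputTimesUs[0];
    long previousUs = inputTimesUs[0];
    System.out.println("output: " + lastQueuedUs);

    for (int framesReceived = 2; framesReceived <= inputTimesUs.length; framesReceived++) {
      long currentUs = inputTimesUs[framesReceived - 1];
      // shouldQueuePreviousFrame(): skip when framesReceived == 2 (the previous
      // frame was already queued as the first frame); otherwise queue the
      // previous frame iff its delta to the last queued frame is closer to the
      // target delta than the current frame's delta would be.
      if (framesReceived > 2
          && Math.abs((previousUs - lastQueuedUs) - targetFrameDeltaUs)
              < Math.abs((currentUs - lastQueuedUs) - targetFrameDeltaUs)) {
        System.out.println("output: " + previousUs);
        lastQueuedUs = previousUs;
      }
      previousUs = currentUs; // the copyTextureToPreviousFrame() equivalent
    }
    // The final pending frame (86_000) is discarded at end of stream, matching
    // signalEndOfCurrentInputStream() -> reset(). Prints 0, 32000, 71000,
    // exactly the timestamps FrameDropPixelTest expects.
  }
}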
@@ -59,8 +59,8 @@ import androidx.media3.common.VideoFrameProcessingException;
     if (framesReceived % n == 0) {
       super.queueInputFrame(inputTexture, presentationTimeUs);
     } else {
-      inputListener.onInputFrameProcessed(inputTexture);
-      inputListener.onReadyToAcceptInputFrame();
+      getInputListener().onInputFrameProcessed(inputTexture);
+      getInputListener().onReadyToAcceptInputFrame();
     }
   }
 
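The hunk above is the simple, every-nth-frame strategy. Assuming n is derived as expectedFrameRate / targetFrameRate and that framesReceived is incremented before this check (neither is shown in this hunk), the parameters used by the new FrameDropPixelTest work out as follows:

// Worked example with FrameDropPixelTest's parameters; n's derivation and the
// increment position are assumptions, since they sit outside this hunk.
int n = 6 / 2; // expectedFrameRate= 6, targetFrameRate= 2  =>  n = 3
for (int framesReceived = 1; framesReceived <= 6; framesReceived++) {
  if (framesReceived % n == 0) {
    System.out.println("keep frame #" + framesReceived); // keeps frames 3 and 6
  }
}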
FrameDropPixelTest.java (new file)

@@ -0,0 +1,195 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.transformer.mh;
+
+import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
+import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
+import static com.google.common.truth.Truth.assertThat;
+
+import android.graphics.Bitmap;
+import androidx.media3.common.C;
+import androidx.media3.common.ColorInfo;
+import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.VideoFrameProcessor;
+import androidx.media3.effect.DefaultVideoFrameProcessor;
+import androidx.media3.effect.FrameDropEffect;
+import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
+import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
+import org.checkerframework.checker.nullness.qual.RequiresNonNull;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+/**
+ * Tests to ensure {@link FrameDropEffect} outputs the correct frame associated with a chosen
+ * timestamp.
+ */
+@RunWith(AndroidJUnit4.class)
+public class FrameDropPixelTest {
+  private static final String ORIGINAL_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
+  private static final String MEDIA3_TEST_PNG_ASSET_PATH =
+      "media/bitmap/input_images/media3test.png";
+  private static final String ROTATE_90_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/rotate90.png";
+  private static final String SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_original.png";
+  private static final String SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_media3test.png";
+
+  private @MonotonicNonNull TextureBitmapReader textureBitmapReader;
+  private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
+
+  @EnsuresNonNull("textureBitmapReader")
+  @Before
+  public void setUp() {
+    textureBitmapReader = new TextureBitmapReader();
+  }
+
+  @After
+  public void tearDown() {
+    checkNotNull(videoFrameProcessorTestRunner).release();
+  }
+
+  @RequiresNonNull("textureBitmapReader")
+  @Test
+  public void frameDrop_withDefaultStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs()
+      throws Exception {
+    String testId =
+        "frameDrop_withDefaultStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs";
+    videoFrameProcessorTestRunner =
+        createDefaultFrameProcessorTestRunnerBuilder(
+            testId, FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30));
+
+    long expectedPresentationTimeUs1 = 0;
+    long expectedPresentationTimeUs2 = 32_000;
+    long expectedPresentationTimeUs3 = 71_000;
+    Bitmap chosenBitmap1 = readBitmap(ORIGINAL_PNG_ASSET_PATH);
+    Bitmap chosenBitmap2 = readBitmap(MEDIA3_TEST_PNG_ASSET_PATH);
+    Bitmap droppedFrameBitmap = readBitmap(ROTATE_90_PNG_ASSET_PATH);
+    queueOneFrameAt(chosenBitmap1, expectedPresentationTimeUs1);
+    queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 16_000L);
+    queueOneFrameAt(chosenBitmap2, expectedPresentationTimeUs2);
+    queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 48_000L);
+    queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 58_000L);
+    queueOneFrameAt(chosenBitmap1, expectedPresentationTimeUs3);
+    queueOneFrameAt(droppedFrameBitmap, /* presentationTimeUs= */ 86_000L);
+    videoFrameProcessorTestRunner.endFrameProcessing();
+
+    assertThat(textureBitmapReader.getOutputTimestamps())
+        .containsExactly(
+            expectedPresentationTimeUs1, expectedPresentationTimeUs2, expectedPresentationTimeUs3)
+        .inOrder();
+    assertThat(
+            getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+                readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH),
+                textureBitmapReader.getBitmap(expectedPresentationTimeUs1),
+                testId))
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+    assertThat(
+            getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+                readBitmap(SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH),
+                textureBitmapReader.getBitmap(expectedPresentationTimeUs2),
+                testId))
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+    assertThat(
+            getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+                readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH),
+                textureBitmapReader.getBitmap(expectedPresentationTimeUs3),
+                testId))
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+  }
+
+  @RequiresNonNull("textureBitmapReader")
+  @Test
+  public void frameDrop_withSimpleStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs()
+      throws Exception {
+    String testId =
+        "frameDrop_withSimpleStrategy_outputsCorrectFramesAtTheCorrectPresentationTimesUs";
+    videoFrameProcessorTestRunner =
+        createDefaultFrameProcessorTestRunnerBuilder(
+            testId,
+            FrameDropEffect.createSimpleFrameDropEffect(
+                /* expectedFrameRate= */ 6, /* targetFrameRate= */ 2));
+    long expectedPresentationTimeUs1 = 500_000;
+    long expectedPresentationTimeUs2 = 1_500_000;
+
+    videoFrameProcessorTestRunner.queueInputBitmap(
+        readBitmap(ORIGINAL_PNG_ASSET_PATH),
+        /* durationUs= */ C.MICROS_PER_SECOND,
+        /* offsetToAddUs= */ 0L,
+        /* frameRate= */ 4);
+    videoFrameProcessorTestRunner.queueInputBitmap(
+        readBitmap(MEDIA3_TEST_PNG_ASSET_PATH),
+        /* durationUs= */ C.MICROS_PER_SECOND,
+        /* offsetToAddUs= */ C.MICROS_PER_SECOND,
+        /* frameRate= */ 2);
+    videoFrameProcessorTestRunner.endFrameProcessing();
+
+    assertThat(textureBitmapReader.getOutputTimestamps())
+        .containsExactly(expectedPresentationTimeUs1, expectedPresentationTimeUs2)
+        .inOrder();
+    Bitmap actualBitmap1 = textureBitmapReader.getBitmap(expectedPresentationTimeUs1);
+    maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual1", actualBitmap1, /* path= */ null);
+    Bitmap actualBitmap2 = textureBitmapReader.getBitmap(expectedPresentationTimeUs2);
+    maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual2", actualBitmap2, /* path= */ null);
+    assertThat(
+            getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+                readBitmap(SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH), actualBitmap1, testId))
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+    assertThat(
+            getBitmapAveragePixelAbsoluteDifferenceArgb8888(
+                readBitmap(SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH), actualBitmap2, testId))
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
+  }
+
+  @RequiresNonNull("textureBitmapReader")
+  private VideoFrameProcessorTestRunner createDefaultFrameProcessorTestRunnerBuilder(
+      String testId, FrameDropEffect frameDropEffect) throws VideoFrameProcessingException {
+    VideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
+        new DefaultVideoFrameProcessor.Factory.Builder()
+            .setTextureOutput(
+                checkNotNull(textureBitmapReader)::readBitmapFromTexture,
+                /* textureOutputCapacity= */ 1)
+            .build();
+    return new VideoFrameProcessorTestRunner.Builder()
+        .setTestId(testId)
+        .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
+        .setInputType(INPUT_TYPE_BITMAP)
+        .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
+        .setEffects(frameDropEffect)
+        .build();
+  }
+
+  /**
+   * Queues a {@link Bitmap} into the {@link VideoFrameProcessor} so that exactly one frame is
+   * produced at the given {@code presentationTimeUs}.
+   */
+  private void queueOneFrameAt(Bitmap bitmap, long presentationTimeUs) {
+    checkNotNull(videoFrameProcessorTestRunner)
+        .queueInputBitmap(
+            bitmap,
+            /* durationUs= */ C.MICROS_PER_SECOND,
+            /* offsetToAddUs= */ presentationTimeUs,
+            /* frameRate= */ 1);
+  }
+}
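Two details of this test are worth noting. The dropped frames use a visually distinct bitmap (rotate90), so if the strategy surfaced a neighboring frame instead of the chosen one, the pixel-difference assertions would fail, not just the timestamp assertions. And queueOneFrameAt leans on a frame-count relationship that, under a plausible reading of the runner (its exact placement rule is not shown in this diff), is:

// Assumed frame-count arithmetic behind queueOneFrameAt(); the test's expected
// timestamps imply the single frame lands at offsetToAddUs.
long durationUs = 1_000_000L; // C.MICROS_PER_SECOND
int frameRate = 1;            // frames per second of input
long frameCount = durationUs * frameRate / 1_000_000L; // = 1 frame per bitmap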
TextureBitmapReader.java

@@ -27,9 +27,9 @@ import androidx.media3.common.util.Util;
 import androidx.media3.effect.DefaultVideoFrameProcessor;
 import androidx.media3.test.utils.BitmapPixelTestUtil;
 import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.checker.nullness.qual.Nullable;
@@ -45,7 +45,7 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
   private @MonotonicNonNull Bitmap outputBitmap;
 
   public TextureBitmapReader() {
-    outputTimestampsToBitmaps = new ConcurrentHashMap<>();
+    outputTimestampsToBitmaps = new LinkedHashMap<>();
   }
 
   @Nullable
@@ -64,6 +64,7 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
     return checkStateNotNull(outputTimestampsToBitmaps.get(presentationTimeUs));
   }
 
+  /** Returns the timestamps in the order they were added. */
   public Set<Long> getOutputTimestamps() {
     return outputTimestampsToBitmaps.keySet();
   }
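The swap from ConcurrentHashMap to LinkedHashMap matters here because the new tests assert output order: LinkedHashMap's keySet() iterates in insertion order, which is what the added javadoc promises and what containsExactly(...).inOrder() checks. A self-contained demonstration (not media3 code):

import java.util.LinkedHashMap;
import java.util.Map;

public final class InsertionOrderDemo {
  public static void main(String[] args) {
    Map<Long, String> outputTimestampsToBitmaps = new LinkedHashMap<>();
    outputTimestampsToBitmaps.put(0L, "frame at 0");
    outputTimestampsToBitmaps.put(32_000L, "frame at 32ms");
    outputTimestampsToBitmaps.put(71_000L, "frame at 71ms");
    // Prints [0, 32000, 71000]: keySet() preserves insertion order.
    System.out.println(outputTimestampsToBitmaps.keySet());
  }
}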