Effect: Support input via texture ID

PiperOrigin-RevId: 530624195
This commit is contained in:
tofunmi 2023-05-09 16:02:43 +00:00 committed by Tofunmi Adigun-Hameed
parent 2db2de5993
commit 7875131e2a
9 changed files with 472 additions and 7 deletions

View File

@ -46,6 +46,15 @@ import java.util.concurrent.Executor;
@UnstableApi
public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced.
/** A listener for frame processing events. */
@UnstableApi
public interface OnInputFrameProcessedListener {
/**
 * Called when the given input frame has been processed.
 *
 * <p>After this call, the caller may reuse or release the texture (callers in this codebase
 * delete it here).
 *
 * @param textureId The ID of the input texture that has been processed.
 * @throws VideoFrameProcessingException If an error occurs while handling the processed frame.
 */
void onInputFrameProcessed(int textureId) throws VideoFrameProcessingException;
}
/**
* Specifies how the input frames are made available to the {@link VideoFrameProcessor}. One of
* {@link #INPUT_TYPE_SURFACE}, {@link #INPUT_TYPE_BITMAP} or {@link #INPUT_TYPE_TEXTURE_ID}.
@ -165,6 +174,28 @@ public interface VideoFrameProcessor {
// signalled down to the processors.
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/**
 * Provides an input texture ID to the {@code VideoFrameProcessor}.
 *
 * <p>It must be called after the {@link #setOnInputFrameProcessedListener
 * onInputFrameProcessedListener} and the {@link #setInputFrameInfo frameInfo} have been set.
 *
 * <p>Can be called on any thread.
 *
 * @param textureId The ID of the texture queued to the {@code VideoFrameProcessor}.
 * @param presentationTimeUs The presentation time of the queued texture, in microseconds.
 */
void queueInputTexture(int textureId, long presentationTimeUs);
/**
 * Sets the {@link OnInputFrameProcessedListener}, which is notified once a queued input texture
 * has been processed.
 *
 * <p>Can be called on any thread.
 *
 * @param listener The {@link OnInputFrameProcessedListener}.
 */
void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener);
/**
* Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames
* from.
@ -189,7 +220,7 @@ public interface VideoFrameProcessor {
* Sets information about the input frames.
*
* <p>The new input information is applied from the next frame {@linkplain #registerInputFrame()
* registered} onwards.
* registered} or {@linkplain #queueInputTexture queued} onwards.
*
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.

View File

@ -387,6 +387,11 @@ public final class GlUtil {
return eglSurface;
}
/**
 * Gets the {@link EGLContext context} that is current on the calling thread.
 *
 * <p>Per EGL semantics, this is {@code EGL14.EGL_NO_CONTEXT} if no context is current on the
 * calling thread.
 */
public static EGLContext getCurrentContext() {
return EGL14.eglGetCurrentContext();
}
/**
* Collects all OpenGL errors that occurred since this method was last called and throws a {@link
* GlException} with the combined error message.

View File

@ -43,6 +43,7 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.UnstableApi;
@ -72,7 +73,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public interface TextureOutputListener {
/** Called when a texture has been rendered to. */
void onTextureRendered(GlTextureInfo outputTexture, long presentationTimeUs)
throws GlUtil.GlException;
throws GlUtil.GlException, VideoFrameProcessingException;
}
/** A factory for {@link DefaultVideoFrameProcessor} instances. */
@ -136,7 +137,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
}
private final boolean enableColorTransfers;
private final GlObjectsProvider glObjectsProvider;
@Nullable private final TextureOutputListener textureOutputListener;
@ -349,6 +349,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
hasRefreshedNextInputFrameInfo = false;
}
@Override
public void queueInputTexture(int textureId, long presentationTimeUs) {
// Delegates to the texture manager; the interface contract requires setInputFrameInfo and
// setOnInputFrameProcessedListener to have been called first.
checkNotNull(textureManager).queueInputTexture(textureId, presentationTimeUs);
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
// The listener is forwarded to the texture manager, which notifies it once a queued texture
// has been processed.
checkNotNull(textureManager).setOnInputFrameProcessedListener(listener);
}
@Override
public Surface getInputSurface() {
return checkNotNull(textureManager).getInputSurface();
@ -382,6 +392,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
// Adjust dimensions by pixelWidthHeightRatio so downstream frames have a pixel ratio of 1
// (see the VideoFrameProcessor#setInputFrameInfo contract).
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
// Forward to the texture manager so texture-ID input can size its GlTextureInfo correctly.
checkNotNull(textureManager).setInputFrameInfo(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = true;
}
@ -564,7 +575,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
// HDR bitmaps are not supported.
inputSwitcher.registerInput(INPUT_TYPE_BITMAP);
}
if (inputColorInfo.colorTransfer != C.COLOR_TRANSFER_SRGB) {
// Image and textureId concatenation not supported.
inputSwitcher.registerInput(INPUT_TYPE_TEXTURE_ID);
}
inputSwitcher.setDownstreamShaderProgram(effectsShaderPrograms.get(0));
setGlObjectProviderOnShaderPrograms(effectsShaderPrograms, glObjectsProvider);

View File

@ -0,0 +1,115 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import android.util.Pair;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessor;
import java.util.ArrayDeque;
import java.util.Queue;
/**
 * Manages queueing frames and sending them to a given {@link GlShaderProgram
 * consumingGlShaderProgram} at a consumable pace.
 *
 * <p>Frames are stored as a {@link GlTextureInfo} with a {@code presentationTimeUs}.
 *
 * <p>State is guarded by {@code this}; work that touches the shader program is handed off to
 * the {@link VideoFrameProcessingTaskExecutor} rather than run inline under the lock.
 */
// TODO(b/261820382): Converge ChainingGlShaderProgramListener with this class.
/* package */ final class FrameConsumptionManager implements GlShaderProgram.InputListener {
private final GlShaderProgram consumingGlShaderProgram;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// Frames buffered while the consumer has no capacity, oldest first. An entry whose
// presentationTimeUs is C.TIME_END_OF_SOURCE marks a deferred end-of-stream signal.
@GuardedBy("this")
private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
// Number of frames the consuming shader program has signalled it can accept immediately.
@GuardedBy("this")
private int consumingGlShaderProgramInputCapacity;
/**
 * Creates a new instance.
 *
 * @param consumingGlShaderProgram The {@link GlShaderProgram} for which this {@code
 *     FrameConsumptionManager} will be set as the {@link GlShaderProgram.InputListener}.
 * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
 */
public FrameConsumptionManager(
GlShaderProgram consumingGlShaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.consumingGlShaderProgram = consumingGlShaderProgram;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
availableFrames = new ArrayDeque<>();
}
@Override
public synchronized void onReadyToAcceptInputFrame() {
@Nullable Pair<GlTextureInfo, Long> pendingFrame = availableFrames.poll();
if (pendingFrame == null) {
// Nothing buffered: remember the spare capacity for the next queueInputFrame call.
consumingGlShaderProgramInputCapacity++;
return;
}
long presentationTimeUs = pendingFrame.second;
if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
// A deferred end-of-stream marker (see signalEndOfCurrentStream) has now reached the head
// of the queue, so all preceding frames have been forwarded.
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} else {
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ pendingFrame.first, presentationTimeUs));
}
}
/** Drops all buffered frames and resets the consumer's reported capacity. */
@Override
public synchronized void onFlush() {
consumingGlShaderProgramInputCapacity = 0;
availableFrames.clear();
}
/**
 * Queues the given frame: forwards it immediately if the consumer has reported spare capacity,
 * and buffers it otherwise.
 */
public synchronized void queueInputFrame(GlTextureInfo texture, long presentationTimeUs) {
if (consumingGlShaderProgramInputCapacity > 0) {
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ texture, presentationTimeUs));
consumingGlShaderProgramInputCapacity--;
} else {
availableFrames.add(Pair.create(texture, presentationTimeUs));
}
}
/**
 * Notifies the {@link GlShaderProgram consumingGlShaderProgram} that the current input stream is
 * finished once all the pending frames are queued.
 */
public synchronized void signalEndOfCurrentStream() {
if (!availableFrames.isEmpty()) {
// Defer the signal: append an end-of-stream marker behind the buffered frames so it is
// delivered only after they have all been consumed (see onReadyToAcceptInputFrame).
availableFrames.add(Pair.create(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else {
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
}
}
/** See {@link VideoFrameProcessor#getPendingInputFrameCount}. */
public synchronized int getPendingFrameCount() {
return availableFrames.size();
}
}

View File

@ -115,7 +115,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
new BitmapTextureManager(samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break;
case VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID: // fall through
case VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID:
samplingShaderProgram =
DefaultShaderProgram.createWithInternalSampler(
context,
/* matrixTransformations= */ ImmutableList.of(),
/* rgbMatrices= */ ImmutableList.of(),
inputColorInfo,
outputColorInfo,
enableColorTransfers,
inputType);
samplingShaderProgram.setGlObjectsProvider(glObjectsProvider);
textureManager =
new TexIdTextureManager(samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break;
default:
throw new VideoFrameProcessingException("Unsupported input type " + inputType);
}

View File

@ -0,0 +1,123 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import android.opengl.GLES10;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
 * Forwards video frames made available via {@linkplain GLES10#GL_TEXTURE_2D traditional GLES
 * texture} to a {@link GlShaderProgram} for consumption.
 *
 * <p>Public methods in this class can be called from any thread.
 */
/* package */ final class TexIdTextureManager implements TextureManager {
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// Paces hand-off of queued textures to the shader program.
private final FrameConsumptionManager frameConsumptionManager;
// Set via setOnInputFrameProcessedListener; required before queueInputTexture is called.
private @MonotonicNonNull OnInputFrameProcessedListener frameProcessedListener;
// Set via setInputFrameInfo; supplies the width/height used for queued textures.
private @MonotonicNonNull FrameInfo inputFrameInfo;
/**
 * Creates a new instance.
 *
 * @param shaderProgram The {@link GlShaderProgram} for which this {@code texIdTextureManager}
 *     will be set as the {@link GlShaderProgram.InputListener}.
 * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
 */
public TexIdTextureManager(
GlShaderProgram shaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
frameConsumptionManager =
new FrameConsumptionManager(shaderProgram, videoFrameProcessingTaskExecutor);
}
@Override
public void onReadyToAcceptInputFrame() {
videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::onReadyToAcceptInputFrame);
}
@Override
public void onInputFrameProcessed(GlTextureInfo inputTexture) {
// Report the texture ID back to the client, which may then reuse or delete the texture.
videoFrameProcessingTaskExecutor.submit(
() -> checkNotNull(frameProcessedListener).onInputFrameProcessed(inputTexture.texId));
}
@Override
public void onFlush() {
videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::onFlush);
}
@Override
public void queueInputTexture(int inputTexId, long presentationTimeUs) {
// Both the frame info and the processed-frame listener must be set before queueing.
FrameInfo frameInfo = checkNotNull(this.inputFrameInfo);
checkNotNull(frameProcessedListener);
videoFrameProcessingTaskExecutor.submit(
() -> {
// No framebuffer or renderbuffer is associated with an externally provided texture ID.
GlTextureInfo inputTexture =
new GlTextureInfo(
inputTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
frameInfo.width,
frameInfo.height);
frameConsumptionManager.queueInputFrame(inputTexture, presentationTimeUs);
});
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
frameProcessedListener = listener;
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
this.inputFrameInfo = inputFrameInfo;
}
@Override
public int getPendingFrameCount() {
return frameConsumptionManager.getPendingFrameCount();
}
@Override
public void signalEndOfCurrentInputStream() {
videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::signalEndOfCurrentStream);
}
@Override
public void signalEndOfInput() {
// Do nothing.
}
@Override
public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
// Do nothing.
}
@Override
public void release() {
// Do nothing: this manager owns no GL resources; queued textures belong to the client.
}
}

View File

@ -23,6 +23,7 @@ import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
/** A component that handles {@code DefaultVideoFrameProcessor}'s input. */
/* package */ interface TextureManager extends GlShaderProgram.InputListener {
@ -52,6 +53,33 @@ import androidx.media3.common.VideoFrameProcessor;
throw new UnsupportedOperationException();
}
/**
 * Provides an input texture ID to the {@code VideoFrameProcessor}.
 *
 * <p>The default implementation throws, as only texture-ID-input managers support this.
 *
 * @see VideoFrameProcessor#queueInputTexture
 */
default void queueInputTexture(int inputTexId, long presentationTimeUs) {
throw new UnsupportedOperationException();
}
/**
 * Sets the {@link OnInputFrameProcessedListener}.
 *
 * <p>The default implementation throws, as only texture-ID-input managers support this.
 *
 * @see VideoFrameProcessor#setOnInputFrameProcessedListener
 */
default void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
throw new UnsupportedOperationException();
}
/**
 * Sets information about the input frames.
 *
 * <p>The default implementation is a no-op: managers that derive frame sizes elsewhere may
 * ignore this call.
 *
 * @see VideoFrameProcessor#setInputFrameInfo
 */
default void setInputFrameInfo(FrameInfo inputFrameInfo) {
// Do nothing.
}
/**
* See {@link VideoFrameProcessor#getInputSurface}.
*

View File

@ -17,6 +17,7 @@ package androidx.media3.test.utils;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image;
@ -37,9 +38,11 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@ -351,6 +354,23 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
/**
 * Queues the given texture as input to the video frame processor.
 *
 * <p>Frame info is derived from the texture's own dimensions, and the texture is deleted once
 * the processor reports it as processed — the caller must not use it afterwards.
 *
 * @param inputTexture The texture to queue.
 * @param pts The presentation time of the texture, in microseconds.
 */
public void queueInputTexture(GlTextureInfo inputTexture, long pts) {
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputTexture.width, inputTexture.height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_TEXTURE_ID);
// Clean up: delete the input texture as soon as the processor is done with it.
videoFrameProcessor.setOnInputFrameProcessedListener(
texId -> {
try {
GlUtil.deleteTexture(texId);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
});
videoFrameProcessor.queueInputTexture(inputTexture.texId, pts);
}
public void endFrameProcessing() throws InterruptedException {
endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS);
}

View File

@ -22,6 +22,7 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIX
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.test.utils.VideoFrameProcessorTestRunner.VIDEO_FRAME_PROCESSING_WAIT_MS;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
@ -34,11 +35,14 @@ import android.graphics.Bitmap;
import android.view.Surface;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.media3.effect.BitmapOverlay;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.GlEffect;
import androidx.media3.effect.GlShaderProgram;
@ -46,6 +50,7 @@ import androidx.media3.effect.OverlayEffect;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner.BitmapReader;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.EncoderUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -119,6 +124,45 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
@Test
public void noEffects_textureInput_matchesGoldenFile() throws Exception {
String testId = "noEffects_textureInput_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
TextureBitmapReader producersBitmapReader = new TextureBitmapReader();
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
// Chain two frame processors: the producing processor renders to a texture, and its
// texture-output listener feeds that texture into a second processor via texture-ID input.
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(
(outputTexture, presentationTimeUs) ->
inputTextureIntoVideoFrameProcessor(
testId, consumersBitmapReader, outputTexture, presentationTimeUs))
.build();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(producersBitmapReader)
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
texIdProducingVideoFrameProcessorTestRunner.processFirstFrameAndEnd();
texIdProducingVideoFrameProcessorTestRunner.release();
// The consumer's output (i.e. the output of the whole two-processor chain) is compared
// against the golden file; with no effects, it should match the original frame.
Bitmap actualBitmap = consumersBitmapReader.getBitmap();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
public void bitmapOverlay_matchesGoldenFile() throws Exception {
String testId = "bitmapOverlay_matchesGoldenFile";
@ -146,6 +190,48 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
@Test
public void bitmapOverlay_textureInput_matchesGoldenFile() throws Exception {
String testId = "bitmapOverlay_textureInput_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
TextureBitmapReader producersBitmapReader = new TextureBitmapReader();
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
// Chain two frame processors: the producing processor applies the overlay effect and renders
// to a texture, which is fed into a second processor via texture-ID input.
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(
(outputTexture, presentationTimeUs) ->
inputTextureIntoVideoFrameProcessor(
testId, consumersBitmapReader, outputTexture, presentationTimeUs))
.build();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(producersBitmapReader)
.setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
.build();
texIdProducingVideoFrameProcessorTestRunner.processFirstFrameAndEnd();
texIdProducingVideoFrameProcessorTestRunner.release();
Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);
// The consumer's output should carry through the overlay applied by the producer.
Bitmap actualBitmap = consumersBitmapReader.getBitmap();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
String testId = "noEffects_hlg10Input_matchesGoldenFile";
@ -284,6 +370,36 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
}
/**
 * Feeds the given texture into a second {@code DefaultVideoFrameProcessor} that takes
 * texture-ID input, then waits for that processor to finish.
 *
 * @param testId The test ID, used for naming the inner test runner.
 * @param bitmapReader Receives the consuming processor's rendered output texture.
 * @param texture The texture produced by the upstream frame processor.
 * @param presentationTimeUs The presentation time of {@code texture}, in microseconds.
 * @throws VideoFrameProcessingException If frame processing is interrupted.
 */
private void inputTextureIntoVideoFrameProcessor(
String testId,
TextureBitmapReader bitmapReader,
GlTextureInfo texture,
long presentationTimeUs)
throws VideoFrameProcessingException {
// Share the current (producer's) EGL context so the consuming processor can sample the
// producer's texture.
GlObjectsProvider contextSharingGlObjectsProvider =
new DefaultGlObjectsProvider(GlUtil.getCurrentContext());
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(bitmapReader::readBitmapFromTexture)
.setGlObjectsProvider(contextSharingGlObjectsProvider)
.build();
videoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(bitmapReader)
.setInputType(VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID)
.build();
videoFrameProcessorTestRunner.queueInputTexture(texture, presentationTimeUs);
try {
// Use half the usual timeout: this runs nested inside the producer's processing wait.
videoFrameProcessorTestRunner.endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS / 2);
} catch (InterruptedException e) {
throw new VideoFrameProcessingException(e);
}
}
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) {
TextureBitmapReader textureBitmapReader = new TextureBitmapReader();
@ -303,8 +419,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
*
* <p>Reads from an OpenGL texture. Only for use on physical devices.
*/
private static final class TextureBitmapReader
implements VideoFrameProcessorTestRunner.BitmapReader {
private static final class TextureBitmapReader implements BitmapReader {
// TODO(b/239172735): This outputs an incorrect black output image on emulators.
private boolean useHighPrecisionColorComponents;