From f98a10f3f26148b0b15474f52c40f2098264c6ba Mon Sep 17 00:00:00 2001 From: claincly Date: Tue, 27 Jun 2023 16:51:40 +0000 Subject: [PATCH] Move GlShaderProgram creation away from VFP creation PiperOrigin-RevId: 543773418 --- .../media3/common/VideoFrameProcessor.java | 23 +++-- ...deoFrameProcessorImageFrameOutputTest.java | 18 ++-- .../DefaultVideoFrameProcessorPixelTest.java | 6 +- ...FrameProcessorVideoFrameRenderingTest.java | 26 +++--- .../androidx/media3/effect/FrameDropTest.java | 4 +- .../effect/DefaultVideoFrameProcessor.java | 88 +++++++------------ .../video/MediaCodecVideoRenderer.java | 5 +- .../utils/VideoFrameProcessorTestRunner.java | 24 +---- ...ocessorMultipleTextureOutputPixelTest.java | 6 +- .../transformer/VideoSamplePipeline.java | 1 - 10 files changed, 70 insertions(+), 131 deletions(-) diff --git a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java index 4735371171..373ab2e5d9 100644 --- a/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java +++ b/libraries/common/src/main/java/androidx/media3/common/VideoFrameProcessor.java @@ -34,14 +34,11 @@ import java.util.concurrent.Executor; /** * Interface for a video frame processor that applies changes to individual video frames. * - *

<p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}.
+ * <p>The changes are specified by {@link Effect} instances passed to {@link #registerInputStream}.
  *
  * <p>Manages its input {@link Surface}, which can be accessed via {@link #getInputSurface()}. The
  *     output {@link Surface} must be set by the caller using {@link
  *     #setOutputSurfaceInfo(SurfaceInfo)}.
- *
- * <p>The caller must {@linkplain #registerInputFrame() register} input frames before rendering them
- *     to the input {@link Surface}.
  */
 @UnstableApi
 public interface VideoFrameProcessor {
@@ -57,7 +54,12 @@ public interface VideoFrameProcessor {
   @IntDef({INPUT_TYPE_SURFACE, INPUT_TYPE_BITMAP, INPUT_TYPE_TEXTURE_ID})
   @interface InputType {}
 
-  /** Input frames come from a {@link #getInputSurface surface}. */
+  /**
+   * Input frames come from a {@link #getInputSurface surface}.
+   *
+   * <p>
When receiving input from a Surface, the caller must {@linkplain #registerInputFrame() + * register} input frames before rendering them to the input {@link Surface}. + */ int INPUT_TYPE_SURFACE = 1; /** Input frames come from a {@link Bitmap}. */ @@ -77,8 +79,6 @@ public interface VideoFrameProcessor { * Creates a new {@link VideoFrameProcessor} instance. * * @param context A {@link Context}. - * @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code - * outputColorInfo}'s color space. * @param debugViewProvider A {@link DebugViewProvider}. * @param inputColorInfo The {@link ColorInfo} for the input frames. * @param outputColorInfo The {@link ColorInfo} for the output frames. @@ -95,7 +95,6 @@ public interface VideoFrameProcessor { */ VideoFrameProcessor create( Context context, - List effects, DebugViewProvider debugViewProvider, ColorInfo inputColorInfo, ColorInfo outputColorInfo, @@ -203,16 +202,14 @@ public interface VideoFrameProcessor { Surface getInputSurface(); /** - * Informs the {@code VideoFrameProcessor} that a new input stream will be queued. + * Informs the {@code VideoFrameProcessor} that a new input stream will be queued with the list of + * {@link Effect Effects} to apply to the new input stream. * *
<p>
Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input * stream differs from that of the current input stream. * * @param inputType The {@link InputType} of the new input stream. - * @param effects The list of {@link Effect effects} to apply to the new input stream. The list is - * ignored for the first input stream registered after {@linkplain Factory#create creating the - * VideoFrameProcessor}. The first input stream will use the effects passed in in {@link - * Factory#create}. + * @param effects The list of {@link Effect effects} to apply to the new input stream. */ // TODO(b/286032822) Merge this and setInputFrameInfo. void registerInputStream(@InputType int inputType, List effects); diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorImageFrameOutputTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorImageFrameOutputTest.java index 98204ed3e9..be659202aa 100644 --- a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorImageFrameOutputTest.java +++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorImageFrameOutputTest.java @@ -65,17 +65,17 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest { String testId = "imageInput_queueThreeBitmaps_outputsCorrectNumberOfFrames"; videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 2); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(SCALE_WIDE_PNG_ASSET_PATH), 2 * C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 3); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH), 3 * C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, @@ -93,7 +93,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest { videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); for (int i = 0; i < 20; i++) { - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, @@ -118,7 +118,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest { .build(); long offsetUs = 1_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs, @@ -141,13 +141,13 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest { .build(); long offsetUs1 = 1_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs1, /* frameRate= */ 2); long offsetUs2 = 2_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(SCALE_WIDE_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs2, @@ -175,13 +175,13 @@ public class 
DefaultVideoFrameProcessorImageFrameOutputTest { .setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add) .build(); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 2); videoFrameProcessorTestRunner.endFrameProcessing(); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ 2 * C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorPixelTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorPixelTest.java index 8bf35f19ca..32f4189be1 100644 --- a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorPixelTest.java +++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorPixelTest.java @@ -164,7 +164,7 @@ public final class DefaultVideoFrameProcessorPixelTest { Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(IMAGE_TO_VIDEO_PNG_ASSET_PATH); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); videoFrameProcessorTestRunner.endFrameProcessing(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); @@ -193,7 +193,7 @@ public final class DefaultVideoFrameProcessorPixelTest { Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); videoFrameProcessorTestRunner.endFrameProcessing(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); @@ -220,7 +220,7 @@ public final class DefaultVideoFrameProcessorPixelTest { .build(); Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); videoFrameProcessorTestRunner.endFrameProcessing(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java index 48c13a2630..99b7e9ea97 100644 --- a/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java +++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/DefaultVideoFrameProcessorVideoFrameRenderingTest.java @@ -299,7 +299,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest { .build() .create( getApplicationContext(), - ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer), DebugViewProvider.NONE, /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, @@ -342,20 +341,17 @@ public final class 
DefaultVideoFrameProcessorVideoFrameRenderingTest { videoFrameProcessingEndedCountDownLatch.countDown(); } })); - defaultVideoFrameProcessor - .getTaskExecutor() - .submit( - () -> { - blankFrameProducer.configureGlObjects(); - // A frame needs to be registered despite not queuing any external input to ensure - // that the video frame processor knows about the stream offset. - checkNotNull(defaultVideoFrameProcessor) - .registerInputStream(INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of()); - defaultVideoFrameProcessor.setInputFrameInfo( - new FrameInfo.Builder(WIDTH, HEIGHT).build()); - blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs); - defaultVideoFrameProcessor.signalEndOfInput(); - }); + + defaultVideoFrameProcessor.getTaskExecutor().submit(blankFrameProducer::configureGlObjects); + // A frame needs to be registered despite not queuing any external input to ensure + // that the video frame processor knows about the stream offset. + checkNotNull(defaultVideoFrameProcessor) + .registerInputStream( + INPUT_TYPE_SURFACE, + /* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer)); + defaultVideoFrameProcessor.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build()); + blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs); + defaultVideoFrameProcessor.signalEndOfInput(); videoFrameProcessingEndedCountDownLatch.await(); @Nullable Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get(); diff --git a/libraries/effect/src/androidTest/java/androidx/media3/effect/FrameDropTest.java b/libraries/effect/src/androidTest/java/androidx/media3/effect/FrameDropTest.java index 20583691d1..0bfa1b5c15 100644 --- a/libraries/effect/src/androidTest/java/androidx/media3/effect/FrameDropTest.java +++ b/libraries/effect/src/androidTest/java/androidx/media3/effect/FrameDropTest.java @@ -70,7 +70,6 @@ public class FrameDropTest { .setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add) .build(); - videoFrameProcessorTestRunner.registerInputStream(INPUT_TYPE_BITMAP); ImmutableList timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86); for (int timestampMs : timestampsMs) { videoFrameProcessorTestRunner.queueInputBitmap( @@ -96,7 +95,6 @@ public class FrameDropTest { /* expectedFrameRate= */ 6, /* targetFrameRate= */ 2)) .build(); - videoFrameProcessorTestRunner.registerInputStream(INPUT_TYPE_BITMAP); videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, @@ -123,7 +121,7 @@ public class FrameDropTest { /* expectedFrameRate= */ 3, /* targetFrameRate= */ 3)) .build(); - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, diff --git a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java index 4d0c11654d..8e83ffa058 100644 --- a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java +++ b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java @@ -183,8 +183,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { /** * {@inheritDoc} * - *

<p>All {@link Effect} instances must be {@link GlEffect} instances.
- *
  * <p>Using HDR {@code inputColorInfo} requires the {@code EXT_YUV_target} OpenGL extension.
  *
  * <p>
Using HDR {@code inputColorInfo} or {@code outputColorInfo} requires OpenGL ES 3.0. @@ -219,7 +217,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { @Override public DefaultVideoFrameProcessor create( Context context, - List effects, DebugViewProvider debugViewProvider, ColorInfo inputColorInfo, ColorInfo outputColorInfo, @@ -256,7 +253,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { () -> createOpenGlObjectsAndFrameProcessor( context, - effects, debugViewProvider, inputColorInfo, outputColorInfo, @@ -321,7 +317,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor, Listener listener, Executor listenerExecutor, - ImmutableList intermediateGlShaderPrograms, FinalShaderProgramWrapper finalShaderProgramWrapper, boolean renderFramesAutomatically, ColorInfo outputColorInfo) { @@ -354,7 +349,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { DebugTraceUtil.recordVideoFrameProcessorSignalEos(); } }); - this.intermediateGlShaderPrograms = new ArrayList<>(intermediateGlShaderPrograms); + this.intermediateGlShaderPrograms = new ArrayList<>(); } /** Returns the task executor that runs video frame processing tasks. */ @@ -373,7 +368,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { * {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information. * *
<p>
This method must only be called when the {@link VideoFrameProcessor} is {@linkplain - * Factory#create created} with {@link #INPUT_TYPE_SURFACE}. + * VideoFrameProcessor.Factory#create created} with {@link #INPUT_TYPE_SURFACE}. * * @param width The default width for input buffers, in pixels. * @param height The default height for input buffers, in pixels. @@ -417,10 +412,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { public void registerInputStream(@InputType int inputType, List effects) { synchronized (lock) { if (!processingInput) { + videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects)); inputSwitcher.switchToInput(inputType); processingInput = true; - activeEffects.clear(); - activeEffects.addAll(effects); return; } } @@ -444,35 +438,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { // Shader program recreation must be on GL thread. Currently the calling thread is blocked // until all shader programs are recreated, so that DefaultVideoFrameProcessor doesn't receive // a new frame from the new input stream prematurely. - videoFrameProcessingTaskExecutor.submitAndBlock( - () -> { - try { - for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) { - intermediateGlShaderPrograms.get(i).release(); - } - intermediateGlShaderPrograms.clear(); - intermediateGlShaderPrograms.addAll( - createGlShaderPrograms( - context, effects, outputColorInfo, finalShaderProgramWrapper)); - } catch (VideoFrameProcessingException e) { - listenerExecutor.execute(() -> listener.onError(e)); - return; - } - - inputSwitcher.setDownstreamShaderProgram( - getFirst( - intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper)); - chainShaderProgramsWithListeners( - glObjectsProvider, - intermediateGlShaderPrograms, - finalShaderProgramWrapper, - videoFrameProcessingTaskExecutor, - listener, - listenerExecutor); - - activeEffects.clear(); - activeEffects.addAll(effects); - }); + videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects)); } inputSwitcher.switchToInput(inputType); } @@ -593,7 +559,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { */ private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor( Context context, - List effects, DebugViewProvider debugViewProvider, ColorInfo inputColorInfo, ColorInfo outputColorInfo, @@ -662,12 +627,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { textureOutputListener, textureOutputCapacity); - // TODO(b/269424561): Move effect creation to registerInputStream(). - // The GlShaderPrograms that should be inserted in between InputSwitcher and - // FinalShaderProgramWrapper. - ImmutableList intermediateGlShaderPrograms = - createGlShaderPrograms(context, effects, outputColorInfo, finalShaderProgramWrapper); - inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_SURFACE); if (!ColorInfo.isTransferHdr(inputColorInfo)) { // HDR bitmap input is not supported. Bitmaps are always sRGB/Full range/BT.709. 
@@ -678,17 +637,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_TEXTURE_ID); } - inputSwitcher.setDownstreamShaderProgram( - getFirst(intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper)); - - chainShaderProgramsWithListeners( - glObjectsProvider, - intermediateGlShaderPrograms, - finalShaderProgramWrapper, - videoFrameProcessingTaskExecutor, - listener, - videoFrameProcessorListenerExecutor); - return new DefaultVideoFrameProcessor( context, glObjectsProvider, @@ -698,7 +646,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { videoFrameProcessingTaskExecutor, listener, videoFrameProcessorListenerExecutor, - intermediateGlShaderPrograms, finalShaderProgramWrapper, renderFramesAutomatically, outputColorInfo); @@ -795,6 +742,33 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { } } + /** Configures the {@link GlShaderProgram} instances for {@code effects}. */ + private void configureEffects(List effects) throws VideoFrameProcessingException { + if (!intermediateGlShaderPrograms.isEmpty()) { + for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) { + intermediateGlShaderPrograms.get(i).release(); + } + intermediateGlShaderPrograms.clear(); + } + + // The GlShaderPrograms that should be inserted in between InputSwitcher and + // FinalShaderProgramWrapper. + intermediateGlShaderPrograms.addAll( + createGlShaderPrograms(context, effects, outputColorInfo, finalShaderProgramWrapper)); + inputSwitcher.setDownstreamShaderProgram( + getFirst(intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper)); + chainShaderProgramsWithListeners( + glObjectsProvider, + intermediateGlShaderPrograms, + finalShaderProgramWrapper, + videoFrameProcessingTaskExecutor, + listener, + listenerExecutor); + + activeEffects.clear(); + activeEffects.addAll(effects); + } + /** * Releases the {@link GlShaderProgram} instances and destroys the OpenGL context. 
* diff --git a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java index 76ccd9beca..8aae18bbad 100644 --- a/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java +++ b/libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java @@ -2067,7 +2067,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { videoFrameProcessor = videoFrameProcessorFactory.create( renderer.context, - checkNotNull(videoEffects), DebugViewProvider.NONE, inputAndOutputColorInfos.first, inputAndOutputColorInfos.second, @@ -2125,7 +2124,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { }); videoFrameProcessor.registerInputStream( - VideoFrameProcessor.INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of()); + VideoFrameProcessor.INPUT_TYPE_SURFACE, videoEffects); this.initialStreamOffsetUs = initialStreamOffsetUs; } catch (Exception e) { throw renderer.createRendererException( @@ -2442,7 +2441,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { @Override public VideoFrameProcessor create( Context context, - List effects, DebugViewProvider debugViewProvider, ColorInfo inputColorInfo, ColorInfo outputColorInfo, @@ -2454,7 +2452,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { .get() .create( context, - effects, debugViewProvider, inputColorInfo, outputColorInfo, diff --git a/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java b/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java index 8a43c37b49..a610abe438 100644 --- a/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java +++ b/libraries/test_utils/src/main/java/androidx/media3/test/utils/VideoFrameProcessorTestRunner.java @@ -15,9 +15,7 @@ */ package androidx.media3.test.utils; -import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP; import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE; -import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID; import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image; @@ -278,7 +276,6 @@ public final class VideoFrameProcessorTestRunner { videoFrameProcessor = videoFrameProcessorFactory.create( getApplicationContext(), - effects, DebugViewProvider.NONE, inputColorInfo, outputColorInfo, @@ -314,7 +311,7 @@ public final class VideoFrameProcessorTestRunner { videoFrameProcessingEnded = true; } }); - videoFrameProcessor.registerInputStream(inputType, /* effects= */ ImmutableList.of()); + videoFrameProcessor.registerInputStream(inputType, effects); } public void processFirstFrameAndEnd() throws Exception { @@ -329,8 +326,6 @@ public final class VideoFrameProcessorTestRunner { mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) .setPixelWidthHeightRatio(pixelWidthHeightRatio) .build()); - videoFrameProcessor.registerInputStream( - INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of()); videoFrameProcessor.registerInputFrame(); } @@ -343,10 +338,6 @@ public final class VideoFrameProcessorTestRunner { endFrameProcessing(); } - public void registerInputStream(@InputType int inputType) { - 
videoFrameProcessor.registerInputStream(inputType, ImmutableList.of()); - } - public void queueInputBitmap( Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) { videoFrameProcessor.setInputFrameInfo( @@ -357,24 +348,11 @@ public final class VideoFrameProcessorTestRunner { videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); } - public void registerAndQueueInputBitmap( - Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) { - videoFrameProcessor.setInputFrameInfo( - new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight()) - .setPixelWidthHeightRatio(pixelWidthHeightRatio) - .setOffsetToAddUs(offsetToAddUs) - .build()); - videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP, /* effects= */ ImmutableList.of()); - videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); - } - public void queueInputTexture(GlTextureInfo inputTexture, long pts) { videoFrameProcessor.setInputFrameInfo( new FrameInfo.Builder(inputTexture.getWidth(), inputTexture.getHeight()) .setPixelWidthHeightRatio(pixelWidthHeightRatio) .build()); - videoFrameProcessor.registerInputStream( - INPUT_TYPE_TEXTURE_ID, /* effects= */ ImmutableList.of()); videoFrameProcessor.setOnInputFrameProcessedListener( texId -> { try { diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/DefaultVideoFrameProcessorMultipleTextureOutputPixelTest.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/DefaultVideoFrameProcessorMultipleTextureOutputPixelTest.java index 57c65cae6c..59a332ef98 100644 --- a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/DefaultVideoFrameProcessorMultipleTextureOutputPixelTest.java +++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/mh/DefaultVideoFrameProcessorMultipleTextureOutputPixelTest.java @@ -68,7 +68,7 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest { videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build(); long offsetUs = 1_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ 3 * C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs, @@ -95,13 +95,13 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest { videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build(); long offsetUs1 = 1_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(ORIGINAL_PNG_ASSET_PATH), /* durationUs= */ C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs1, /* frameRate= */ 2); long offsetUs2 = 2_000_000L; - videoFrameProcessorTestRunner.registerAndQueueInputBitmap( + videoFrameProcessorTestRunner.queueInputBitmap( readBitmap(MEDIA3_TEST_PNG_ASSET_PATH), /* durationUs= */ 3 * C.MICROS_PER_SECOND, /* offsetToAddUs= */ offsetUs2, diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSamplePipeline.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSamplePipeline.java index 5d6546d302..4b52db679d 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSamplePipeline.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSamplePipeline.java @@ -155,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure; videoFrameProcessor = 
videoFrameProcessorFactory.create( context, - createEffectListWithPresentation(effects, presentation), debugViewProvider, videoFrameProcessorInputColor, videoFrameProcessorOutputColor,
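
Usage sketch (illustrative only, not applied by this patch): the caller-side shape of the change, pieced together from the MediaCodecVideoRenderer and VideoFrameProcessorTestRunner hunks above. Effects are no longer passed to Factory#create; they accompany each registerInputStream call instead. The trailing create() parameters (renderFramesAutomatically, listenerExecutor, listener) and the variable names are assumptions, since the full signature is only partly visible in this diff.

    // Assumed to already exist: videoFrameProcessorFactory, context, inputColorInfo,
    // outputColorInfo, listenerExecutor, listener, and a List<Effect> videoEffects.
    VideoFrameProcessor videoFrameProcessor =
        videoFrameProcessorFactory.create(
            context,
            DebugViewProvider.NONE,
            inputColorInfo,
            outputColorInfo,
            /* renderFramesAutomatically= */ true,
            listenerExecutor,
            listener);
    // Effects now travel with the input stream, so the GlShaderProgram chain is built
    // (and can be rebuilt) when the stream is registered rather than at creation time.
    videoFrameProcessor.registerInputStream(
        VideoFrameProcessor.INPUT_TYPE_SURFACE, /* effects= */ videoEffects);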