Move GlShaderProgram creation away from VFP creation

PiperOrigin-RevId: 543773418
This commit is contained in:
claincly 2023-06-27 16:51:40 +00:00 committed by Tianyi Feng
parent b358d075a1
commit f98a10f3f2
10 changed files with 70 additions and 131 deletions

View File

@@ -34,14 +34,11 @@ import java.util.concurrent.Executor;
/** /**
* Interface for a video frame processor that applies changes to individual video frames. * Interface for a video frame processor that applies changes to individual video frames.
* *
* <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}. * <p>The changes are specified by {@link Effect} instances passed to {@link #registerInputStream}.
* *
* <p>Manages its input {@link Surface}, which can be accessed via {@link #getInputSurface()}. The * <p>Manages its input {@link Surface}, which can be accessed via {@link #getInputSurface()}. The
* output {@link Surface} must be set by the caller using {@link * output {@link Surface} must be set by the caller using {@link
* #setOutputSurfaceInfo(SurfaceInfo)}. * #setOutputSurfaceInfo(SurfaceInfo)}.
*
* <p>The caller must {@linkplain #registerInputFrame() register} input frames before rendering them
* to the input {@link Surface}.
*/ */
@UnstableApi @UnstableApi
public interface VideoFrameProcessor { public interface VideoFrameProcessor {
@@ -57,7 +54,12 @@ public interface VideoFrameProcessor {
@IntDef({INPUT_TYPE_SURFACE, INPUT_TYPE_BITMAP, INPUT_TYPE_TEXTURE_ID}) @IntDef({INPUT_TYPE_SURFACE, INPUT_TYPE_BITMAP, INPUT_TYPE_TEXTURE_ID})
@interface InputType {} @interface InputType {}
/** Input frames come from a {@link #getInputSurface surface}. */ /**
* Input frames come from a {@link #getInputSurface surface}.
*
* <p>When receiving input from a Surface, the caller must {@linkplain #registerInputFrame()
* register} input frames before rendering them to the input {@link Surface}.
*/
int INPUT_TYPE_SURFACE = 1; int INPUT_TYPE_SURFACE = 1;
/** Input frames come from a {@link Bitmap}. */ /** Input frames come from a {@link Bitmap}. */
@@ -77,8 +79,6 @@ public interface VideoFrameProcessor {
* Creates a new {@link VideoFrameProcessor} instance. * Creates a new {@link VideoFrameProcessor} instance.
* *
* @param context A {@link Context}. * @param context A {@link Context}.
* @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code
* outputColorInfo}'s color space.
* @param debugViewProvider A {@link DebugViewProvider}. * @param debugViewProvider A {@link DebugViewProvider}.
* @param inputColorInfo The {@link ColorInfo} for the input frames. * @param inputColorInfo The {@link ColorInfo} for the input frames.
* @param outputColorInfo The {@link ColorInfo} for the output frames. * @param outputColorInfo The {@link ColorInfo} for the output frames.
@@ -95,7 +95,6 @@ public interface VideoFrameProcessor {
*/ */
VideoFrameProcessor create( VideoFrameProcessor create(
Context context, Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@@ -203,16 +202,14 @@ public interface VideoFrameProcessor {
Surface getInputSurface(); Surface getInputSurface();
/** /**
* Informs the {@code VideoFrameProcessor} that a new input stream will be queued. * Informs the {@code VideoFrameProcessor} that a new input stream will be queued with the list of
* {@link Effect Effects} to apply to the new input stream.
* *
* <p>Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input * <p>Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input
* stream differs from that of the current input stream. * stream differs from that of the current input stream.
* *
* @param inputType The {@link InputType} of the new input stream. * @param inputType The {@link InputType} of the new input stream.
* @param effects The list of {@link Effect effects} to apply to the new input stream. The list is * @param effects The list of {@link Effect effects} to apply to the new input stream.
* ignored for the first input stream registered after {@linkplain Factory#create creating the
* VideoFrameProcessor}. The first input stream will use the effects passed in in {@link
* Factory#create}.
*/ */
// TODO(b/286032822) Merge this and setInputFrameInfo. // TODO(b/286032822) Merge this and setInputFrameInfo.
void registerInputStream(@InputType int inputType, List<Effect> effects); void registerInputStream(@InputType int inputType, List<Effect> effects);

View File

@@ -65,17 +65,17 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
String testId = "imageInput_queueThreeBitmaps_outputsCorrectNumberOfFrames"; String testId = "imageInput_queueThreeBitmaps_outputsCorrectNumberOfFrames";
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
C.MICROS_PER_SECOND, C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,
/* frameRate= */ 2); /* frameRate= */ 2);
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(SCALE_WIDE_PNG_ASSET_PATH), readBitmap(SCALE_WIDE_PNG_ASSET_PATH),
2 * C.MICROS_PER_SECOND, 2 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,
/* frameRate= */ 3); /* frameRate= */ 3);
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH), readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH),
3 * C.MICROS_PER_SECOND, 3 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,
@@ -93,7 +93,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
for (int i = 0; i < 20; i++) { for (int i = 0; i < 20; i++) {
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,
@@ -118,7 +118,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
.build(); .build();
long offsetUs = 1_000_000L; long offsetUs = 1_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs, /* offsetToAddUs= */ offsetUs,
@@ -141,13 +141,13 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
.build(); .build();
long offsetUs1 = 1_000_000L; long offsetUs1 = 1_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs1, /* offsetToAddUs= */ offsetUs1,
/* frameRate= */ 2); /* frameRate= */ 2);
long offsetUs2 = 2_000_000L; long offsetUs2 = 2_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(SCALE_WIDE_PNG_ASSET_PATH), readBitmap(SCALE_WIDE_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs2, /* offsetToAddUs= */ offsetUs2,
@@ -175,13 +175,13 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add) .setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build(); .build();
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,
/* frameRate= */ 2); /* frameRate= */ 2);
videoFrameProcessorTestRunner.endFrameProcessing(); videoFrameProcessorTestRunner.endFrameProcessing();
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ 2 * C.MICROS_PER_SECOND, /* durationUs= */ 2 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,

View File

@@ -164,7 +164,7 @@ public final class DefaultVideoFrameProcessorPixelTest {
Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(IMAGE_TO_VIDEO_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(IMAGE_TO_VIDEO_PNG_ASSET_PATH);
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1);
videoFrameProcessorTestRunner.endFrameProcessing(); videoFrameProcessorTestRunner.endFrameProcessing();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
@@ -193,7 +193,7 @@ public final class DefaultVideoFrameProcessorPixelTest {
Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH);
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1);
videoFrameProcessorTestRunner.endFrameProcessing(); videoFrameProcessorTestRunner.endFrameProcessing();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
@@ -220,7 +220,7 @@ public final class DefaultVideoFrameProcessorPixelTest {
.build(); .build();
Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH); Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1); originalBitmap, C.MICROS_PER_SECOND, /* offsetToAddUs= */ 0L, /* frameRate= */ 1);
videoFrameProcessorTestRunner.endFrameProcessing(); videoFrameProcessorTestRunner.endFrameProcessing();
Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap(); Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

View File

@@ -299,7 +299,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
.build() .build()
.create( .create(
getApplicationContext(), getApplicationContext(),
ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
DebugViewProvider.NONE, DebugViewProvider.NONE,
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
@@ -342,20 +341,17 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
videoFrameProcessingEndedCountDownLatch.countDown(); videoFrameProcessingEndedCountDownLatch.countDown();
} }
})); }));
defaultVideoFrameProcessor
.getTaskExecutor() defaultVideoFrameProcessor.getTaskExecutor().submit(blankFrameProducer::configureGlObjects);
.submit( // A frame needs to be registered despite not queuing any external input to ensure
() -> { // that the video frame processor knows about the stream offset.
blankFrameProducer.configureGlObjects(); checkNotNull(defaultVideoFrameProcessor)
// A frame needs to be registered despite not queuing any external input to ensure .registerInputStream(
// that the video frame processor knows about the stream offset. INPUT_TYPE_SURFACE,
checkNotNull(defaultVideoFrameProcessor) /* effects= */ ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer));
.registerInputStream(INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of()); defaultVideoFrameProcessor.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build());
defaultVideoFrameProcessor.setInputFrameInfo( blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
new FrameInfo.Builder(WIDTH, HEIGHT).build()); defaultVideoFrameProcessor.signalEndOfInput();
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput();
});
videoFrameProcessingEndedCountDownLatch.await(); videoFrameProcessingEndedCountDownLatch.await();
@Nullable @Nullable
Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get(); Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get();

View File

@@ -70,7 +70,6 @@ public class FrameDropTest {
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add) .setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build(); .build();
videoFrameProcessorTestRunner.registerInputStream(INPUT_TYPE_BITMAP);
ImmutableList<Integer> timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86); ImmutableList<Integer> timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86);
for (int timestampMs : timestampsMs) { for (int timestampMs : timestampsMs) {
videoFrameProcessorTestRunner.queueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
@@ -96,7 +95,6 @@ public class FrameDropTest {
/* expectedFrameRate= */ 6, /* targetFrameRate= */ 2)) /* expectedFrameRate= */ 6, /* targetFrameRate= */ 2))
.build(); .build();
videoFrameProcessorTestRunner.registerInputStream(INPUT_TYPE_BITMAP);
videoFrameProcessorTestRunner.queueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
@@ -123,7 +121,7 @@ public class FrameDropTest {
/* expectedFrameRate= */ 3, /* targetFrameRate= */ 3)) /* expectedFrameRate= */ 3, /* targetFrameRate= */ 3))
.build(); .build();
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L, /* offsetToAddUs= */ 0L,

View File

@@ -183,8 +183,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
* <p>All {@link Effect} instances must be {@link GlEffect} instances.
*
* <p>Using HDR {@code inputColorInfo} requires the {@code EXT_YUV_target} OpenGL extension. * <p>Using HDR {@code inputColorInfo} requires the {@code EXT_YUV_target} OpenGL extension.
* *
* <p>Using HDR {@code inputColorInfo} or {@code outputColorInfo} requires OpenGL ES 3.0. * <p>Using HDR {@code inputColorInfo} or {@code outputColorInfo} requires OpenGL ES 3.0.
@@ -219,7 +217,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override @Override
public DefaultVideoFrameProcessor create( public DefaultVideoFrameProcessor create(
Context context, Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@@ -256,7 +253,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
() -> () ->
createOpenGlObjectsAndFrameProcessor( createOpenGlObjectsAndFrameProcessor(
context, context,
effects,
debugViewProvider, debugViewProvider,
inputColorInfo, inputColorInfo,
outputColorInfo, outputColorInfo,
@@ -321,7 +317,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor, VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
Listener listener, Listener listener,
Executor listenerExecutor, Executor listenerExecutor,
ImmutableList<GlShaderProgram> intermediateGlShaderPrograms,
FinalShaderProgramWrapper finalShaderProgramWrapper, FinalShaderProgramWrapper finalShaderProgramWrapper,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
ColorInfo outputColorInfo) { ColorInfo outputColorInfo) {
@@ -354,7 +349,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
DebugTraceUtil.recordVideoFrameProcessorSignalEos(); DebugTraceUtil.recordVideoFrameProcessorSignalEos();
} }
}); });
this.intermediateGlShaderPrograms = new ArrayList<>(intermediateGlShaderPrograms); this.intermediateGlShaderPrograms = new ArrayList<>();
} }
/** Returns the task executor that runs video frame processing tasks. */ /** Returns the task executor that runs video frame processing tasks. */
@@ -373,7 +368,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
* {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information. * {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information.
* *
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain * <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}. * VideoFrameProcessor.Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
* *
* @param width The default width for input buffers, in pixels. * @param width The default width for input buffers, in pixels.
* @param height The default height for input buffers, in pixels. * @param height The default height for input buffers, in pixels.
@@ -417,10 +412,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void registerInputStream(@InputType int inputType, List<Effect> effects) { public void registerInputStream(@InputType int inputType, List<Effect> effects) {
synchronized (lock) { synchronized (lock) {
if (!processingInput) { if (!processingInput) {
videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
inputSwitcher.switchToInput(inputType); inputSwitcher.switchToInput(inputType);
processingInput = true; processingInput = true;
activeEffects.clear();
activeEffects.addAll(effects);
return; return;
} }
} }
@@ -444,35 +438,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
// Shader program recreation must be on GL thread. Currently the calling thread is blocked // Shader program recreation must be on GL thread. Currently the calling thread is blocked
// until all shader programs are recreated, so that DefaultVideoFrameProcessor doesn't receive // until all shader programs are recreated, so that DefaultVideoFrameProcessor doesn't receive
// a new frame from the new input stream prematurely. // a new frame from the new input stream prematurely.
videoFrameProcessingTaskExecutor.submitAndBlock( videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
() -> {
try {
for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) {
intermediateGlShaderPrograms.get(i).release();
}
intermediateGlShaderPrograms.clear();
intermediateGlShaderPrograms.addAll(
createGlShaderPrograms(
context, effects, outputColorInfo, finalShaderProgramWrapper));
} catch (VideoFrameProcessingException e) {
listenerExecutor.execute(() -> listener.onError(e));
return;
}
inputSwitcher.setDownstreamShaderProgram(
getFirst(
intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper));
chainShaderProgramsWithListeners(
glObjectsProvider,
intermediateGlShaderPrograms,
finalShaderProgramWrapper,
videoFrameProcessingTaskExecutor,
listener,
listenerExecutor);
activeEffects.clear();
activeEffects.addAll(effects);
});
} }
inputSwitcher.switchToInput(inputType); inputSwitcher.switchToInput(inputType);
} }
@@ -593,7 +559,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
*/ */
private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor( private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor(
Context context, Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@@ -662,12 +627,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
textureOutputListener, textureOutputListener,
textureOutputCapacity); textureOutputCapacity);
// TODO(b/269424561): Move effect creation to registerInputStream().
// The GlShaderPrograms that should be inserted in between InputSwitcher and
// FinalShaderProgramWrapper.
ImmutableList<GlShaderProgram> intermediateGlShaderPrograms =
createGlShaderPrograms(context, effects, outputColorInfo, finalShaderProgramWrapper);
inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_SURFACE); inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_SURFACE);
if (!ColorInfo.isTransferHdr(inputColorInfo)) { if (!ColorInfo.isTransferHdr(inputColorInfo)) {
// HDR bitmap input is not supported. Bitmaps are always sRGB/Full range/BT.709. // HDR bitmap input is not supported. Bitmaps are always sRGB/Full range/BT.709.
@@ -678,17 +637,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_TEXTURE_ID); inputSwitcher.registerInput(inputColorInfo, INPUT_TYPE_TEXTURE_ID);
} }
inputSwitcher.setDownstreamShaderProgram(
getFirst(intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper));
chainShaderProgramsWithListeners(
glObjectsProvider,
intermediateGlShaderPrograms,
finalShaderProgramWrapper,
videoFrameProcessingTaskExecutor,
listener,
videoFrameProcessorListenerExecutor);
return new DefaultVideoFrameProcessor( return new DefaultVideoFrameProcessor(
context, context,
glObjectsProvider, glObjectsProvider,
@@ -698,7 +646,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
videoFrameProcessingTaskExecutor, videoFrameProcessingTaskExecutor,
listener, listener,
videoFrameProcessorListenerExecutor, videoFrameProcessorListenerExecutor,
intermediateGlShaderPrograms,
finalShaderProgramWrapper, finalShaderProgramWrapper,
renderFramesAutomatically, renderFramesAutomatically,
outputColorInfo); outputColorInfo);
@@ -795,6 +742,33 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
} }
/** Configures the {@link GlShaderProgram} instances for {@code effects}. */
private void configureEffects(List<Effect> effects) throws VideoFrameProcessingException {
if (!intermediateGlShaderPrograms.isEmpty()) {
for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) {
intermediateGlShaderPrograms.get(i).release();
}
intermediateGlShaderPrograms.clear();
}
// The GlShaderPrograms that should be inserted in between InputSwitcher and
// FinalShaderProgramWrapper.
intermediateGlShaderPrograms.addAll(
createGlShaderPrograms(context, effects, outputColorInfo, finalShaderProgramWrapper));
inputSwitcher.setDownstreamShaderProgram(
getFirst(intermediateGlShaderPrograms, /* defaultValue= */ finalShaderProgramWrapper));
chainShaderProgramsWithListeners(
glObjectsProvider,
intermediateGlShaderPrograms,
finalShaderProgramWrapper,
videoFrameProcessingTaskExecutor,
listener,
listenerExecutor);
activeEffects.clear();
activeEffects.addAll(effects);
}
/** /**
* Releases the {@link GlShaderProgram} instances and destroys the OpenGL context. * Releases the {@link GlShaderProgram} instances and destroys the OpenGL context.
* *

View File

@@ -2067,7 +2067,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
videoFrameProcessor = videoFrameProcessor =
videoFrameProcessorFactory.create( videoFrameProcessorFactory.create(
renderer.context, renderer.context,
checkNotNull(videoEffects),
DebugViewProvider.NONE, DebugViewProvider.NONE,
inputAndOutputColorInfos.first, inputAndOutputColorInfos.first,
inputAndOutputColorInfos.second, inputAndOutputColorInfos.second,
@@ -2125,7 +2124,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}); });
videoFrameProcessor.registerInputStream( videoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of()); VideoFrameProcessor.INPUT_TYPE_SURFACE, videoEffects);
this.initialStreamOffsetUs = initialStreamOffsetUs; this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) { } catch (Exception e) {
throw renderer.createRendererException( throw renderer.createRendererException(
@@ -2442,7 +2441,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
public VideoFrameProcessor create( public VideoFrameProcessor create(
Context context, Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@@ -2454,7 +2452,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
.get() .get()
.create( .create(
context, context,
effects,
debugViewProvider, debugViewProvider,
inputColorInfo, inputColorInfo,
outputColorInfo, outputColorInfo,

View File

@@ -15,9 +15,7 @@
*/ */
package androidx.media3.test.utils; package androidx.media3.test.utils;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE; import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image; import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image;
@@ -278,7 +276,6 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor = videoFrameProcessor =
videoFrameProcessorFactory.create( videoFrameProcessorFactory.create(
getApplicationContext(), getApplicationContext(),
effects,
DebugViewProvider.NONE, DebugViewProvider.NONE,
inputColorInfo, inputColorInfo,
outputColorInfo, outputColorInfo,
@@ -314,7 +311,7 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessingEnded = true; videoFrameProcessingEnded = true;
} }
}); });
videoFrameProcessor.registerInputStream(inputType, /* effects= */ ImmutableList.of()); videoFrameProcessor.registerInputStream(inputType, effects);
} }
public void processFirstFrameAndEnd() throws Exception { public void processFirstFrameAndEnd() throws Exception {
@@ -329,8 +326,6 @@ public final class VideoFrameProcessorTestRunner {
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());
videoFrameProcessor.registerInputStream(
INPUT_TYPE_SURFACE, /* effects= */ ImmutableList.of());
videoFrameProcessor.registerInputFrame(); videoFrameProcessor.registerInputFrame();
} }
@@ -343,10 +338,6 @@ public final class VideoFrameProcessorTestRunner {
endFrameProcessing(); endFrameProcessing();
} }
public void registerInputStream(@InputType int inputType) {
videoFrameProcessor.registerInputStream(inputType, ImmutableList.of());
}
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) { Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) {
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
@@ -357,24 +348,11 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
public void registerAndQueueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetToAddUs, float frameRate) {
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.setOffsetToAddUs(offsetToAddUs)
.build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP, /* effects= */ ImmutableList.of());
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
public void queueInputTexture(GlTextureInfo inputTexture, long pts) { public void queueInputTexture(GlTextureInfo inputTexture, long pts) {
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputTexture.getWidth(), inputTexture.getHeight()) new FrameInfo.Builder(inputTexture.getWidth(), inputTexture.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());
videoFrameProcessor.registerInputStream(
INPUT_TYPE_TEXTURE_ID, /* effects= */ ImmutableList.of());
videoFrameProcessor.setOnInputFrameProcessedListener( videoFrameProcessor.setOnInputFrameProcessedListener(
texId -> { texId -> {
try { try {

View File

@@ -68,7 +68,7 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest {
videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build();
long offsetUs = 1_000_000L; long offsetUs = 1_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ 3 * C.MICROS_PER_SECOND, /* durationUs= */ 3 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs, /* offsetToAddUs= */ offsetUs,
@@ -95,13 +95,13 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest {
videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build();
long offsetUs1 = 1_000_000L; long offsetUs1 = 1_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH), readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND, /* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs1, /* offsetToAddUs= */ offsetUs1,
/* frameRate= */ 2); /* frameRate= */ 2);
long offsetUs2 = 2_000_000L; long offsetUs2 = 2_000_000L;
videoFrameProcessorTestRunner.registerAndQueueInputBitmap( videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(MEDIA3_TEST_PNG_ASSET_PATH), readBitmap(MEDIA3_TEST_PNG_ASSET_PATH),
/* durationUs= */ 3 * C.MICROS_PER_SECOND, /* durationUs= */ 3 * C.MICROS_PER_SECOND,
/* offsetToAddUs= */ offsetUs2, /* offsetToAddUs= */ offsetUs2,

View File

@@ -155,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure;
videoFrameProcessor = videoFrameProcessor =
videoFrameProcessorFactory.create( videoFrameProcessorFactory.create(
context, context,
createEffectListWithPresentation(effects, presentation),
debugViewProvider, debugViewProvider,
videoFrameProcessorInputColor, videoFrameProcessorInputColor,
videoFrameProcessorOutputColor, videoFrameProcessorOutputColor,