From e2882c051b35c8b3f71fb63992680c84ae048668 Mon Sep 17 00:00:00 2001 From: claincly Date: Tue, 12 Sep 2023 09:43:01 -0700 Subject: [PATCH] Implement MultipleInputVideoGraph Also adds DebugTraceUtil category to track MultipleInputVideoGraph events. PiperOrigin-RevId: 564749202 --- .../media3/effect/DebugTraceUtil.java | 7 + .../effect/DefaultVideoFrameProcessor.java | 30 +- .../transformer/MultipleInputVideoGraph.java | 482 ++++++++++++++++++ .../transformer/SingleInputVideoGraph.java | 19 +- .../media3/transformer/Transformer.java | 11 +- .../transformer/TransformerInternal.java | 6 +- .../media3/transformer/VideoGraph.java | 25 +- .../transformer/VideoSampleExporter.java | 18 +- 8 files changed, 560 insertions(+), 38 deletions(-) create mode 100644 libraries/transformer/src/main/java/androidx/media3/transformer/MultipleInputVideoGraph.java diff --git a/libraries/effect/src/main/java/androidx/media3/effect/DebugTraceUtil.java b/libraries/effect/src/main/java/androidx/media3/effect/DebugTraceUtil.java index 092c532fb5..d237972fb3 100644 --- a/libraries/effect/src/main/java/androidx/media3/effect/DebugTraceUtil.java +++ b/libraries/effect/src/main/java/androidx/media3/effect/DebugTraceUtil.java @@ -55,7 +55,9 @@ public final class DebugTraceUtil { EVENT_VFP_QUEUE_BITMAP, EVENT_VFP_QUEUE_TEXTURE, EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE, + EVENT_VFP_OUTPUT_TEXTURE_RENDERED, EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM, + EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED, EVENT_ENCODER_ENCODED_FRAME, EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO, EVENT_MUXER_WRITE_SAMPLE_VIDEO, @@ -84,7 +86,10 @@ public final class DebugTraceUtil { public static final String EVENT_VFP_QUEUE_BITMAP = "VFP-QueueBitmap"; public static final String EVENT_VFP_QUEUE_TEXTURE = "VFP-QueueTexture"; public static final String EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE = "VFP-RenderedToOutputSurface"; + public static final String EVENT_VFP_OUTPUT_TEXTURE_RENDERED = "VFP-OutputTextureRendered"; public static final String EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM = "VFP-FinishOneInputStream"; + public static final String EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED = + "COMP-OutputTextureRendered"; public static final String EVENT_ENCODER_ENCODED_FRAME = "Encoder-EncodedFrame"; public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO = "Muxer-CanWriteSample_Video"; public static final String EVENT_MUXER_WRITE_SAMPLE_VIDEO = "Muxer-WriteSample_Video"; @@ -115,7 +120,9 @@ public final class DebugTraceUtil { EVENT_VFP_QUEUE_BITMAP, EVENT_VFP_QUEUE_TEXTURE, EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE, + EVENT_VFP_OUTPUT_TEXTURE_RENDERED, EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM, + EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED, EVENT_ENCODER_ENCODED_FRAME, EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO, EVENT_MUXER_WRITE_SAMPLE_VIDEO, diff --git a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java index 0e5762a956..89f6a69e5e 100644 --- a/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java +++ b/libraries/effect/src/main/java/androidx/media3/effect/DefaultVideoFrameProcessor.java @@ -15,7 +15,6 @@ */ package androidx.media3.effect; -import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE; import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkState; @@ -87,8 +86,8 @@ public final 
class DefaultVideoFrameProcessor implements VideoFrameProcessor { /** A builder for {@link DefaultVideoFrameProcessor.Factory} instances. */ public static final class Builder { private boolean enableColorTransfers; + @Nullable private ExecutorService executorService; private @MonotonicNonNull GlObjectsProvider glObjectsProvider; - private @MonotonicNonNull ExecutorService executorService; private GlTextureProducer.@MonotonicNonNull Listener textureOutputListener; private int textureOutputCapacity; @@ -97,6 +96,14 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { enableColorTransfers = true; } + private Builder(Factory factory) { + enableColorTransfers = factory.enableColorTransfers; + executorService = factory.executorService; + glObjectsProvider = factory.glObjectsProvider; + textureOutputListener = factory.textureOutputListener; + textureOutputCapacity = factory.textureOutputCapacity; + } + /** * Sets whether to transfer colors to an intermediate color space when applying effects. * @@ -122,18 +129,18 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { /** * Sets the {@link Util#newSingleThreadScheduledExecutor} to execute GL commands from. * - *
<p>
If set, the {@link ExecutorService} must be {@linkplain ExecutorService#shutdown shut - * down} by the caller after all {@linkplain VideoFrameProcessor VideoFrameProcessors} using - * it have been {@linkplain #release released}. + *
<p>
If set to a non-null value, the {@link ExecutorService} must be {@linkplain + * ExecutorService#shutdown shut down} by the caller after all {@linkplain VideoFrameProcessor + * VideoFrameProcessors} using it have been {@linkplain #release released}. * *
<p>
The default value is a new {@link Util#newSingleThreadScheduledExecutor}, owned and - * {@link ExecutorService#shutdown} by the created {@link DefaultVideoFrameProcessor}. + * {@link ExecutorService#shutdown} by the created {@link DefaultVideoFrameProcessor}. Setting + * a {@code null} {@link ExecutorService} is equivalent to using the default value. * * @param executorService The {@link ExecutorService}. */ @CanIgnoreReturnValue - @VisibleForTesting(otherwise = PACKAGE_PRIVATE) - public Builder setExecutorService(ExecutorService executorService) { + public Builder setExecutorService(@Nullable ExecutorService executorService) { this.executorService = executorService; return this; } @@ -156,7 +163,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { * @param textureOutputCapacity The amount of output textures that may be allocated at a time * before texture output blocks. Must be greater than or equal to 1. */ - @VisibleForTesting @CanIgnoreReturnValue public Builder setTextureOutput( GlTextureProducer.Listener textureOutputListener, @@ -197,6 +203,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { this.textureOutputCapacity = textureOutputCapacity; } + public Builder buildUpon() { + return new Builder(this); + } + /** * {@inheritDoc} * @@ -266,6 +276,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { boolean shouldShutdownExecutorService = executorService == null; ExecutorService instanceExecutorService = executorService == null ? Util.newSingleThreadExecutor(THREAD_NAME) : executorService; + VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor = new VideoFrameProcessingTaskExecutor( instanceExecutorService, shouldShutdownExecutorService, listener::onError); @@ -427,6 +438,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { if (!inputStreamRegisteredCondition.isOpen()) { return false; } + inputSwitcher.activeTextureManager().queueInputTexture(textureId, presentationTimeUs); return true; } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/MultipleInputVideoGraph.java b/libraries/transformer/src/main/java/androidx/media3/transformer/MultipleInputVideoGraph.java new file mode 100644 index 0000000000..a12920da78 --- /dev/null +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/MultipleInputVideoGraph.java @@ -0,0 +1,482 @@ +/* + * Copyright 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package androidx.media3.transformer; + +import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID; +import static androidx.media3.common.util.Assertions.checkNotNull; +import static androidx.media3.common.util.Assertions.checkState; +import static androidx.media3.common.util.Assertions.checkStateNotNull; +import static androidx.media3.common.util.Util.contains; +import static androidx.media3.common.util.Util.newSingleThreadScheduledExecutor; +import static androidx.media3.effect.DebugTraceUtil.EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED; +import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_OUTPUT_TEXTURE_RENDERED; +import static androidx.media3.effect.DebugTraceUtil.logEvent; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import android.content.Context; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.opengl.EGLSurface; +import android.util.SparseArray; +import androidx.annotation.Nullable; +import androidx.media3.common.C; +import androidx.media3.common.ColorInfo; +import androidx.media3.common.DebugViewProvider; +import androidx.media3.common.Effect; +import androidx.media3.common.FrameInfo; +import androidx.media3.common.GlObjectsProvider; +import androidx.media3.common.GlTextureInfo; +import androidx.media3.common.VideoFrameProcessingException; +import androidx.media3.common.VideoFrameProcessor; +import androidx.media3.common.util.Consumer; +import androidx.media3.common.util.GlUtil; +import androidx.media3.effect.DefaultGlObjectsProvider; +import androidx.media3.effect.DefaultVideoCompositor; +import androidx.media3.effect.DefaultVideoFrameProcessor; +import androidx.media3.effect.GlTextureProducer; +import androidx.media3.effect.VideoCompositor; +import com.google.common.util.concurrent.MoreExecutors; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** A {@link VideoGraph} that handles multiple input streams. 
*/ +/* package */ final class MultipleInputVideoGraph implements VideoGraph { + + public static final class Factory implements VideoGraph.Factory { + + @Override + public MultipleInputVideoGraph create( + Context context, + ColorInfo inputColorInfo, + ColorInfo outputColorInfo, + Consumer errorConsumer, + DebugViewProvider debugViewProvider, + Listener listener, + Executor listenerExecutor, + List compositionEffects, + long initialTimestampOffsetUs) { + return new MultipleInputVideoGraph( + context, + inputColorInfo, + outputColorInfo, + errorConsumer, + debugViewProvider, + listener, + listenerExecutor, + compositionEffects, + initialTimestampOffsetUs); + } + } + + private static final String SHARED_EXECUTOR_NAME = "Transformer:MultipleInputVideoGraph:Thread"; + + private static final long RELEASE_WAIT_TIME_MS = 1_000; + private static final int PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY = 2; + private static final int COMPOSITOR_TEXTURE_OUTPUT_CAPACITY = 1; + + private final Context context; + private final ColorInfo inputColorInfo; + private final ColorInfo outputColorInfo; + private final Consumer errorConsumer; + private final GlObjectsProvider glObjectsProvider; + private final DebugViewProvider debugViewProvider; + private final Listener listener; + private final Executor listenerExecutor; + private final List compositionEffects; + private final List preProcessingWrappers; + + private final ExecutorService sharedExecutorService; + + private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory; + private final Queue compositorOutputTextures; + private final SparseArray compositorOutputTextureReleases; + + private final long initialTimestampOffsetUs; + + @Nullable private VideoFrameProcessor compositionVideoFrameProcessor; + @Nullable private VideoCompositor videoCompositor; + + private boolean compositionVideoFrameProcessorInputStreamRegistered; + private boolean compositionVideoFrameProcessorInputStreamRegistrationCompleted; + private boolean compositorEnded; + private boolean released; + private long lastRenderedPresentationTimeUs; + + private volatile boolean hasProducedFrameWithTimestampZero; + + // TODO - b/289986435: Remove errorConsumer and use Listener.onError(). + private MultipleInputVideoGraph( + Context context, + ColorInfo inputColorInfo, + ColorInfo outputColorInfo, + Consumer errorConsumer, + DebugViewProvider debugViewProvider, + Listener listener, + Executor listenerExecutor, + List compositionEffects, + long initialTimestampOffsetUs) { + this.context = context; + this.inputColorInfo = inputColorInfo; + this.outputColorInfo = outputColorInfo; + this.errorConsumer = errorConsumer; + this.debugViewProvider = debugViewProvider; + this.listener = listener; + this.listenerExecutor = listenerExecutor; + this.compositionEffects = new ArrayList<>(compositionEffects); + this.initialTimestampOffsetUs = initialTimestampOffsetUs; + lastRenderedPresentationTimeUs = C.TIME_UNSET; + preProcessingWrappers = new ArrayList<>(); + sharedExecutorService = newSingleThreadScheduledExecutor(SHARED_EXECUTOR_NAME); + glObjectsProvider = new SingleContextGlObjectsProvider(); + // TODO - b/289986435: Support injecting VideoFrameProcessor.Factory. + videoFrameProcessorFactory = + new DefaultVideoFrameProcessor.Factory.Builder() + .setGlObjectsProvider(glObjectsProvider) + .setExecutorService(sharedExecutorService) + .build(); + compositorOutputTextures = new ArrayDeque<>(); + compositorOutputTextureReleases = new SparseArray<>(); + } + + /** + * {@inheritDoc} + * + *
<p>
This method must be called at most once. + */ + @Override + public void initialize() throws VideoFrameProcessingException { + checkState( + preProcessingWrappers.isEmpty() + && videoCompositor == null + && compositionVideoFrameProcessor == null + && !released); + + // Setting up the compositionVideoFrameProcessor + compositionVideoFrameProcessor = + videoFrameProcessorFactory.create( + context, + debugViewProvider, + // Pre-processing VideoFrameProcessors have converted the inputColor to outputColor + // already. + /* inputColorInfo= */ outputColorInfo, + outputColorInfo, + /* renderFramesAutomatically= */ true, + /* listenerExecutor= */ MoreExecutors.directExecutor(), + new VideoFrameProcessor.Listener() { + @Override + public void onInputStreamRegistered( + @VideoFrameProcessor.InputType int inputType, + List effects, + FrameInfo frameInfo) { + compositionVideoFrameProcessorInputStreamRegistrationCompleted = true; + queueCompositionOutputInternal(); + } + + @Override + public void onOutputSizeChanged(int width, int height) { + checkNotNull(compositionVideoFrameProcessor) + .setOutputSurfaceInfo(listener.onOutputSizeChanged(width, height)); + } + + @Override + public void onOutputFrameAvailableForRendering(long presentationTimeUs) { + if (presentationTimeUs == 0) { + hasProducedFrameWithTimestampZero = true; + } + lastRenderedPresentationTimeUs = presentationTimeUs; + } + + @Override + public void onError(VideoFrameProcessingException exception) { + handleException(exception); + } + + @Override + public void onEnded() { + listenerExecutor.execute(() -> listener.onEnded(lastRenderedPresentationTimeUs)); + } + }); + // Release the compositor's output texture. + compositionVideoFrameProcessor.setOnInputFrameProcessedListener( + (textureId, syncObject) -> { + checkState(contains(compositorOutputTextureReleases, textureId)); + compositorOutputTextureReleases.get(textureId).release(); + compositorOutputTextureReleases.remove(textureId); + queueCompositionOutputInternal(); + }); + + // Setting up the compositor. + videoCompositor = + new DefaultVideoCompositor( + context, + glObjectsProvider, + new DefaultVideoCompositor.Settings(), + sharedExecutorService, + new VideoCompositor.Listener() { + @Override + public void onError(VideoFrameProcessingException exception) { + handleException(exception); + } + + @Override + public void onEnded() { + compositorEnded = true; + if (compositorOutputTextures.isEmpty()) { + compositionVideoFrameProcessor.signalEndOfInput(); + } else { + queueCompositionOutputInternal(); + } + } + }, + /* textureOutputListener= */ this::processCompositorOutputTexture, + COMPOSITOR_TEXTURE_OUTPUT_CAPACITY); + } + + @Override + public GraphInput createInput() throws VideoFrameProcessingException { + checkStateNotNull(videoCompositor); + + int videoCompositorInputId = videoCompositor.registerInputSource(); + // Creating a new VideoFrameProcessor for the input. + VideoFrameProcessingWrapper preProcessingVideoFrameProcessorWrapper = + new VideoFrameProcessingWrapper( + context, + videoFrameProcessorFactory + .buildUpon() + .setTextureOutput( + // Texture output to compositor. + (textureProducer, texture, presentationTimeUs, syncObject) -> { + logEvent(EVENT_VFP_OUTPUT_TEXTURE_RENDERED, presentationTimeUs); + checkNotNull(videoCompositor) + .queueInputTexture( + videoCompositorInputId, + textureProducer, + texture, + // Color is converted to outputColor in pre processing. 
+ /* colorInfo= */ outputColorInfo, + presentationTimeUs); + }, + PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY) + .build(), + inputColorInfo, + outputColorInfo, + DebugViewProvider.NONE, + listenerExecutor, + new VideoFrameProcessor.Listener() { + @Override + public void onInputStreamRegistered( + @VideoFrameProcessor.InputType int inputType, + List effects, + FrameInfo frameInfo) { + // Do nothing. + } + + @Override + public void onOutputSizeChanged(int width, int height) {} + + @Override + public void onOutputFrameAvailableForRendering(long presentationTimeUs) {} + + @Override + public void onError(VideoFrameProcessingException ex) { + errorConsumer.accept(ExportException.createForVideoFrameProcessingException(ex)); + } + + @Override + public void onEnded() { + checkNotNull(videoCompositor).signalEndOfInputSource(videoCompositorInputId); + } + }, + /* renderFramesAutomatically= */ true, + /* presentation= */ null, + initialTimestampOffsetUs); + preProcessingWrappers.add(preProcessingVideoFrameProcessorWrapper); + return preProcessingVideoFrameProcessorWrapper; + } + + @Override + public boolean hasProducedFrameWithTimestampZero() { + return hasProducedFrameWithTimestampZero; + } + + @Override + public void release() { + if (released) { + return; + } + + // Needs to release the frame processors before their internal executor services are released. + for (int i = 0; i < preProcessingWrappers.size(); i++) { + preProcessingWrappers.get(i).release(); + } + preProcessingWrappers.clear(); + + if (videoCompositor != null) { + videoCompositor.release(); + videoCompositor = null; + } + + if (compositionVideoFrameProcessor != null) { + compositionVideoFrameProcessor.release(); + compositionVideoFrameProcessor = null; + } + + sharedExecutorService.shutdown(); + try { + sharedExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + listenerExecutor.execute(() -> listener.onError(VideoFrameProcessingException.from(e))); + } + + released = true; + } + + private void handleException(Exception e) { + errorConsumer.accept( + ExportException.createForVideoFrameProcessingException( + e instanceof VideoFrameProcessingException + ? (VideoFrameProcessingException) e + : VideoFrameProcessingException.from(e))); + } + + private void processCompositorOutputTexture( + GlTextureProducer textureProducer, + GlTextureInfo outputTexture, + long presentationTimeUs, + long syncObject) { + checkStateNotNull(compositionVideoFrameProcessor); + checkState(!compositorEnded); + logEvent(EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED, presentationTimeUs); + + compositorOutputTextures.add( + new CompositorOutputTextureInfo(outputTexture, presentationTimeUs)); + compositorOutputTextureReleases.put( + outputTexture.texId, + new CompositorOutputTextureRelease(textureProducer, presentationTimeUs)); + + if (!compositionVideoFrameProcessorInputStreamRegistered) { + checkNotNull(compositionVideoFrameProcessor) + .registerInputStream( + INPUT_TYPE_TEXTURE_ID, + compositionEffects, + new FrameInfo.Builder(outputTexture.width, outputTexture.height).build()); + compositionVideoFrameProcessorInputStreamRegistered = true; + // Return as the VideoFrameProcessor rejects input textures until the input is registered. 
+ return; + } + queueCompositionOutputInternal(); + } + + private void queueCompositionOutputInternal() { + checkStateNotNull(compositionVideoFrameProcessor); + if (!compositionVideoFrameProcessorInputStreamRegistrationCompleted) { + return; + } + + @Nullable CompositorOutputTextureInfo outputTexture = compositorOutputTextures.peek(); + if (outputTexture == null) { + return; + } + + checkState( + checkNotNull(compositionVideoFrameProcessor) + .queueInputTexture( + outputTexture.glTextureInfo.texId, outputTexture.presentationTimeUs)); + compositorOutputTextures.remove(); + if (compositorEnded && compositorOutputTextures.isEmpty()) { + checkNotNull(compositionVideoFrameProcessor).signalEndOfInput(); + } + } + + private static final class CompositorOutputTextureInfo { + public final GlTextureInfo glTextureInfo; + public final long presentationTimeUs; + + private CompositorOutputTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) { + this.glTextureInfo = glTextureInfo; + this.presentationTimeUs = presentationTimeUs; + } + } + + private static final class CompositorOutputTextureRelease { + private final GlTextureProducer textureProducer; + private final long presentationTimeUs; + + public CompositorOutputTextureRelease( + GlTextureProducer textureProducer, long presentationTimeUs) { + this.textureProducer = textureProducer; + this.presentationTimeUs = presentationTimeUs; + } + + public void release() { + textureProducer.releaseOutputTexture(presentationTimeUs); + } + } + + /** + * A {@link GlObjectsProvider} that creates a new {@link EGLContext} in {@link #createEglContext} + * with the same shared EGLContext. + */ + private static final class SingleContextGlObjectsProvider implements GlObjectsProvider { + private final GlObjectsProvider glObjectsProvider; + private @MonotonicNonNull EGLContext singleEglContext; + + public SingleContextGlObjectsProvider() { + this.glObjectsProvider = new DefaultGlObjectsProvider(); + } + + @Override + public EGLContext createEglContext( + EGLDisplay eglDisplay, int openGlVersion, int[] configAttributes) + throws GlUtil.GlException { + if (singleEglContext == null) { + singleEglContext = + glObjectsProvider.createEglContext(eglDisplay, openGlVersion, configAttributes); + } + return singleEglContext; + } + + @Override + public EGLSurface createEglSurface( + EGLDisplay eglDisplay, + Object surface, + @C.ColorTransfer int colorTransfer, + boolean isEncoderInputSurface) + throws GlUtil.GlException { + return glObjectsProvider.createEglSurface( + eglDisplay, surface, colorTransfer, isEncoderInputSurface); + } + + @Override + public EGLSurface createFocusedPlaceholderEglSurface( + EGLContext eglContext, EGLDisplay eglDisplay) throws GlUtil.GlException { + return glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay); + } + + @Override + public GlTextureInfo createBuffersForTexture(int texId, int width, int height) + throws GlUtil.GlException { + return glObjectsProvider.createBuffersForTexture(texId, width, height); + } + } +} diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java index c51b5ae5f3..0c117aff3f 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java @@ -125,7 +125,17 @@ import java.util.concurrent.Executor; *
<p>
This method must be called at most once. */ @Override - public void initialize() throws VideoFrameProcessingException { + public void initialize() { + // Initialization is deferred to createInput(). + } + + /** + * {@inheritDoc} + * + *
<p>
This method must only be called once. + */ + @Override + public GraphInput createInput() throws VideoFrameProcessingException { checkStateNotNull(videoFrameProcessingWrapper == null && !released); videoFrameProcessingWrapper = @@ -175,12 +185,7 @@ import java.util.concurrent.Executor; renderFramesAutomatically, presentation, initialTimestampOffsetUs); - } - - /** Returns the {@link GraphInput}. */ - @Override - public GraphInput getInput() { - return checkNotNull(videoFrameProcessingWrapper); + return videoFrameProcessingWrapper; } @Override diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/Transformer.java b/libraries/transformer/src/main/java/androidx/media3/transformer/Transformer.java index 0bee9c8058..3a8a1c6f15 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/Transformer.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/Transformer.java @@ -782,12 +782,10 @@ public final class Transformer { *
<p>
This method is under development. A {@link Composition} must meet the following conditions: * *

* *
<p>
{@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the @@ -972,11 +970,6 @@ public final class Transformer { private void startInternal( Composition composition, MuxerWrapper muxerWrapper, ComponentListener componentListener) { checkArgument(composition.effects.audioProcessors.isEmpty()); - // Only supports Presentation in video effects. - ImmutableList videoEffects = composition.effects.videoEffects; - checkArgument( - videoEffects.isEmpty() - || (videoEffects.size() == 1 && videoEffects.get(0) instanceof Presentation)); verifyApplicationThread(); checkState(transformerInternal == null, "There is already an export in progress."); HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null); diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java index 76f3a5fc99..f7391c27b8 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java @@ -570,7 +570,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; fallbackListener)); } else { - ImmutableList compositionVideoEffects = composition.effects.videoEffects; // TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP. assetLoaderInputTracker.registerSampleExporter( C.TRACK_TYPE_VIDEO, @@ -578,14 +577,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; context, firstAssetLoaderInputFormat, transformationRequest, - compositionVideoEffects, + composition.effects.videoEffects, videoFrameProcessorFactory, encoderFactory, muxerWrapper, /* errorConsumer= */ this::onError, fallbackListener, debugViewProvider, - videoSampleTimestampOffsetUs)); + videoSampleTimestampOffsetUs, + /* hasMultipleInputs= */ composition.sequences.size() > 1)); } } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java index 4a94f12a28..1043eebe1a 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java @@ -68,14 +68,23 @@ import java.util.concurrent.Executor; * * @param width The new output width in pixels. * @param height The new output width in pixels. - * @return A {@link SurfaceInfo} to which {@link SingleInputVideoGraph} renders to, or {@code - * null} if the output is not needed. + * @return A {@link SurfaceInfo} to which the {@link VideoGraph} renders to, or {@code null} if + * the output is not needed. */ + // TODO - b/289985577: Consider returning void from this method. @Nullable SurfaceInfo onOutputSizeChanged(int width, int height); - /** Called after the {@link SingleInputVideoGraph} has rendered its final output frame. */ + /** Called after the {@link VideoGraph} has rendered its final output frame. */ void onEnded(long finalFramePresentationTimeUs); + + /** + * Called when an exception occurs during video frame processing. + * + *
<p>
If this is called, the calling {@link VideoGraph} must immediately be {@linkplain + * #release() released}. + */ + void onError(VideoFrameProcessingException exception); } /** @@ -93,15 +102,21 @@ import java.util.concurrent.Executor; *
<p>
This method must be called after successfully {@linkplain #initialize() initializing} the * {@code VideoGraph}. * + *
<p>
This method must be called exactly once for every input stream. + *
<p>
If the method throws any {@link Exception}, the caller must call {@link #release}. */ - GraphInput getInput() throws VideoFrameProcessingException; + GraphInput createInput() throws VideoFrameProcessingException; /** * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp. */ boolean hasProducedFrameWithTimestampZero(); - /** Releases the associated resources. */ + /** + * Releases the associated resources. + * + *
<p>
This {@code VideoGraph} instance must not be used after this method is called. + */ void release(); } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java index 0c098fd372..ab1a58702c 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java @@ -84,7 +84,8 @@ import org.checkerframework.dataflow.qual.Pure; Consumer errorConsumer, FallbackListener fallbackListener, DebugViewProvider debugViewProvider, - long initialTimestampOffsetUs) + long initialTimestampOffsetUs, + boolean hasMultipleInputs) throws ExportException { // TODO(b/278259383) Consider delaying configuration of VideoSampleExporter to use the decoder // output format instead of the extractor output format, to match AudioSampleExporter behavior. @@ -143,7 +144,9 @@ import org.checkerframework.dataflow.qual.Pure; videoGraph = new VideoGraphWrapper( context, - new SingleInputVideoGraph.Factory(videoFrameProcessorFactory), + hasMultipleInputs + ? new MultipleInputVideoGraph.Factory() + : new SingleInputVideoGraph.Factory(videoFrameProcessorFactory), videoGraphInputColor, videoGraphOutputColor, errorConsumer, @@ -159,7 +162,7 @@ import org.checkerframework.dataflow.qual.Pure; public GraphInput getInput(EditedMediaItem editedMediaItem, Format format) throws ExportException { try { - return videoGraph.getInput(); + return videoGraph.createInput(); } catch (VideoFrameProcessingException e) { throw ExportException.createForVideoFrameProcessingException(e); } @@ -517,14 +520,19 @@ import org.checkerframework.dataflow.qual.Pure; } } + @Override + public void onError(VideoFrameProcessingException e) { + errorConsumer.accept(ExportException.createForVideoFrameProcessingException(e)); + } + @Override public void initialize() throws VideoFrameProcessingException { videoGraph.initialize(); } @Override - public GraphInput getInput() throws VideoFrameProcessingException { - return videoGraph.getInput(); + public GraphInput createInput() throws VideoFrameProcessingException { + return videoGraph.createInput(); } @Override
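
Note on the factory-sharing pattern this patch relies on: MultipleInputVideoGraph builds one base DefaultVideoFrameProcessor.Factory carrying a shared ExecutorService and GlObjectsProvider, then derives a per-input factory via the new buildUpon() plus setTextureOutput(), so every pre-processing VideoFrameProcessor runs GL commands on the same thread and hands its frames downstream as textures. The sketch below is illustrative only and not part of the patch; the class and method names (SharedFactoryExample, createBaseFactory, createPerInputFactory, example) and the thread name are placeholders.

import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import java.util.concurrent.ExecutorService;

final class SharedFactoryExample {

  // Base factory: every VideoFrameProcessor created from it, or from a factory derived via
  // buildUpon(), executes GL commands on the same executor and shares one GlObjectsProvider.
  static DefaultVideoFrameProcessor.Factory createBaseFactory(
      GlObjectsProvider glObjectsProvider, ExecutorService sharedGlExecutor) {
    return new DefaultVideoFrameProcessor.Factory.Builder()
        .setGlObjectsProvider(glObjectsProvider)
        .setExecutorService(sharedGlExecutor)
        .build();
  }

  // Per-input factory: buildUpon() copies the shared executor and GlObjectsProvider, and
  // setTextureOutput() delivers rendered frames as textures (e.g. into a compositor)
  // instead of rendering to an output surface.
  static DefaultVideoFrameProcessor.Factory createPerInputFactory(
      DefaultVideoFrameProcessor.Factory baseFactory, int textureOutputCapacity) {
    return baseFactory
        .buildUpon()
        .setTextureOutput(
            (textureProducer, texture, presentationTimeUs, syncObject) -> {
              // Queue the texture downstream, then later call
              // textureProducer.releaseOutputTexture(presentationTimeUs) to return it.
            },
            textureOutputCapacity)
        .build();
  }

  static void example() {
    ExecutorService sharedGlExecutor =
        Util.newSingleThreadScheduledExecutor("Example:SharedGlThread");
    DefaultVideoFrameProcessor.Factory baseFactory =
        createBaseFactory(new DefaultGlObjectsProvider(), sharedGlExecutor);
    DefaultVideoFrameProcessor.Factory perInputFactory =
        createPerInputFactory(baseFactory, /* textureOutputCapacity= */ 2);
    // perInputFactory.create(...) now produces VideoFrameProcessors whose output arrives as
    // textures on the shared GL thread. Per the setExecutorService contract above, the caller
    // must shut down sharedGlExecutor only after releasing all processors that use it.
  }
}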