diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java
index b28a1af642..4dd801041e 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleInputVideoGraph.java
@@ -16,80 +16,82 @@
package androidx.media3.transformer;
-import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
-import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
-import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
-import android.graphics.Bitmap;
-import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
-import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo;
-import androidx.media3.common.MimeTypes;
-import androidx.media3.common.OnInputFrameProcessedListener;
-import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer;
-import androidx.media3.common.util.Size;
-import androidx.media3.common.util.TimestampIterator;
import androidx.media3.effect.Presentation;
-import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.Executor;
-import java.util.concurrent.atomic.AtomicLong;
-/** Processes decoded video frames from one single input. */
-/* package */ final class SingleInputVideoGraph {
+/** A {@link VideoGraph} that handles one input stream. */
+/* package */ final class SingleInputVideoGraph implements VideoGraph {
- /**
- * Listener for video frame processing events.
- *
-   * <p>The methods are called from the GL thread.
- */
- public interface Listener {
- /**
- * Called when the output size changes.
- *
- * @param width The new output width in pixels.
- * @param height The new output width in pixels.
- * @return A {@link SurfaceInfo} to which {@link SingleInputVideoGraph} renders to, or {@code
- * null} if the output is not needed.
- */
- @Nullable
- SurfaceInfo onOutputSizeChanged(int width, int height);
+ /** A factory for creating a {@link SingleInputVideoGraph}. */
+ public static final class Factory implements VideoGraph.Factory {
- /** Called after the {@link SingleInputVideoGraph} has rendered its final output frame. */
- void onEnded(long finalFramePresentationTimeUs);
+ private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
+
+ public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
+ this.videoFrameProcessorFactory = videoFrameProcessorFactory;
+ }
+
+ @Override
+ public VideoGraph create(
+ Context context,
+ ColorInfo inputColorInfo,
+ ColorInfo outputColorInfo,
+        Consumer<ExportException> errorConsumer,
+ DebugViewProvider debugViewProvider,
+ Listener listener,
+ Executor listenerExecutor,
+        List<Effect> compositionEffects) {
+ @Nullable Presentation presentation = null;
+ for (int i = 0; i < compositionEffects.size(); i++) {
+ Effect effect = compositionEffects.get(i);
+ if (effect instanceof Presentation) {
+ presentation = (Presentation) effect;
+ }
+ }
+ return new SingleInputVideoGraph(
+ context,
+ videoFrameProcessorFactory,
+ inputColorInfo,
+ outputColorInfo,
+ listener,
+ errorConsumer,
+ debugViewProvider,
+ listenerExecutor,
+ /* renderFramesAutomatically= */ true,
+ presentation);
+ }
}
- private final VideoFrameProcessingWrapper videoFrameProcessingWrapper;
+ private final Context context;
+ private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
+ private final ColorInfo inputColorInfo;
+ private final ColorInfo outputColorInfo;
+ private final Listener listener;
+  private final Consumer<ExportException> errorConsumer;
+ private final DebugViewProvider debugViewProvider;
+ private final Executor listenerExecutor;
+ private final boolean renderFramesAutomatically;
+ @Nullable private final Presentation presentation;
+ @Nullable private VideoFrameProcessingWrapper videoFrameProcessingWrapper;
+
+ private boolean released;
private volatile boolean hasProducedFrameWithTimestampZero;
- /**
- * Creates a new instance.
- *
- * @param context A {@link Context}.
- * @param videoFrameProcessorFactory A {@link VideoFrameProcessor.Factory}.
- * @param inputColorInfo The {@link ColorInfo} for the input frames.
- * @param outputColorInfo The {@link ColorInfo} for the output frames.
- * @param listener A {@link Listener}.
- * @param errorConsumer A {@link Consumer} of {@link ExportException}.
- * @param debugViewProvider A {@link DebugViewProvider}.
- * @param listenerExecutor An {@link Executor} on which {@link VideoFrameProcessor.Listener}
- * methods are called.
- * @param renderFramesAutomatically Whether to automatically render output frames. Use {@code
- * false} when controlling the presentation of output frames.
- * @param presentation A {@link Presentation} to apply to processed frames.
- * @throws VideoFrameProcessingException When video frame processing fails.
- */
- public SingleInputVideoGraph(
+ private SingleInputVideoGraph(
Context context,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo inputColorInfo,
@@ -99,8 +101,27 @@ import java.util.concurrent.atomic.AtomicLong;
DebugViewProvider debugViewProvider,
Executor listenerExecutor,
boolean renderFramesAutomatically,
- @Nullable Presentation presentation)
- throws VideoFrameProcessingException {
+ @Nullable Presentation presentation) {
+ this.context = context;
+ this.videoFrameProcessorFactory = videoFrameProcessorFactory;
+ this.inputColorInfo = inputColorInfo;
+ this.outputColorInfo = outputColorInfo;
+ this.listener = listener;
+ this.errorConsumer = errorConsumer;
+ this.debugViewProvider = debugViewProvider;
+ this.listenerExecutor = listenerExecutor;
+ this.renderFramesAutomatically = renderFramesAutomatically;
+ this.presentation = presentation;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * This method must be called at most once.
+ */
+ @Override
+ public void initialize() throws VideoFrameProcessingException {
+    checkState(videoFrameProcessingWrapper == null && !released);
videoFrameProcessingWrapper =
new VideoFrameProcessingWrapper(
@@ -151,150 +172,26 @@ import java.util.concurrent.atomic.AtomicLong;
}
/** Returns the {@link GraphInput}. */
+ @Override
public GraphInput getInput() {
- return videoFrameProcessingWrapper;
+ return checkNotNull(videoFrameProcessingWrapper);
}
- /* package */ boolean hasProducedFrameWithTimestampZero() {
+ @Override
+ public boolean hasProducedFrameWithTimestampZero() {
return hasProducedFrameWithTimestampZero;
}
+ @Override
public void release() {
- videoFrameProcessingWrapper.release();
- }
-
- private static final class VideoFrameProcessingWrapper implements GraphInput {
- private final VideoFrameProcessor videoFrameProcessor;
- private final AtomicLong mediaItemOffsetUs;
- private final ColorInfo inputColorInfo;
-
- @Nullable private final Presentation presentation;
-
- public VideoFrameProcessingWrapper(
- Context context,
- VideoFrameProcessor.Factory videoFrameProcessorFactory,
- ColorInfo inputColorInfo,
- ColorInfo outputColorInfo,
- DebugViewProvider debugViewProvider,
- Executor listenerExecutor,
- VideoFrameProcessor.Listener listener,
- boolean renderFramesAutomatically,
- @Nullable Presentation presentation)
- throws VideoFrameProcessingException {
- this.videoFrameProcessor =
- videoFrameProcessorFactory.create(
- context,
- debugViewProvider,
- inputColorInfo,
- outputColorInfo,
- renderFramesAutomatically,
- listenerExecutor,
- listener);
- this.mediaItemOffsetUs = new AtomicLong();
- this.inputColorInfo = inputColorInfo;
- this.presentation = presentation;
+ if (released) {
+ return;
}
- @Override
- public void onMediaItemChanged(
- EditedMediaItem editedMediaItem,
- long durationUs,
- @Nullable Format trackFormat,
- boolean isLast) {
- if (trackFormat != null) {
- Size decodedSize = getDecodedSize(trackFormat);
- videoFrameProcessor.registerInputStream(
- getInputType(checkNotNull(trackFormat.sampleMimeType)),
- createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
- new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
- .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
- .setOffsetToAddUs(mediaItemOffsetUs.get())
- .build());
- }
- mediaItemOffsetUs.addAndGet(durationUs);
- }
-
- @Override
- public @InputResult int queueInputBitmap(
- Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
- return videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs)
- ? INPUT_RESULT_SUCCESS
- : INPUT_RESULT_TRY_AGAIN_LATER;
- }
-
- @Override
- public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
- videoFrameProcessor.setOnInputFrameProcessedListener(listener);
- }
-
- @Override
- public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
- return videoFrameProcessor.queueInputTexture(texId, presentationTimeUs)
- ? INPUT_RESULT_SUCCESS
- : INPUT_RESULT_TRY_AGAIN_LATER;
- }
-
- @Override
- public Surface getInputSurface() {
- return videoFrameProcessor.getInputSurface();
- }
-
- @Override
- public ColorInfo getExpectedInputColorInfo() {
- return inputColorInfo;
- }
-
- @Override
- public int getPendingVideoFrameCount() {
- return videoFrameProcessor.getPendingInputFrameCount();
- }
-
- @Override
- public boolean registerVideoFrame(long presentationTimeUs) {
- return videoFrameProcessor.registerInputFrame();
- }
-
- @Override
- public void signalEndOfVideoInput() {
- videoFrameProcessor.signalEndOfInput();
- }
-
- public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
- videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
- }
-
- public void release() {
- videoFrameProcessor.release();
- }
-
- private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
- if (MimeTypes.isImage(sampleMimeType)) {
- return INPUT_TYPE_BITMAP;
- }
- if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
- return INPUT_TYPE_TEXTURE_ID;
- }
- if (MimeTypes.isVideo(sampleMimeType)) {
- return INPUT_TYPE_SURFACE;
- }
- throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
- }
-
- private static Size getDecodedSize(Format format) {
- // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
- int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
- int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
- return new Size(decodedWidth, decodedHeight);
- }
-
-    private static ImmutableList<Effect> createEffectListWithPresentation(
-        List<Effect> effects, @Nullable Presentation presentation) {
- if (presentation == null) {
- return ImmutableList.copyOf(effects);
- }
-      ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
- effectsWithPresentationBuilder.addAll(effects).add(presentation);
- return effectsWithPresentationBuilder.build();
+ if (videoFrameProcessingWrapper != null) {
+ videoFrameProcessingWrapper.release();
+ videoFrameProcessingWrapper = null;
}
+ released = true;
}
}
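
Note on the new construction path: SingleInputVideoGraph is now built through its Factory and then explicitly initialized, instead of doing all the work in a public constructor. A minimal lifecycle sketch, assuming the caller already holds the arguments listed in the factory signature above (context, the color infos, errorConsumer, debugViewProvider, graphListener and compositionEffects are placeholders, not values defined in this diff):

    VideoGraph videoGraph =
        new SingleInputVideoGraph.Factory(videoFrameProcessorFactory)
            .create(
                context,
                inputColorInfo,
                outputColorInfo,
                errorConsumer,
                debugViewProvider,
                graphListener,
                /* listenerExecutor= */ MoreExecutors.directExecutor(),
                compositionEffects);
    try {
      videoGraph.initialize(); // Must be called at most once, before getInput().
      GraphInput input = videoGraph.getInput();
      // ... queue decoded frames and bitmaps through the GraphInput ...
    } catch (VideoFrameProcessingException e) {
      videoGraph.release(); // Per the VideoGraph contract, release if initialization fails.
      throw e;
    }
    // ... and call release() once the export completes.
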
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
index 7c37f52208..2e30d6fc24 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/TransformerInternal.java
@@ -50,7 +50,6 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper;
-import androidx.media3.effect.Presentation;
import androidx.media3.effect.ScaleAndRotateTransformation;
import com.google.common.collect.ImmutableList;
import java.lang.annotation.Documented;
@@ -591,11 +590,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else {
      ImmutableList<Effect> compositionVideoEffects = composition.effects.videoEffects;
- @Nullable
- Presentation compositionPresentation =
- compositionVideoEffects.isEmpty()
- ? null
- : (Presentation) compositionVideoEffects.get(0);
// TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
assetLoaderInputTracker.registerSampleExporter(
C.TRACK_TYPE_VIDEO,
@@ -603,7 +597,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
context,
firstAssetLoaderInputFormat,
transformationRequest,
- compositionPresentation,
+ compositionVideoEffects,
videoFrameProcessorFactory,
encoderFactory,
muxerWrapper,
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
new file mode 100644
index 0000000000..29515809a0
--- /dev/null
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoFrameProcessingWrapper.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media3.transformer;
+
+import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
+import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
+import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import androidx.media3.common.ColorInfo;
+import androidx.media3.common.DebugViewProvider;
+import androidx.media3.common.Effect;
+import androidx.media3.common.Format;
+import androidx.media3.common.FrameInfo;
+import androidx.media3.common.MimeTypes;
+import androidx.media3.common.OnInputFrameProcessedListener;
+import androidx.media3.common.SurfaceInfo;
+import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.VideoFrameProcessor;
+import androidx.media3.common.util.Size;
+import androidx.media3.common.util.TimestampIterator;
+import androidx.media3.effect.Presentation;
+import com.google.common.collect.ImmutableList;
+import java.util.List;
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicLong;
+
+/** A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events. */
+/* package */ final class VideoFrameProcessingWrapper implements GraphInput {
+ private final VideoFrameProcessor videoFrameProcessor;
+ private final AtomicLong mediaItemOffsetUs;
+ private final ColorInfo inputColorInfo;
+  @Nullable private final Presentation presentation;
+
+ public VideoFrameProcessingWrapper(
+ Context context,
+ VideoFrameProcessor.Factory videoFrameProcessorFactory,
+ ColorInfo inputColorInfo,
+ ColorInfo outputColorInfo,
+ DebugViewProvider debugViewProvider,
+ Executor listenerExecutor,
+ VideoFrameProcessor.Listener listener,
+ boolean renderFramesAutomatically,
+ @Nullable Presentation presentation)
+ throws VideoFrameProcessingException {
+ this.mediaItemOffsetUs = new AtomicLong();
+ this.inputColorInfo = inputColorInfo;
+ this.presentation = presentation;
+
+ videoFrameProcessor =
+ videoFrameProcessorFactory.create(
+ context,
+ debugViewProvider,
+ inputColorInfo,
+ outputColorInfo,
+ renderFramesAutomatically,
+ listenerExecutor,
+ listener);
+ }
+
+ @Override
+ public void onMediaItemChanged(
+ EditedMediaItem editedMediaItem,
+ long durationUs,
+ @Nullable Format trackFormat,
+ boolean isLast) {
+ if (trackFormat != null) {
+ Size decodedSize = getDecodedSize(trackFormat);
+ videoFrameProcessor.registerInputStream(
+ getInputType(checkNotNull(trackFormat.sampleMimeType)),
+ createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
+ new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
+ .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
+ .setOffsetToAddUs(mediaItemOffsetUs.get())
+ .build());
+ }
+ mediaItemOffsetUs.addAndGet(durationUs);
+ }
+
+ @Override
+ public @InputResult int queueInputBitmap(
+ Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
+ return videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs)
+ ? INPUT_RESULT_SUCCESS
+ : INPUT_RESULT_TRY_AGAIN_LATER;
+ }
+
+ @Override
+ public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
+ videoFrameProcessor.setOnInputFrameProcessedListener(listener);
+ }
+
+ @Override
+ public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
+ return videoFrameProcessor.queueInputTexture(texId, presentationTimeUs)
+ ? INPUT_RESULT_SUCCESS
+ : INPUT_RESULT_TRY_AGAIN_LATER;
+ }
+
+ @Override
+ public Surface getInputSurface() {
+ return videoFrameProcessor.getInputSurface();
+ }
+
+ @Override
+ public ColorInfo getExpectedInputColorInfo() {
+ return inputColorInfo;
+ }
+
+ @Override
+ public int getPendingVideoFrameCount() {
+ return videoFrameProcessor.getPendingInputFrameCount();
+ }
+
+ @Override
+ public boolean registerVideoFrame(long presentationTimeUs) {
+ return videoFrameProcessor.registerInputFrame();
+ }
+
+ @Override
+ public void signalEndOfVideoInput() {
+ videoFrameProcessor.signalEndOfInput();
+ }
+
+ public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
+ videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
+ }
+
+ public void release() {
+ videoFrameProcessor.release();
+ }
+
+ private static Size getDecodedSize(Format format) {
+    // The decoder rotates encoded frames for display by format.rotationDegrees.
+ int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
+ int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
+ return new Size(decodedWidth, decodedHeight);
+ }
+
+  private static ImmutableList<Effect> createEffectListWithPresentation(
+      List<Effect> effects, @Nullable Presentation presentation) {
+ if (presentation == null) {
+ return ImmutableList.copyOf(effects);
+ }
+    ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
+ effectsWithPresentationBuilder.addAll(effects).add(presentation);
+ return effectsWithPresentationBuilder.build();
+ }
+
+ private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
+ if (MimeTypes.isImage(sampleMimeType)) {
+ return INPUT_TYPE_BITMAP;
+ }
+ if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
+ return INPUT_TYPE_TEXTURE_ID;
+ }
+ if (MimeTypes.isVideo(sampleMimeType)) {
+ return INPUT_TYPE_SURFACE;
+ }
+ throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
+ }
+}
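
The wrapper accumulates mediaItemOffsetUs so that frames from consecutive media items in a sequence do not collide: each registered input stream carries the summed durations of the preceding items as its offsetToAddUs. An illustrative sketch (wrapper, the items and the formats are hypothetical placeholders, not part of this diff):

    // First item, 3 seconds: its stream is registered with offsetToAddUs = 0.
    wrapper.onMediaItemChanged(
        firstItem, /* durationUs= */ 3_000_000, firstVideoFormat, /* isLast= */ false);
    // Second item, 5 seconds: its stream is registered with offsetToAddUs = 3_000_000,
    // shifting its frame timestamps past the end of the first item.
    wrapper.onMediaItemChanged(
        secondItem, /* durationUs= */ 5_000_000, secondVideoFormat, /* isLast= */ true);
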
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java
new file mode 100644
index 0000000000..883d2be788
--- /dev/null
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoGraph.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media3.transformer;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+import androidx.media3.common.ColorInfo;
+import androidx.media3.common.DebugViewProvider;
+import androidx.media3.common.Effect;
+import androidx.media3.common.SurfaceInfo;
+import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.VideoFrameProcessor;
+import androidx.media3.common.util.Consumer;
+import java.util.List;
+import java.util.concurrent.Executor;
+
+/** Represents a graph for processing decoded video frames. */
+/* package */ interface VideoGraph {
+
+ /** A factory for creating a {@link VideoGraph}. */
+ interface Factory {
+ /**
+ * Creates a new {@link VideoGraph} instance.
+ *
+ * @param context A {@link Context}.
+ * @param inputColorInfo The {@link ColorInfo} for the input frames.
+ * @param outputColorInfo The {@link ColorInfo} for the output frames.
+ * @param errorConsumer A {@link Consumer} of {@link ExportException}.
+ * @param debugViewProvider A {@link DebugViewProvider}.
+ * @param listener A {@link Listener}.
+ * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
+ * @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
+ * @return A new instance.
+ * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
+ * VideoFrameProcessor}.
+ */
+ VideoGraph create(
+ Context context,
+ ColorInfo inputColorInfo,
+ ColorInfo outputColorInfo,
+        Consumer<ExportException> errorConsumer,
+ DebugViewProvider debugViewProvider,
+ Listener listener,
+ Executor listenerExecutor,
+        List<Effect> compositionEffects)
+ throws VideoFrameProcessingException;
+ }
+
+ /** Listener for video frame processing events. */
+ interface Listener {
+ /**
+ * Called when the output size changes.
+ *
+ * @param width The new output width in pixels.
+     * @param height The new output height in pixels.
+     * @return A {@link SurfaceInfo} to which the {@code VideoGraph} renders, or {@code null} if
+     *     the output is not needed.
+ */
+ @Nullable
+ SurfaceInfo onOutputSizeChanged(int width, int height);
+
+    /** Called after the {@code VideoGraph} has rendered its final output frame. */
+ void onEnded(long finalFramePresentationTimeUs);
+ }
+
+ /**
+   * Initializes the {@code VideoGraph}.
+ *
+ * This method must be called before calling other methods.
+ *
+   * <p>If the method throws, the caller must call {@link #release}.
+ */
+ void initialize() throws VideoFrameProcessingException;
+
+ /**
+ * Returns a {@link GraphInput} object to which the {@code VideoGraph} inputs are queued.
+ *
+   * <p>This method must be called after successfully {@linkplain #initialize() initializing} the
+ * {@code VideoGraph}.
+ *
+   * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
+ */
+ GraphInput getInput() throws VideoFrameProcessingException;
+
+ /**
+ * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp.
+ */
+ boolean hasProducedFrameWithTimestampZero();
+
+ /** Releases the associated resources. */
+ void release();
+}
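
For reference, a caller-side Listener mirrors what VideoSampleExporter's VideoGraphWrapper does later in this change: map output size changes to a render surface and forward the end-of-stream signal. A minimal sketch, where encoderSurfaceInfo is a hypothetical caller-owned surface and not part of this diff:

    VideoGraph.Listener listener =
        new VideoGraph.Listener() {
          @Nullable
          @Override
          public SurfaceInfo onOutputSizeChanged(int width, int height) {
            // Return the surface the graph should render into, or null if the output is not needed.
            return encoderSurfaceInfo;
          }

          @Override
          public void onEnded(long finalFramePresentationTimeUs) {
            // The final frame has been rendered; signal end of stream downstream.
          }
        };
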
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
index 2451b3c5cf..46d56101f3 100644
--- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoSampleExporter.java
@@ -36,6 +36,7 @@ import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
+import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.SurfaceInfo;
@@ -46,12 +47,12 @@ import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.effect.DebugTraceUtil;
-import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import java.nio.ByteBuffer;
import java.util.List;
+import org.checkerframework.checker.initialization.qual.Initialized;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.dataflow.qual.Pure;
@@ -59,7 +60,7 @@ import org.checkerframework.dataflow.qual.Pure;
/* package */ final class VideoSampleExporter extends SampleExporter {
private static final String TAG = "VideoSampleExporter";
- private final SingleInputVideoGraph singleInputVideoGraph;
+ private final VideoGraph videoGraph;
private final EncoderWrapper encoderWrapper;
private final DecoderInputBuffer encoderOutputBuffer;
@@ -75,7 +76,7 @@ import org.checkerframework.dataflow.qual.Pure;
Context context,
Format firstInputFormat,
TransformationRequest transformationRequest,
- @Nullable Presentation presentation,
+      List<Effect> compositionEffects,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper,
@@ -86,7 +87,6 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/278259383) Consider delaying configuration of VideoSampleExporter to use the decoder
// output format instead of the extractor output format, to match AudioSampleExporter behavior.
super(firstInputFormat, muxerWrapper);
-
finalFramePresentationTimeUs = C.TIME_UNSET;
ColorInfo decoderInputColor;
@@ -111,80 +111,60 @@ import org.checkerframework.dataflow.qual.Pure;
boolean isMediaCodecToneMapping =
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
&& ColorInfo.isTransferHdr(decoderInputColor);
- ColorInfo videoFrameProcessorInputColor =
+ ColorInfo videoGraphInputColor =
isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
boolean isGlToneMapping =
ColorInfo.isTransferHdr(decoderInputColor)
&& transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
- ColorInfo videoFrameProcessorOutputColor;
- if (videoFrameProcessorInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) {
+ ColorInfo videoGraphOutputColor;
+ if (videoGraphInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) {
// The sRGB color transfer is only used for images, so when an image gets transcoded into a
// video, we use the SMPTE 170M transfer function for the resulting video.
- videoFrameProcessorOutputColor = SDR_BT709_LIMITED;
+ videoGraphOutputColor = SDR_BT709_LIMITED;
} else if (isGlToneMapping) {
// For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder.
- videoFrameProcessorOutputColor =
+ videoGraphOutputColor =
new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709)
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
} else {
- videoFrameProcessorOutputColor = videoFrameProcessorInputColor;
+ videoGraphOutputColor = videoGraphInputColor;
}
try {
- singleInputVideoGraph =
- new SingleInputVideoGraph(
+ videoGraph =
+ new VideoGraphWrapper(
context,
- videoFrameProcessorFactory,
- videoFrameProcessorInputColor,
- videoFrameProcessorOutputColor,
- new SingleInputVideoGraph.Listener() {
- @Nullable
- @Override
- public SurfaceInfo onOutputSizeChanged(int width, int height) {
- @Nullable SurfaceInfo surfaceInfo = null;
- try {
- surfaceInfo = encoderWrapper.getSurfaceInfo(width, height);
- } catch (ExportException e) {
- errorConsumer.accept(e);
- }
- return surfaceInfo;
- }
-
- @Override
- public void onEnded(long finalFramePresentationTimeUs) {
- VideoSampleExporter.this.finalFramePresentationTimeUs =
- finalFramePresentationTimeUs;
- try {
- encoderWrapper.signalEndOfInputStream();
- } catch (ExportException e) {
- errorConsumer.accept(e);
- }
- }
- },
+ new SingleInputVideoGraph.Factory(videoFrameProcessorFactory),
+ videoGraphInputColor,
+ videoGraphOutputColor,
errorConsumer,
debugViewProvider,
- MoreExecutors.directExecutor(),
- /* renderFramesAutomatically= */ true,
- presentation);
+ compositionEffects);
+ videoGraph.initialize();
} catch (VideoFrameProcessingException e) {
throw ExportException.createForVideoFrameProcessingException(e);
}
}
@Override
- public GraphInput getInput(EditedMediaItem item, Format format) {
- return singleInputVideoGraph.getInput();
+ public GraphInput getInput(EditedMediaItem editedMediaItem, Format format)
+ throws ExportException {
+ try {
+ return videoGraph.getInput();
+ } catch (VideoFrameProcessingException e) {
+ throw ExportException.createForVideoFrameProcessingException(e);
+ }
}
@Override
public void release() {
- singleInputVideoGraph.release();
+ videoGraph.release();
encoderWrapper.release();
}
@@ -206,7 +186,7 @@ import org.checkerframework.dataflow.qual.Pure;
// Internal ref b/235045165: Some encoder incorrectly set a zero presentation time on the
// penultimate buffer (before EOS), and sets the actual timestamp on the EOS buffer. Use the
// last processed frame presentation time instead.
- if (singleInputVideoGraph.hasProducedFrameWithTimestampZero() == hasMuxedTimestampZero
+ if (videoGraph.hasProducedFrameWithTimestampZero() == hasMuxedTimestampZero
&& finalFramePresentationTimeUs != C.TIME_UNSET
&& bufferInfo.size > 0) {
bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
@@ -478,4 +458,79 @@ import org.checkerframework.dataflow.qual.Pure;
releaseEncoder = true;
}
}
+
+ private final class VideoGraphWrapper implements VideoGraph, VideoGraph.Listener {
+
+ private final VideoGraph videoGraph;
+    private final Consumer<ExportException> errorConsumer;
+
+ public VideoGraphWrapper(
+ Context context,
+ VideoGraph.Factory videoGraphFactory,
+ ColorInfo videoFrameProcessorInputColor,
+ ColorInfo videoFrameProcessorOutputColor,
+        Consumer<ExportException> errorConsumer,
+ DebugViewProvider debugViewProvider,
+        List<Effect> compositionEffects)
+ throws VideoFrameProcessingException {
+ this.errorConsumer = errorConsumer;
+      // Declare an initialized "this" reference to pass as the listener to
+      // videoGraphFactory.create below; this satisfies the nullness checker.
+ @SuppressWarnings("nullness:assignment")
+ @Initialized
+ VideoGraphWrapper thisRef = this;
+ videoGraph =
+ videoGraphFactory.create(
+ context,
+ videoFrameProcessorInputColor,
+ videoFrameProcessorOutputColor,
+ errorConsumer,
+ debugViewProvider,
+ /* listener= */ thisRef,
+ /* listenerExecutor= */ MoreExecutors.directExecutor(),
+ compositionEffects);
+ }
+
+ @Nullable
+ @Override
+ public SurfaceInfo onOutputSizeChanged(int width, int height) {
+ @Nullable SurfaceInfo surfaceInfo = null;
+ try {
+ surfaceInfo = encoderWrapper.getSurfaceInfo(width, height);
+ } catch (ExportException e) {
+ errorConsumer.accept(e);
+ }
+ return surfaceInfo;
+ }
+
+ @Override
+ public void onEnded(long finalFramePresentationTimeUs) {
+ VideoSampleExporter.this.finalFramePresentationTimeUs = finalFramePresentationTimeUs;
+ try {
+ encoderWrapper.signalEndOfInputStream();
+ } catch (ExportException e) {
+ errorConsumer.accept(e);
+ }
+ }
+
+ @Override
+ public void initialize() throws VideoFrameProcessingException {
+ videoGraph.initialize();
+ }
+
+ @Override
+ public GraphInput getInput() throws VideoFrameProcessingException {
+ return videoGraph.getInput();
+ }
+
+ @Override
+ public boolean hasProducedFrameWithTimestampZero() {
+ return videoGraph.hasProducedFrameWithTimestampZero();
+ }
+
+ @Override
+ public void release() {
+ videoGraph.release();
+ }
+ }
}