Split out VideoGraph and VideoFrameProcessingWrapper

More specifically, this CL

- Defines a VideoGraph interface
  - Adds a factory method, to hide the constructors
  - Separates out an initialize method that does the real work (following the principle that a constructor should not do real work)
  - The VideoGraph takes in a list of composition effects. For now, we only use the `Presentation`, if there is one in the list. This means we can take any number of Presentations now, but only the first one will be used.
- Moves the VideoFrameProcessingWrapper to its own file

PiperOrigin-RevId: 561059653
This commit is contained in:
claincly 2023-08-29 09:52:10 -07:00 committed by Copybara-Service
parent bb214b19f9
commit b466b06ace
5 changed files with 474 additions and 242 deletions

View File

@ -16,80 +16,82 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer; import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.effect.Presentation; import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
/** Processes decoded video frames from one single input. */ /** A {@link VideoGraph} that handles one input stream. */
/* package */ final class SingleInputVideoGraph { /* package */ final class SingleInputVideoGraph implements VideoGraph {
/** /** A factory for creating a {@link SingleInputVideoGraph}. */
* Listener for video frame processing events. public static final class Factory implements VideoGraph.Factory {
*
* <p>The methods are called from the GL thread.
*/
public interface Listener {
/**
* Called when the output size changes.
*
* @param width The new output width in pixels.
* @param height The new output width in pixels.
* @return A {@link SurfaceInfo} to which {@link SingleInputVideoGraph} renders to, or {@code
* null} if the output is not needed.
*/
@Nullable
SurfaceInfo onOutputSizeChanged(int width, int height);
/** Called after the {@link SingleInputVideoGraph} has rendered its final output frame. */ private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
void onEnded(long finalFramePresentationTimeUs);
public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
} }
private final VideoFrameProcessingWrapper videoFrameProcessingWrapper; @Override
public VideoGraph create(
Context context,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
Consumer<ExportException> errorConsumer,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
List<Effect> compositionEffects) {
@Nullable Presentation presentation = null;
for (int i = 0; i < compositionEffects.size(); i++) {
Effect effect = compositionEffects.get(i);
if (effect instanceof Presentation) {
presentation = (Presentation) effect;
}
}
return new SingleInputVideoGraph(
context,
videoFrameProcessorFactory,
inputColorInfo,
outputColorInfo,
listener,
errorConsumer,
debugViewProvider,
listenerExecutor,
/* renderFramesAutomatically= */ true,
presentation);
}
}
private final Context context;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final ColorInfo inputColorInfo;
private final ColorInfo outputColorInfo;
private final Listener listener;
private final Consumer<ExportException> errorConsumer;
private final DebugViewProvider debugViewProvider;
private final Executor listenerExecutor;
private final boolean renderFramesAutomatically;
@Nullable private final Presentation presentation;
@Nullable private VideoFrameProcessingWrapper videoFrameProcessingWrapper;
private boolean released;
private volatile boolean hasProducedFrameWithTimestampZero; private volatile boolean hasProducedFrameWithTimestampZero;
/** private SingleInputVideoGraph(
* Creates a new instance.
*
* @param context A {@link Context}.
* @param videoFrameProcessorFactory A {@link VideoFrameProcessor.Factory}.
* @param inputColorInfo The {@link ColorInfo} for the input frames.
* @param outputColorInfo The {@link ColorInfo} for the output frames.
* @param listener A {@link Listener}.
* @param errorConsumer A {@link Consumer} of {@link ExportException}.
* @param debugViewProvider A {@link DebugViewProvider}.
* @param listenerExecutor An {@link Executor} on which {@link VideoFrameProcessor.Listener}
* methods are called.
* @param renderFramesAutomatically Whether to automatically render output frames. Use {@code
* false} when controlling the presentation of output frames.
* @param presentation A {@link Presentation} to apply to processed frames.
* @throws VideoFrameProcessingException When video frame processing fails.
*/
public SingleInputVideoGraph(
Context context, Context context,
VideoFrameProcessor.Factory videoFrameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
@ -99,8 +101,27 @@ import java.util.concurrent.atomic.AtomicLong;
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
Executor listenerExecutor, Executor listenerExecutor,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
@Nullable Presentation presentation) @Nullable Presentation presentation) {
throws VideoFrameProcessingException { this.context = context;
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
this.inputColorInfo = inputColorInfo;
this.outputColorInfo = outputColorInfo;
this.listener = listener;
this.errorConsumer = errorConsumer;
this.debugViewProvider = debugViewProvider;
this.listenerExecutor = listenerExecutor;
this.renderFramesAutomatically = renderFramesAutomatically;
this.presentation = presentation;
}
/**
* {@inheritDoc}
*
* <p>This method must be called at most once.
*/
@Override
public void initialize() throws VideoFrameProcessingException {
checkStateNotNull(videoFrameProcessingWrapper == null && !released);
videoFrameProcessingWrapper = videoFrameProcessingWrapper =
new VideoFrameProcessingWrapper( new VideoFrameProcessingWrapper(
@ -151,150 +172,26 @@ import java.util.concurrent.atomic.AtomicLong;
} }
/** Returns the {@link GraphInput}. */ /** Returns the {@link GraphInput}. */
@Override
public GraphInput getInput() { public GraphInput getInput() {
return videoFrameProcessingWrapper; return checkNotNull(videoFrameProcessingWrapper);
} }
/* package */ boolean hasProducedFrameWithTimestampZero() { @Override
public boolean hasProducedFrameWithTimestampZero() {
return hasProducedFrameWithTimestampZero; return hasProducedFrameWithTimestampZero;
} }
@Override
public void release() { public void release() {
if (released) {
return;
}
if (videoFrameProcessingWrapper != null) {
videoFrameProcessingWrapper.release(); videoFrameProcessingWrapper.release();
videoFrameProcessingWrapper = null;
} }
released = true;
private static final class VideoFrameProcessingWrapper implements GraphInput {
private final VideoFrameProcessor videoFrameProcessor;
private final AtomicLong mediaItemOffsetUs;
private final ColorInfo inputColorInfo;
@Nullable private final Presentation presentation;
public VideoFrameProcessingWrapper(
Context context,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Executor listenerExecutor,
VideoFrameProcessor.Listener listener,
boolean renderFramesAutomatically,
@Nullable Presentation presentation)
throws VideoFrameProcessingException {
this.videoFrameProcessor =
videoFrameProcessorFactory.create(
context,
debugViewProvider,
inputColorInfo,
outputColorInfo,
renderFramesAutomatically,
listenerExecutor,
listener);
this.mediaItemOffsetUs = new AtomicLong();
this.inputColorInfo = inputColorInfo;
this.presentation = presentation;
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format trackFormat,
boolean isLast) {
if (trackFormat != null) {
Size decodedSize = getDecodedSize(trackFormat);
videoFrameProcessor.registerInputStream(
getInputType(checkNotNull(trackFormat.sampleMimeType)),
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get())
.build());
}
mediaItemOffsetUs.addAndGet(durationUs);
}
@Override
public @InputResult int queueInputBitmap(
Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
return videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
videoFrameProcessor.setOnInputFrameProcessedListener(listener);
}
@Override
public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
return videoFrameProcessor.queueInputTexture(texId, presentationTimeUs)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public Surface getInputSurface() {
return videoFrameProcessor.getInputSurface();
}
@Override
public ColorInfo getExpectedInputColorInfo() {
return inputColorInfo;
}
@Override
public int getPendingVideoFrameCount() {
return videoFrameProcessor.getPendingInputFrameCount();
}
@Override
public boolean registerVideoFrame(long presentationTimeUs) {
return videoFrameProcessor.registerInputFrame();
}
@Override
public void signalEndOfVideoInput() {
videoFrameProcessor.signalEndOfInput();
}
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
}
public void release() {
videoFrameProcessor.release();
}
private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP;
}
if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
return INPUT_TYPE_TEXTURE_ID;
}
if (MimeTypes.isVideo(sampleMimeType)) {
return INPUT_TYPE_SURFACE;
}
throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
}
private static Size getDecodedSize(Format format) {
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
return new Size(decodedWidth, decodedHeight);
}
private static ImmutableList<Effect> createEffectListWithPresentation(
List<Effect> effects, @Nullable Presentation presentation) {
if (presentation == null) {
return ImmutableList.copyOf(effects);
}
ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
effectsWithPresentationBuilder.addAll(effects).add(presentation);
return effectsWithPresentationBuilder.build();
}
} }
} }

View File

@ -50,7 +50,6 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable; import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper; import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.ScaleAndRotateTransformation; import androidx.media3.effect.ScaleAndRotateTransformation;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
@ -591,11 +590,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else { } else {
ImmutableList<Effect> compositionVideoEffects = composition.effects.videoEffects; ImmutableList<Effect> compositionVideoEffects = composition.effects.videoEffects;
@Nullable
Presentation compositionPresentation =
compositionVideoEffects.isEmpty()
? null
: (Presentation) compositionVideoEffects.get(0);
// TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP. // TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
assetLoaderInputTracker.registerSampleExporter( assetLoaderInputTracker.registerSampleExporter(
C.TRACK_TYPE_VIDEO, C.TRACK_TYPE_VIDEO,
@ -603,7 +597,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
context, context,
firstAssetLoaderInputFormat, firstAssetLoaderInputFormat,
transformationRequest, transformationRequest,
compositionPresentation, compositionVideoEffects,
videoFrameProcessorFactory, videoFrameProcessorFactory,
encoderFactory, encoderFactory,
muxerWrapper, muxerWrapper,

View File

@ -0,0 +1,180 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
/**
 * A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events.
 *
 * <p>Accumulates a running media-item timestamp offset so that frames from consecutive media items
 * receive monotonically increasing presentation timestamps.
 */
/* package */ final class VideoFrameProcessingWrapper implements GraphInput {
  private final VideoFrameProcessor videoFrameProcessor;
  // Sum of the durations of all media items queued so far; added to each new stream's frame
  // timestamps via FrameInfo.offsetToAddUs.
  private final AtomicLong mediaItemOffsetUs;
  private final ColorInfo inputColorInfo;
  // Composition-level Presentation appended after each media item's own video effects, or null if
  // none. Private for consistency with the other fields: it is only read in onMediaItemChanged.
  @Nullable private final Presentation presentation;

  /**
   * Creates a new instance.
   *
   * @param context A {@link Context}.
   * @param videoFrameProcessorFactory The factory for the underlying {@link VideoFrameProcessor}.
   * @param inputColorInfo The {@link ColorInfo} of the input frames.
   * @param outputColorInfo The {@link ColorInfo} of the output frames.
   * @param debugViewProvider A {@link DebugViewProvider}.
   * @param listenerExecutor The {@link Executor} on which {@code listener} is invoked.
   * @param listener A {@link VideoFrameProcessor.Listener} for processing events.
   * @param renderFramesAutomatically Whether output frames are rendered automatically.
   * @param presentation An optional {@link Presentation} applied after each media item's own
   *     effects, or {@code null}.
   * @throws VideoFrameProcessingException If creating the {@link VideoFrameProcessor} fails.
   */
  public VideoFrameProcessingWrapper(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo inputColorInfo,
      ColorInfo outputColorInfo,
      DebugViewProvider debugViewProvider,
      Executor listenerExecutor,
      VideoFrameProcessor.Listener listener,
      boolean renderFramesAutomatically,
      @Nullable Presentation presentation)
      throws VideoFrameProcessingException {
    this.mediaItemOffsetUs = new AtomicLong();
    this.inputColorInfo = inputColorInfo;
    this.presentation = presentation;
    videoFrameProcessor =
        videoFrameProcessorFactory.create(
            context,
            debugViewProvider,
            inputColorInfo,
            outputColorInfo,
            renderFramesAutomatically,
            listenerExecutor,
            listener);
  }

  @Override
  public void onMediaItemChanged(
      EditedMediaItem editedMediaItem,
      long durationUs,
      @Nullable Format trackFormat,
      boolean isLast) {
    // A null trackFormat means there is no video track; only the offset advances in that case.
    if (trackFormat != null) {
      Size decodedSize = getDecodedSize(trackFormat);
      videoFrameProcessor.registerInputStream(
          getInputType(checkNotNull(trackFormat.sampleMimeType)),
          createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
          new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
              .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
              .setOffsetToAddUs(mediaItemOffsetUs.get())
              .build());
    }
    mediaItemOffsetUs.addAndGet(durationUs);
  }

  @Override
  public @InputResult int queueInputBitmap(
      Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
    return videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs)
        ? INPUT_RESULT_SUCCESS
        : INPUT_RESULT_TRY_AGAIN_LATER;
  }

  @Override
  public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
    videoFrameProcessor.setOnInputFrameProcessedListener(listener);
  }

  @Override
  public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
    return videoFrameProcessor.queueInputTexture(texId, presentationTimeUs)
        ? INPUT_RESULT_SUCCESS
        : INPUT_RESULT_TRY_AGAIN_LATER;
  }

  @Override
  public Surface getInputSurface() {
    return videoFrameProcessor.getInputSurface();
  }

  @Override
  public ColorInfo getExpectedInputColorInfo() {
    return inputColorInfo;
  }

  @Override
  public int getPendingVideoFrameCount() {
    return videoFrameProcessor.getPendingInputFrameCount();
  }

  @Override
  public boolean registerVideoFrame(long presentationTimeUs) {
    // The underlying processor assigns timestamps itself; presentationTimeUs is unused here.
    return videoFrameProcessor.registerInputFrame();
  }

  @Override
  public void signalEndOfVideoInput() {
    videoFrameProcessor.signalEndOfInput();
  }

  /** Sets the output surface, or removes it when {@code outputSurfaceInfo} is {@code null}. */
  public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
    videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
  }

  /** Releases the underlying {@link VideoFrameProcessor}. */
  public void release() {
    videoFrameProcessor.release();
  }

  /** Returns the frame size after applying the decoder's rotation to the encoded size. */
  private static Size getDecodedSize(Format format) {
    // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
    int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
    int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
    return new Size(decodedWidth, decodedHeight);
  }

  /** Returns {@code effects} with {@code presentation}, if any, appended as the last effect. */
  private static ImmutableList<Effect> createEffectListWithPresentation(
      List<Effect> effects, @Nullable Presentation presentation) {
    if (presentation == null) {
      return ImmutableList.copyOf(effects);
    }
    ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
    effectsWithPresentationBuilder.addAll(effects).add(presentation);
    return effectsWithPresentationBuilder.build();
  }

  /** Maps a sample MIME type to the corresponding {@link VideoFrameProcessor.InputType}. */
  private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
    if (MimeTypes.isImage(sampleMimeType)) {
      return INPUT_TYPE_BITMAP;
    }
    if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
      return INPUT_TYPE_TEXTURE_ID;
    }
    if (MimeTypes.isVideo(sampleMimeType)) {
      return INPUT_TYPE_SURFACE;
    }
    throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
  }
}

View File

@ -0,0 +1,106 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer;
import java.util.List;
import java.util.concurrent.Executor;
/** Represents a graph for processing decoded video frames. */
/* package */ interface VideoGraph {
  /** A factory for creating a {@link VideoGraph}. */
  interface Factory {
    /**
     * Creates a new {@link VideoGraph} instance.
     *
     * @param context A {@link Context}.
     * @param inputColorInfo The {@link ColorInfo} for the input frames.
     * @param outputColorInfo The {@link ColorInfo} for the output frames.
     * @param errorConsumer A {@link Consumer} of {@link ExportException}.
     * @param debugViewProvider A {@link DebugViewProvider}.
     * @param listener A {@link Listener}.
     * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
     * @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
     * @return A new instance.
     * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
     *     VideoFrameProcessor}.
     */
    VideoGraph create(
        Context context,
        ColorInfo inputColorInfo,
        ColorInfo outputColorInfo,
        Consumer<ExportException> errorConsumer,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        List<Effect> compositionEffects)
        throws VideoFrameProcessingException;
  }
  /** Listener for video frame processing events. */
  interface Listener {
    /**
     * Called when the output size changes.
     *
     * @param width The new output width in pixels.
     * @param height The new output height in pixels.
     * @return A {@link SurfaceInfo} into which the {@code VideoGraph} renders, or {@code null} if
     *     the output is not needed.
     */
    @Nullable
    SurfaceInfo onOutputSizeChanged(int width, int height);
    /** Called after the {@code VideoGraph} has rendered its final output frame. */
    void onEnded(long finalFramePresentationTimeUs);
  }
  /**
   * Initializes the {@code VideoGraph}.
   *
   * <p>This method must be called before calling other methods.
   *
   * <p>If the method throws, the caller must call {@link #release}.
   */
  void initialize() throws VideoFrameProcessingException;
  /**
   * Returns a {@link GraphInput} object to which the {@code VideoGraph} inputs are queued.
   *
   * <p>This method must be called after successfully {@linkplain #initialize() initializing} the
   * {@code VideoGraph}.
   *
   * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
   */
  GraphInput getInput() throws VideoFrameProcessingException;
  /**
   * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp.
   */
  boolean hasProducedFrameWithTimestampZero();
  /** Releases the associated resources. */
  void release();
}

View File

@ -36,6 +36,7 @@ import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
@ -46,12 +47,12 @@ import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer; import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.effect.DebugTraceUtil; import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil; import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.List; import java.util.List;
import org.checkerframework.checker.initialization.qual.Initialized;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.dataflow.qual.Pure; import org.checkerframework.dataflow.qual.Pure;
@ -59,7 +60,7 @@ import org.checkerframework.dataflow.qual.Pure;
/* package */ final class VideoSampleExporter extends SampleExporter { /* package */ final class VideoSampleExporter extends SampleExporter {
private static final String TAG = "VideoSampleExporter"; private static final String TAG = "VideoSampleExporter";
private final SingleInputVideoGraph singleInputVideoGraph; private final VideoGraph videoGraph;
private final EncoderWrapper encoderWrapper; private final EncoderWrapper encoderWrapper;
private final DecoderInputBuffer encoderOutputBuffer; private final DecoderInputBuffer encoderOutputBuffer;
@ -75,7 +76,7 @@ import org.checkerframework.dataflow.qual.Pure;
Context context, Context context,
Format firstInputFormat, Format firstInputFormat,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
@Nullable Presentation presentation, List<Effect> compositionEffects,
VideoFrameProcessor.Factory videoFrameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
@ -86,7 +87,6 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/278259383) Consider delaying configuration of VideoSampleExporter to use the decoder // TODO(b/278259383) Consider delaying configuration of VideoSampleExporter to use the decoder
// output format instead of the extractor output format, to match AudioSampleExporter behavior. // output format instead of the extractor output format, to match AudioSampleExporter behavior.
super(firstInputFormat, muxerWrapper); super(firstInputFormat, muxerWrapper);
finalFramePresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
ColorInfo decoderInputColor; ColorInfo decoderInputColor;
@ -111,80 +111,60 @@ import org.checkerframework.dataflow.qual.Pure;
boolean isMediaCodecToneMapping = boolean isMediaCodecToneMapping =
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
&& ColorInfo.isTransferHdr(decoderInputColor); && ColorInfo.isTransferHdr(decoderInputColor);
ColorInfo videoFrameProcessorInputColor = ColorInfo videoGraphInputColor =
isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor; isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
boolean isGlToneMapping = boolean isGlToneMapping =
ColorInfo.isTransferHdr(decoderInputColor) ColorInfo.isTransferHdr(decoderInputColor)
&& transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL; && transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
ColorInfo videoFrameProcessorOutputColor; ColorInfo videoGraphOutputColor;
if (videoFrameProcessorInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) { if (videoGraphInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) {
// The sRGB color transfer is only used for images, so when an image gets transcoded into a // The sRGB color transfer is only used for images, so when an image gets transcoded into a
// video, we use the SMPTE 170M transfer function for the resulting video. // video, we use the SMPTE 170M transfer function for the resulting video.
videoFrameProcessorOutputColor = SDR_BT709_LIMITED; videoGraphOutputColor = SDR_BT709_LIMITED;
} else if (isGlToneMapping) { } else if (isGlToneMapping) {
// For consistency with the Android platform, OpenGL tone mapping outputs colors with // For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder. // C.COLOR_TRANSFER_SDR to the encoder.
videoFrameProcessorOutputColor = videoGraphOutputColor =
new ColorInfo.Builder() new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709) .setColorSpace(C.COLOR_SPACE_BT709)
.setColorRange(C.COLOR_RANGE_LIMITED) .setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2) .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build(); .build();
} else { } else {
videoFrameProcessorOutputColor = videoFrameProcessorInputColor; videoGraphOutputColor = videoGraphInputColor;
} }
try { try {
singleInputVideoGraph = videoGraph =
new SingleInputVideoGraph( new VideoGraphWrapper(
context, context,
videoFrameProcessorFactory, new SingleInputVideoGraph.Factory(videoFrameProcessorFactory),
videoFrameProcessorInputColor, videoGraphInputColor,
videoFrameProcessorOutputColor, videoGraphOutputColor,
new SingleInputVideoGraph.Listener() {
@Nullable
@Override
public SurfaceInfo onOutputSizeChanged(int width, int height) {
@Nullable SurfaceInfo surfaceInfo = null;
try {
surfaceInfo = encoderWrapper.getSurfaceInfo(width, height);
} catch (ExportException e) {
errorConsumer.accept(e);
}
return surfaceInfo;
}
@Override
public void onEnded(long finalFramePresentationTimeUs) {
VideoSampleExporter.this.finalFramePresentationTimeUs =
finalFramePresentationTimeUs;
try {
encoderWrapper.signalEndOfInputStream();
} catch (ExportException e) {
errorConsumer.accept(e);
}
}
},
errorConsumer, errorConsumer,
debugViewProvider, debugViewProvider,
MoreExecutors.directExecutor(), compositionEffects);
/* renderFramesAutomatically= */ true, videoGraph.initialize();
presentation);
} catch (VideoFrameProcessingException e) { } catch (VideoFrameProcessingException e) {
throw ExportException.createForVideoFrameProcessingException(e); throw ExportException.createForVideoFrameProcessingException(e);
} }
} }
@Override @Override
public GraphInput getInput(EditedMediaItem item, Format format) { public GraphInput getInput(EditedMediaItem editedMediaItem, Format format)
return singleInputVideoGraph.getInput(); throws ExportException {
try {
return videoGraph.getInput();
} catch (VideoFrameProcessingException e) {
throw ExportException.createForVideoFrameProcessingException(e);
}
} }
@Override @Override
public void release() { public void release() {
singleInputVideoGraph.release(); videoGraph.release();
encoderWrapper.release(); encoderWrapper.release();
} }
@ -206,7 +186,7 @@ import org.checkerframework.dataflow.qual.Pure;
// Internal ref b/235045165: Some encoder incorrectly set a zero presentation time on the // Internal ref b/235045165: Some encoder incorrectly set a zero presentation time on the
// penultimate buffer (before EOS), and sets the actual timestamp on the EOS buffer. Use the // penultimate buffer (before EOS), and sets the actual timestamp on the EOS buffer. Use the
// last processed frame presentation time instead. // last processed frame presentation time instead.
if (singleInputVideoGraph.hasProducedFrameWithTimestampZero() == hasMuxedTimestampZero if (videoGraph.hasProducedFrameWithTimestampZero() == hasMuxedTimestampZero
&& finalFramePresentationTimeUs != C.TIME_UNSET && finalFramePresentationTimeUs != C.TIME_UNSET
&& bufferInfo.size > 0) { && bufferInfo.size > 0) {
bufferInfo.presentationTimeUs = finalFramePresentationTimeUs; bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
@ -478,4 +458,79 @@ import org.checkerframework.dataflow.qual.Pure;
releaseEncoder = true; releaseEncoder = true;
} }
} }
  /**
   * A {@link VideoGraph} decorator that also acts as the graph's {@link VideoGraph.Listener},
   * forwarding output-size changes to the outer exporter's encoder and reporting errors through
   * {@code errorConsumer}.
   *
   * <p>Non-static inner class: listener callbacks read and write outer {@code VideoSampleExporter}
   * state ({@code encoderWrapper}, {@code finalFramePresentationTimeUs}).
   */
  private final class VideoGraphWrapper implements VideoGraph, VideoGraph.Listener {
    private final VideoGraph videoGraph;
    private final Consumer<ExportException> errorConsumer;

    /**
     * Creates the wrapped {@link VideoGraph} via {@code videoGraphFactory}, registering this
     * instance as its listener on a direct executor.
     *
     * @throws VideoFrameProcessingException If the factory fails to create the graph.
     */
    public VideoGraphWrapper(
        Context context,
        VideoGraph.Factory videoGraphFactory,
        ColorInfo videoFrameProcessorInputColor,
        ColorInfo videoFrameProcessorOutputColor,
        Consumer<ExportException> errorConsumer,
        DebugViewProvider debugViewProvider,
        List<Effect> compositionEffects)
        throws VideoFrameProcessingException {
      this.errorConsumer = errorConsumer;
      // To satisfy the nullness checker by declaring an initialized this reference used in the
      // videoGraphFactory.create method
      @SuppressWarnings("nullness:assignment")
      @Initialized
      VideoGraphWrapper thisRef = this;
      videoGraph =
          videoGraphFactory.create(
              context,
              videoFrameProcessorInputColor,
              videoFrameProcessorOutputColor,
              errorConsumer,
              debugViewProvider,
              /* listener= */ thisRef,
              /* listenerExecutor= */ MoreExecutors.directExecutor(),
              compositionEffects);
    }

    @Nullable
    @Override
    public SurfaceInfo onOutputSizeChanged(int width, int height) {
      // Ask the encoder for a surface matching the new size; a failure is reported to the error
      // consumer and null (no output needed) is returned instead of propagating.
      @Nullable SurfaceInfo surfaceInfo = null;
      try {
        surfaceInfo = encoderWrapper.getSurfaceInfo(width, height);
      } catch (ExportException e) {
        errorConsumer.accept(e);
      }
      return surfaceInfo;
    }

    @Override
    public void onEnded(long finalFramePresentationTimeUs) {
      // Record the last frame's timestamp on the outer exporter, then signal end of stream to the
      // encoder; encoder failures are reported rather than thrown from this callback.
      VideoSampleExporter.this.finalFramePresentationTimeUs = finalFramePresentationTimeUs;
      try {
        encoderWrapper.signalEndOfInputStream();
      } catch (ExportException e) {
        errorConsumer.accept(e);
      }
    }

    // The remaining methods simply delegate to the wrapped graph.

    @Override
    public void initialize() throws VideoFrameProcessingException {
      videoGraph.initialize();
    }

    @Override
    public GraphInput getInput() throws VideoFrameProcessingException {
      return videoGraph.getInput();
    }

    @Override
    public boolean hasProducedFrameWithTimestampZero() {
      return videoGraph.hasProducedFrameWithTimestampZero();
    }

    @Override
    public void release() {
      videoGraph.release();
    }
  }
} }