Integrate MultiInputVideoGraph in CompositionPlayer

Image/video transitions are not yet supported.

PiperOrigin-RevId: 715393251
This commit is contained in:
claincly 2025-01-14 08:31:01 -08:00 committed by Copybara-Service
parent 8f17ab84f8
commit e9e0569425
18 changed files with 545 additions and 204 deletions

View File

@ -35,6 +35,7 @@ public interface PreviewingVideoGraph extends VideoGraph {
* @param debugViewProvider A {@link DebugViewProvider}.
* @param listener A {@link Listener}.
* @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
* @param videoCompositorSettings The {@link VideoCompositorSettings}.
* @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
* @param initialTimestampOffsetUs The timestamp offset for the first frame, in microseconds.
* @return A new instance.
@ -47,9 +48,16 @@ public interface PreviewingVideoGraph extends VideoGraph {
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException;
/**
* Returns whether the {@link VideoGraph} implementation supports {@linkplain #registerInput
* registering} multiple inputs.
*/
boolean supportsMultipleInputs();
}
/**

View File

@ -0,0 +1,104 @@
/*
 * Copyright 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.effect;

import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.UnstableApi;
import java.util.List;
import java.util.concurrent.Executor;

/**
 * A {@linkplain PreviewingVideoGraph previewing} specific implementation of {@link
 * MultipleInputVideoGraph}.
 *
 * <p>Output frames are not rendered automatically; they are rendered on demand via {@link
 * #renderOutputFrame(long)}.
 */
@UnstableApi
public final class PreviewingMultipleInputVideoGraph extends MultipleInputVideoGraph
    implements PreviewingVideoGraph {

  /** A factory for creating a {@link PreviewingMultipleInputVideoGraph}. */
  public static final class Factory implements PreviewingVideoGraph.Factory {

    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;

    /**
     * Creates a new factory that uses the {@link DefaultVideoFrameProcessor.Factory} with its
     * default values.
     */
    public Factory() {
      videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory.Builder().build();
    }

    @Override
    public PreviewingVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs) {
      return new PreviewingMultipleInputVideoGraph(
          context,
          videoFrameProcessorFactory,
          outputColorInfo,
          debugViewProvider,
          listener,
          listenerExecutor,
          videoCompositorSettings,
          compositionEffects,
          initialTimestampOffsetUs);
    }

    @Override
    public boolean supportsMultipleInputs() {
      // This graph composites several registered inputs into one output.
      return true;
    }
  }

  /**
   * Creates an instance. Construction goes through {@link Factory#create}; {@code
   * renderFramesAutomatically} is fixed to {@code false} so that output frames are released only
   * when {@link #renderOutputFrame(long)} is called.
   */
  private PreviewingMultipleInputVideoGraph(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo outputColorInfo,
      DebugViewProvider debugViewProvider,
      Listener listener,
      Executor listenerExecutor,
      VideoCompositorSettings videoCompositorSettings,
      List<Effect> compositionEffects,
      long initialTimestampOffsetUs) {
    super(
        context,
        videoFrameProcessorFactory,
        outputColorInfo,
        debugViewProvider,
        listener,
        listenerExecutor,
        videoCompositorSettings,
        compositionEffects,
        initialTimestampOffsetUs,
        /* renderFramesAutomatically= */ false);
  }

  @Override
  public void renderOutputFrame(long renderTimeNs) {
    // Delegate to the composition VideoFrameProcessor, which produces the final composited output.
    getCompositionVideoFrameProcessor().renderOutputFrame(renderTimeNs);
  }
}

View File

@ -63,6 +63,7 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
return new PreviewingSingleInputVideoGraph(
@ -74,6 +75,11 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
listenerExecutor,
initialTimestampOffsetUs);
}
@Override
public boolean supportsMultipleInputs() {
  // A single-input video graph registers at most one input (see SingleInputVideoGraph).
  return false;
}
}
private PreviewingSingleInputVideoGraph(

View File

@ -99,7 +99,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
@Override
public void registerInput(int inputIndex) throws VideoFrameProcessingException {
checkStateNotNull(videoFrameProcessor == null && !released);
checkState(this.inputIndex == C.INDEX_UNSET);
checkState(this.inputIndex == C.INDEX_UNSET, "This VideoGraph supports only one input.");
this.inputIndex = inputIndex;
videoFrameProcessor =

View File

@ -18,6 +18,7 @@ package androidx.media3.exoplayer.image;
import static androidx.media3.common.C.FIRST_FRAME_NOT_RENDERED;
import static androidx.media3.common.C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED;
import static androidx.media3.common.C.FIRST_FRAME_RENDERED;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
@ -47,9 +48,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayDeque;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** A {@link Renderer} implementation for images. */
@UnstableApi
@ -117,6 +116,7 @@ public class ImageRenderer extends BaseRenderer {
@Nullable private TileInfo tileInfo;
@Nullable private TileInfo nextTileInfo;
private int currentTileIndex;
private boolean codecNeedsInitialization;
/**
* Creates an instance.
@ -164,7 +164,7 @@ public class ImageRenderer extends BaseRenderer {
if (result == C.RESULT_FORMAT_READ) {
// Note that this works because we only expect to enter this if-condition once per playback.
inputFormat = checkStateNotNull(formatHolder.format);
initDecoder();
codecNeedsInitialization = true;
} else if (result == C.RESULT_BUFFER_READ) {
// End of stream read having not read a format.
checkState(flagsOnlyBuffer.isEndOfStream());
@ -176,6 +176,9 @@ public class ImageRenderer extends BaseRenderer {
return;
}
}
if (decoder == null && !maybeInitCodec()) {
return;
}
try {
// Rendering loop.
TraceUtil.beginSection("drainAndFeedDecoder");
@ -315,7 +318,7 @@ public class ImageRenderer extends BaseRenderer {
// We're waiting to re-initialize the decoder, and have now processed all final buffers.
releaseDecoderResources();
checkStateNotNull(inputFormat);
initDecoder();
maybeInitCodec();
} else {
checkStateNotNull(outputBuffer).release();
if (pendingOutputStreamChanges.isEmpty()) {
@ -407,6 +410,23 @@ public class ImageRenderer extends BaseRenderer {
return false;
}
/**
 * Initializes the processing pipeline, if needed by the implementation.
 *
 * <p>This method is called before initializing the image decoder.
 *
 * <p>The default implementation is a no-op that reports success.
 *
 * @return {@code true} if the processing pipeline was successfully initialized, or if the
 *     renderer does not use a processing pipeline. If {@code false} is returned, the caller
 *     should try again later.
 * @throws ExoPlaybackException If an error occurs preparing for initializing the codec.
 */
protected boolean maybeInitializeProcessingPipeline() throws ExoPlaybackException {
  // Do nothing: the base ImageRenderer has no processing pipeline.
  return true;
}
/**
* Called when an output buffer is successfully processed.
*
@ -483,6 +503,7 @@ public class ImageRenderer extends BaseRenderer {
return !readyToOutputTiles;
case C.RESULT_FORMAT_READ:
inputFormat = checkStateNotNull(formatHolder.format);
codecNeedsInitialization = true;
decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM_THEN_WAIT;
return true;
default:
@ -490,10 +511,16 @@ public class ImageRenderer extends BaseRenderer {
}
}
@RequiresNonNull("inputFormat")
@EnsuresNonNull("decoder")
private void initDecoder() throws ExoPlaybackException {
if (canCreateDecoderForFormat(inputFormat)) {
private boolean maybeInitCodec() throws ExoPlaybackException {
if (!maybeInitializeProcessingPipeline()) {
return false;
}
if (!codecNeedsInitialization) {
return true;
}
if (canCreateDecoderForFormat(checkNotNull(inputFormat))) {
if (decoder != null) {
decoder.release();
}
@ -504,6 +531,8 @@ public class ImageRenderer extends BaseRenderer {
inputFormat,
PlaybackException.ERROR_CODE_DECODING_FORMAT_UNSUPPORTED);
}
codecNeedsInitialization = false;
return true;
}
private boolean canCreateDecoderForFormat(Format format) {

View File

@ -1100,7 +1100,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
private void maybeInitCodecWithFallback(
@Nullable MediaCrypto crypto, boolean mediaCryptoRequiresSecureDecoder)
throws DecoderInitializationException {
throws DecoderInitializationException, ExoPlaybackException {
Format inputFormat = checkNotNull(this.inputFormat);
if (availableCodecInfos == null) {
try {
@ -1133,6 +1133,10 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
ArrayDeque<MediaCodecInfo> availableCodecInfos = checkNotNull(this.availableCodecInfos);
while (codec == null) {
MediaCodecInfo codecInfo = checkNotNull(availableCodecInfos.peekFirst());
if (!maybeInitializeProcessingPipeline(codecInfo, inputFormat)) {
return;
}
if (!shouldInitCodec(codecInfo)) {
return;
}
@ -1216,7 +1220,6 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
if (codecOperatingRate <= assumedMinimumCodecOperatingRate) {
codecOperatingRate = CODEC_OPERATING_RATE_UNSET;
}
onReadyToInitializeCodec(codecInfo, inputFormat);
codecInitializingTimestamp = getClock().elapsedRealtime();
MediaCodecAdapter.Configuration configuration =
getMediaCodecConfiguration(codecInfo, inputFormat, crypto, codecOperatingRate);
@ -1487,7 +1490,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
}
/**
* Called when ready to initialize the {@link MediaCodecAdapter}.
* Initializes the processing pipeline, if needed by the implementation.
*
* <p>This method is called just before the renderer obtains the {@linkplain
* #getMediaCodecConfiguration configuration} for the {@link MediaCodecAdapter} and creates the
@ -1497,11 +1500,15 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
*
* @param codecInfo The {@link MediaCodecInfo} of the codec which will be initialized.
* @param format The {@link Format} for which the codec is being configured.
* @return Returns {@code true} when the processing pipeline is successfully initialized, or the
* {@linkplain MediaCodecRenderer renderer} does not use a processing pipeline. The caller
* should try again later, if {@code false} is returned.
* @throws ExoPlaybackException If an error occurs preparing for initializing the codec.
*/
protected void onReadyToInitializeCodec(MediaCodecInfo codecInfo, Format format)
protected boolean maybeInitializeProcessingPipeline(MediaCodecInfo codecInfo, Format format)
throws ExoPlaybackException {
// Do nothing.
return true;
}
/**

View File

@ -108,8 +108,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public void initialize(Format sourceFormat) {
public boolean initialize(Format sourceFormat) {
// Do nothing as there is no initialization needed.
return true;
}
@Override

View File

@ -384,7 +384,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
eventListener,
maxDroppedFramesToNotify,
assumedMinimumCodecOperatingRate,
videoSinkProvider == null ? null : videoSinkProvider.getSink());
/* videoSink= */ videoSinkProvider == null
? null
: videoSinkProvider.getSink(/* inputIndex= */ 0));
}
/**
@ -696,11 +698,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
// been reset.
if (!hasSetVideoSink) {
if (videoEffects != null && videoSink == null) {
videoSink =
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
new PlaybackVideoGraphWrapper.Builder(context, videoFrameReleaseControl)
.setClock(getClock())
.build()
.getSink();
.build();
playbackVideoGraphWrapper.setTotalVideoInputCount(1);
videoSink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
}
hasSetVideoSink = true;
}
@ -1222,16 +1225,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
@CallSuper
@Override
protected void onReadyToInitializeCodec(MediaCodecInfo codecInfo, Format format)
protected boolean maybeInitializeProcessingPipeline(MediaCodecInfo codecInfo, Format format)
throws ExoPlaybackException {
if (videoSink != null && !videoSink.isInitialized()) {
try {
videoSink.initialize(format);
return videoSink.initialize(format);
} catch (VideoSink.VideoSinkException e) {
throw createRendererException(
e, format, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
}
return true;
}
/** Sets the {@linkplain Effect video effects} to apply. */

View File

@ -19,6 +19,8 @@ import static androidx.media3.common.VideoFrameProcessor.DROP_OUTPUT_FRAME;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static androidx.media3.common.util.Util.getMaxPendingFramesCountForMediaCodecDecoders;
import static androidx.media3.exoplayer.video.VideoSink.INPUT_TYPE_SURFACE;
import static java.lang.annotation.ElementType.TYPE_USE;
@ -26,6 +28,7 @@ import android.content.Context;
import android.graphics.Bitmap;
import android.os.Looper;
import android.util.Pair;
import android.util.SparseArray;
import android.view.Surface;
import androidx.annotation.FloatRange;
import androidx.annotation.IntDef;
@ -39,6 +42,7 @@ import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
@ -122,6 +126,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory;
private PreviewingVideoGraph.@MonotonicNonNull Factory previewingVideoGraphFactory;
private List<Effect> compositionEffects;
private VideoCompositorSettings compositorSettings;
private Clock clock;
private boolean requestOpenGlToneMapping;
private boolean built;
@ -131,6 +136,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
this.context = context.getApplicationContext();
this.videoFrameReleaseControl = videoFrameReleaseControl;
compositionEffects = ImmutableList.of();
compositorSettings = VideoCompositorSettings.DEFAULT;
clock = Clock.DEFAULT;
}
@ -179,6 +185,18 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
return this;
}
/**
* Sets the {@link VideoCompositorSettings}.
*
* @param compositorSettings The {@link VideoCompositorSettings}.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setCompositorSettings(VideoCompositorSettings compositorSettings) {
this.compositorSettings = compositorSettings;
return this;
}
/**
* Sets the {@link Clock} that will be used.
*
@ -239,10 +257,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private static final int STATE_INITIALIZED = 1;
private static final int STATE_RELEASED = 2;
private static final int PRIMARY_SEQUENCE_INDEX = 0;
private static final Executor NO_OP_EXECUTOR = runnable -> {};
private final Context context;
private final InputVideoSink inputVideoSink;
/**
* A queue of unprocessed input frame start positions. Each position is associated with the
@ -251,7 +270,9 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final TimedValueQueue<Long> streamStartPositionsUs;
private final PreviewingVideoGraph.Factory previewingVideoGraphFactory;
private final SparseArray<InputVideoSink> inputVideoSinks;
private final List<Effect> compositionEffects;
private final VideoCompositorSettings compositorSettings;
private final VideoSink defaultVideoSink;
private final VideoSink.VideoFrameHandler videoFrameHandler;
private final Clock clock;
@ -285,12 +306,16 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
*/
private long bufferTimestampAdjustmentUs;
private int totalVideoInputCount;
private int registeredVideoInputCount;
private PlaybackVideoGraphWrapper(Builder builder) {
context = builder.context;
inputVideoSink = new InputVideoSink(context);
streamStartPositionsUs = new TimedValueQueue<>();
previewingVideoGraphFactory = checkStateNotNull(builder.previewingVideoGraphFactory);
inputVideoSinks = new SparseArray<>();
compositionEffects = builder.compositionEffects;
compositorSettings = builder.compositorSettings;
clock = builder.clock;
defaultVideoSink = new DefaultVideoSink(builder.videoFrameReleaseControl, clock);
videoFrameHandler =
@ -306,12 +331,12 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
};
listeners = new CopyOnWriteArraySet<>();
listeners.add(inputVideoSink);
requestOpenGlToneMapping = builder.requestOpenGlToneMapping;
videoGraphOutputFormat = new Format.Builder().build();
state = STATE_CREATED;
lastOutputBufferPresentationTimeUs = C.TIME_UNSET;
finalBufferPresentationTimeUs = C.TIME_UNSET;
totalVideoInputCount = C.LENGTH_UNSET;
state = STATE_CREATED;
}
/**
@ -335,7 +360,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// VideoSinkProvider methods
@Override
public VideoSink getSink() {
public VideoSink getSink(int inputIndex) {
checkState(!contains(inputVideoSinks, inputIndex));
InputVideoSink inputVideoSink = new InputVideoSink(context, inputIndex);
addListener(inputVideoSink);
inputVideoSinks.put(inputIndex, inputVideoSink);
return inputVideoSink;
}
@ -440,53 +469,77 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// Internal methods
private VideoFrameProcessor initialize(Format sourceFormat) throws VideoSink.VideoSinkException {
checkState(state == STATE_CREATED);
ColorInfo inputColorInfo = getAdjustedInputColorInfo(sourceFormat.colorInfo);
ColorInfo outputColorInfo;
if (requestOpenGlToneMapping) {
outputColorInfo = ColorInfo.SDR_BT709_LIMITED;
} else {
outputColorInfo = inputColorInfo;
if (outputColorInfo.colorTransfer == C.COLOR_TRANSFER_HLG && Util.SDK_INT < 34) {
// PQ SurfaceView output is supported from API 33, but HLG output is supported from API 34.
// Therefore, convert HLG to PQ below API 34, so that HLG input can be displayed properly on
// API 33.
outputColorInfo =
outputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
@Nullable
private VideoFrameProcessor registerInput(Format sourceFormat, int inputIndex)
throws VideoSink.VideoSinkException {
if (inputIndex == PRIMARY_SEQUENCE_INDEX) {
checkState(state == STATE_CREATED);
ColorInfo inputColorInfo = getAdjustedInputColorInfo(sourceFormat.colorInfo);
ColorInfo outputColorInfo;
if (requestOpenGlToneMapping) {
outputColorInfo = ColorInfo.SDR_BT709_LIMITED;
} else {
outputColorInfo = inputColorInfo;
if (outputColorInfo.colorTransfer == C.COLOR_TRANSFER_HLG && Util.SDK_INT < 34) {
// PQ SurfaceView output is supported from API 33, but HLG output is supported from API
// 34.
// Therefore, convert HLG to PQ below API 34, so that HLG input can be displayed properly
// on
// API 33.
outputColorInfo =
outputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
}
}
}
handler = clock.createHandler(checkStateNotNull(Looper.myLooper()), /* callback= */ null);
try {
videoGraph =
previewingVideoGraphFactory.create(
context,
outputColorInfo,
DebugViewProvider.NONE,
/* listener= */ this,
/* listenerExecutor= */ handler::post,
/* compositionEffects= */ ImmutableList.of(),
/* initialTimestampOffsetUs= */ 0);
handler = clock.createHandler(checkStateNotNull(Looper.myLooper()), /* callback= */ null);
try {
videoGraph =
previewingVideoGraphFactory.create(
context,
outputColorInfo,
DebugViewProvider.NONE,
/* listener= */ this,
/* listenerExecutor= */ handler::post,
compositorSettings,
compositionEffects,
/* initialTimestampOffsetUs= */ 0);
videoGraph.initialize();
} catch (VideoFrameProcessingException e) {
throw new VideoSink.VideoSinkException(e, sourceFormat);
}
if (currentSurfaceAndSize != null) {
Surface surface = currentSurfaceAndSize.first;
Size size = currentSurfaceAndSize.second;
maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
}
videoGraph.registerInput(/* inputIndex= */ 0);
defaultVideoSink.initialize(sourceFormat);
state = STATE_INITIALIZED;
} else {
if (!isInitialized()) {
// Make sure the primary sequence is initialized first.
return null;
}
}
try {
checkNotNull(videoGraph).registerInput(inputIndex);
} catch (VideoFrameProcessingException e) {
throw new VideoSink.VideoSinkException(e, sourceFormat);
}
defaultVideoSink.setListener(new DefaultVideoSinkListener(), /* executor= */ handler::post);
defaultVideoSink.initialize(sourceFormat);
state = STATE_INITIALIZED;
return videoGraph.getProcessor(/* inputIndex= */ 0);
registeredVideoInputCount++;
defaultVideoSink.setListener(
new DefaultVideoSinkListener(), /* executor= */ checkNotNull(handler)::post);
return videoGraph.getProcessor(inputIndex);
}
/** Returns whether the video graph has been created and initialized. */
private boolean isInitialized() {
  return state == STATE_INITIALIZED;
}
/**
 * Sets the total number of video inputs this instance expects to be registered.
 *
 * <p>Until this matches the number of registered inputs, input sinks reject frames (see {@code
 * shouldRenderToInputVideoSink}).
 *
 * @param totalVideoInputCount The expected number of video inputs.
 */
public void setTotalVideoInputCount(int totalVideoInputCount) {
  this.totalVideoInputCount = totalVideoInputCount;
}
private void maybeSetOutputSurfaceInfo(@Nullable Surface surface, int width, int height) {
if (videoGraph == null) {
return;
@ -561,6 +614,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
outputStreamStartPositionUs, bufferTimestampAdjustmentUs, /* unused */ C.TIME_UNSET);
}
/**
 * Returns whether frames may be forwarded to the input sinks: the expected input count must have
 * been set, and every expected input must already be registered.
 */
private boolean shouldRenderToInputVideoSink() {
  // totalVideoInputCount defaults to C.LENGTH_UNSET until setTotalVideoInputCount is called.
  return totalVideoInputCount != C.LENGTH_UNSET
      && totalVideoInputCount == registeredVideoInputCount;
}
private static ColorInfo getAdjustedInputColorInfo(@Nullable ColorInfo inputColorInfo) {
if (inputColorInfo == null || !inputColorInfo.isDataSpaceValid()) {
return ColorInfo.SDR_BT709_LIMITED;
@ -573,9 +631,10 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final class InputVideoSink implements VideoSink, PlaybackVideoGraphWrapper.Listener {
private final int videoFrameProcessorMaxPendingFrameCount;
private final int inputIndex;
private ImmutableList<Effect> videoEffects;
private @MonotonicNonNull VideoFrameProcessor videoFrameProcessor;
@Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private Format inputFormat;
private @InputType int inputType;
private long inputBufferTimestampAdjustmentUs;
@ -591,12 +650,13 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private boolean signaledEndOfStream;
/** Creates a new instance. */
public InputVideoSink(Context context) {
public InputVideoSink(Context context, int inputIndex) {
// TODO b/226330223 - Investigate increasing frame count when frame dropping is allowed.
// TODO b/278234847 - Evaluate whether limiting frame count when frame dropping is not allowed
// reduces decoder timeouts, and consider restoring.
this.inputIndex = inputIndex;
videoFrameProcessorMaxPendingFrameCount =
Util.getMaxPendingFramesCountForMediaCodecDecoders(context);
getMaxPendingFramesCountForMediaCodecDecoders(context);
videoEffects = ImmutableList.of();
lastBufferPresentationTimeUs = C.TIME_UNSET;
listener = VideoSink.Listener.NO_OP;
@ -630,9 +690,10 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
@Override
public void initialize(Format sourceFormat) throws VideoSinkException {
public boolean initialize(Format sourceFormat) throws VideoSinkException {
checkState(!isInitialized());
videoFrameProcessor = PlaybackVideoGraphWrapper.this.initialize(sourceFormat);
videoFrameProcessor = PlaybackVideoGraphWrapper.this.registerInput(sourceFormat, inputIndex);
return videoFrameProcessor != null;
}
@Override
@ -776,6 +837,9 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
public boolean handleInputFrame(
long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
checkState(isInitialized());
if (!shouldRenderToInputVideoSink()) {
return false;
}
// The sink takes in frames with monotonically increasing, non-offset frame
// timestamps. That is, with two ten-second long videos, the first frame of the second video
@ -812,10 +876,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Override
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
checkState(isInitialized());
if (!checkStateNotNull(videoFrameProcessor)
.queueInputBitmap(inputBitmap, timestampIterator)) {
if (!shouldRenderToInputVideoSink()
|| !checkNotNull(videoFrameProcessor).queueInputBitmap(inputBitmap, timestampIterator)) {
return false;
}
// TimestampIterator generates frame time.
long lastBufferPresentationTimeUs =
timestampIterator.getLastTimestampUs() - inputBufferTimestampAdjustmentUs;
@ -888,11 +953,15 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
* <p>Effects are pending until a new input stream is registered.
*/
private void setPendingVideoEffects(List<Effect> newVideoEffects) {
  if (previewingVideoGraphFactory.supportsMultipleInputs()) {
    // Multi-input graphs receive compositionEffects directly at creation time, so only the
    // per-input effects are kept here. NOTE(review): presumably the graph applies the
    // composition effects on the composited output — confirm against the graph implementation.
    this.videoEffects = ImmutableList.copyOf(newVideoEffects);
  } else {
    // Single-input graphs: append the composition effects after the per-input effects so both
    // are applied on this single input stream.
    this.videoEffects =
        new ImmutableList.Builder<Effect>()
            .addAll(newVideoEffects)
            .addAll(compositionEffects)
            .build();
  }
}
private void registerInputStream(Format inputFormat) {
@ -960,6 +1029,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException {
@ -977,12 +1047,18 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs);
} catch (Exception e) {
throw VideoFrameProcessingException.from(e);
}
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
}
/**

View File

@ -149,9 +149,10 @@ public interface VideoSink {
* Initializes the video sink.
*
* @param sourceFormat The format of the first input video or image.
* @return Whether initialization succeeded. If {@code false}, the caller should try again later.
* @throws VideoSink.VideoSinkException If initializing the sink failed.
*/
void initialize(Format sourceFormat) throws VideoSinkException;
boolean initialize(Format sourceFormat) throws VideoSinkException;
/** Returns whether the video sink is {@linkplain #initialize(Format) initialized}. */
boolean isInitialized();

View File

@ -18,14 +18,17 @@ package androidx.media3.exoplayer.video;
import android.view.Surface;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
/** A provider of {@link VideoSink VideoSinks}. */
@UnstableApi
public interface VideoSinkProvider {
/* package */ interface VideoSinkProvider {
/** Returns a {@link VideoSink} to forward video frames for processing. */
VideoSink getSink();
/**
* Returns the {@link VideoSink} to forward video frames for processing.
*
* @param inputIndex The index of the {@link VideoSink}.
* @return The {@link VideoSink} at the given index.
*/
VideoSink getSink(int inputIndex);
/** Sets the output surface info. */
void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution);

View File

@ -31,6 +31,7 @@ import androidx.media3.common.Format;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.TimestampIterator;
@ -61,7 +62,7 @@ public final class PlaybackVideoGraphWrapperTest {
public void initializeSink_calledTwice_throws() throws VideoSink.VideoSinkException {
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(new FakeVideoFrameProcessor());
VideoSink sink = playbackVideoGraphWrapper.getSink();
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
sink.initialize(new Format.Builder().build());
assertThrows(IllegalStateException.class, () -> sink.initialize(new Format.Builder().build()));
@ -76,7 +77,7 @@ public final class PlaybackVideoGraphWrapperTest {
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(videoFrameProcessor);
Format format = new Format.Builder().build();
VideoSink sink = playbackVideoGraphWrapper.getSink();
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
sink.initialize(format);
@ -200,10 +201,16 @@ public final class PlaybackVideoGraphWrapperTest {
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
when(previewingVideoGraph.getProcessor(anyInt())).thenReturn(videoFrameProcessor);
return previewingVideoGraph;
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
}
}

View File

@ -38,6 +38,7 @@ import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.NullableType;
@ -755,6 +756,7 @@ public class CompositionPlayerSeekTest {
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
return singleInputVideoGraphFactory.create(
@ -790,9 +792,15 @@ public class CompositionPlayerSeekTest {
}
},
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs);
}
@Override
public boolean supportsMultipleInputs() {
return singleInputVideoGraphFactory.supportsMultipleInputs();
}
}
private static final class ResettableCountDownLatch {

View File

@ -15,7 +15,7 @@
*/
package androidx.media3.transformer;
import static androidx.media3.common.PlaybackException.ERROR_CODE_DECODER_INIT_FAILED;
import static androidx.media3.common.PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED;
import static androidx.media3.common.util.Util.isRunningOnEmulator;
import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
@ -41,6 +41,7 @@ import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
@ -468,15 +469,26 @@ public class CompositionPlayerTest {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(
(context,
outputColorInfo,
debugViewProvider,
graphListener,
listenerExecutor,
compositionEffects,
initialTimestampOffsetUs) -> {
throw new VideoFrameProcessingException(
"Test video graph failed to initialize");
new PreviewingVideoGraph.Factory() {
@Override
public PreviewingVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException {
throw new VideoFrameProcessingException(
"Test video graph failed to initialize");
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
})
.build();
compositionPlayer.addListener(listener);
@ -488,7 +500,7 @@ public class CompositionPlayerTest {
PlaybackException thrownException =
assertThrows(PlaybackException.class, listener::waitUntilPlayerEnded);
assertThat(thrownException.errorCode).isEqualTo(ERROR_CODE_DECODER_INIT_FAILED);
assertThat(thrownException.errorCode).isEqualTo(ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
@Test
@ -503,7 +515,7 @@ public class CompositionPlayerTest {
() -> {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(FailingReleaseVideoGraph::new)
.setPreviewingVideoGraphFactory(new FailingReleaseVideoGraph.Factory())
.build();
compositionPlayer.addListener(playerTestListener);
compositionPlayer.setComposition(
@ -550,12 +562,43 @@ public class CompositionPlayerTest {
}
private static final class FailingReleaseVideoGraph extends ForwardingVideoGraph {
public FailingReleaseVideoGraph(
public static final class Factory implements PreviewingVideoGraph.Factory {
@Override
public PreviewingVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException {
return new FailingReleaseVideoGraph(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs);
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
}
private FailingReleaseVideoGraph(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
super(
@ -566,6 +609,7 @@ public class CompositionPlayerTest {
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs));
}

View File

@ -108,7 +108,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public void initialize(Format sourceFormat) throws VideoSinkException {
public boolean initialize(Format sourceFormat) throws VideoSinkException {
executeOrDelayThrowing(
videoSink -> {
if (videoSink.isInitialized()) {
@ -117,6 +117,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoSink.initialize(sourceFormat);
});
isInitialized = true;
return true;
}
@Override

View File

@ -28,6 +28,7 @@ import android.os.HandlerThread;
import android.os.Looper;
import android.os.Process;
import android.util.Pair;
import android.util.SparseBooleanArray;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
@ -135,8 +136,8 @@ public final class CompositionPlayer extends SimpleBasePlayer
}
/**
* Sets the {@link Looper} from which the player can be accessed and {@link Player.Listener}
* callbacks are dispatched too.
* Sets the {@link Looper} from which the player can be accessed and {@link Listener} callbacks
* are dispatched too.
*
* <p>By default, the builder uses the looper of the thread that calls {@link #build()}.
*
@ -231,7 +232,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
*
* <p>If no {@link Looper} has been called with {@link #setLooper(Looper)}, then this method
* must be called within a {@link Looper} thread which is the thread that can access the player
* instance and where {@link Player.Listener} callbacks are dispatched.
* instance and where {@link Listener} callbacks are dispatched.
*/
public CompositionPlayer build() {
checkState(!built);
@ -291,11 +292,16 @@ public final class CompositionPlayer extends SimpleBasePlayer
private final PreviewingVideoGraph.Factory previewingVideoGraphFactory;
private final HandlerWrapper compositionInternalListenerHandler;
/** Maps from input index to whether the video track is selected in that sequence. */
private final SparseBooleanArray videoTracksSelected;
private @MonotonicNonNull HandlerThread playbackThread;
private @MonotonicNonNull CompositionPlayerInternal compositionPlayerInternal;
private @MonotonicNonNull ImmutableList<MediaItemData> playlist;
private @MonotonicNonNull Composition composition;
private @MonotonicNonNull Size videoOutputSize;
private @MonotonicNonNull PlaybackVideoGraphWrapper playbackVideoGraphWrapper;
private long compositionDurationUs;
private boolean playWhenReady;
private @PlayWhenReadyChangeReason int playWhenReadyChangeReason;
@ -324,6 +330,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
imageDecoderFactory = builder.imageDecoderFactory;
previewingVideoGraphFactory = checkNotNull(builder.previewingVideoGraphFactory);
compositionInternalListenerHandler = clock.createHandler(builder.looper, /* callback= */ null);
videoTracksSelected = new SparseBooleanArray();
players = new ArrayList<>();
compositionDurationUs = C.TIME_UNSET;
playbackState = STATE_IDLE;
@ -348,6 +355,10 @@ public final class CompositionPlayer extends SimpleBasePlayer
checkState(this.composition == null);
composition = deactivateSpeedAdjustingVideoEffects(composition);
if (composition.sequences.size() > 1 && !previewingVideoGraphFactory.supportsMultipleInputs()) {
Log.w(TAG, "Setting multi-sequence Composition with single input video graph.");
}
setCompositionInternal(composition);
if (videoOutput != null) {
if (videoOutput instanceof SurfaceHolder) {
@ -573,7 +584,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
@Override
protected ListenableFuture<?> handleSeek(
int mediaItemIndex, long positionMs, @Player.Command int seekCommand) {
int mediaItemIndex, long positionMs, @Command int seekCommand) {
resetLivePositionSuppliers();
CompositionPlayerInternal compositionPlayerInternal =
checkStateNotNull(this.compositionPlayerInternal);
@ -679,9 +690,10 @@ public final class CompositionPlayer extends SimpleBasePlayer
VideoFrameReleaseControl videoFrameReleaseControl =
new VideoFrameReleaseControl(
context, new CompositionFrameTimingEvaluator(), /* allowedJoiningTimeMs= */ 0);
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
playbackVideoGraphWrapper =
new PlaybackVideoGraphWrapper.Builder(context, videoFrameReleaseControl)
.setPreviewingVideoGraphFactory(checkNotNull(previewingVideoGraphFactory))
.setCompositorSettings(composition.videoCompositorSettings)
.setCompositionEffects(composition.effects.videoEffects)
.setClock(clock)
.setRequestOpenGlToneMapping(
@ -694,18 +706,16 @@ public final class CompositionPlayer extends SimpleBasePlayer
for (int i = 0; i < composition.sequences.size(); i++) {
EditedMediaItemSequence editedMediaItemSequence = composition.sequences.get(i);
SequenceRenderersFactory sequenceRenderersFactory =
i == 0
? SequenceRenderersFactory.create(
context,
editedMediaItemSequence,
playbackAudioGraphWrapper,
playbackVideoGraphWrapper.getSink(),
imageDecoderFactory,
/* inputIndex= */ i,
/* requestToneMapping= */ composition.hdrMode
== Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC)
: SequenceRenderersFactory.createForAudio(
context, editedMediaItemSequence, playbackAudioGraphWrapper, /* inputIndex= */ i);
SequenceRenderersFactory.create(
context,
editedMediaItemSequence,
playbackAudioGraphWrapper,
playbackVideoGraphWrapper.getSink(/* inputIndex= */ i),
imageDecoderFactory,
/* inputIndex= */ i,
/* requestToneMapping= */ composition.hdrMode
== Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC);
ExoPlayer.Builder playerBuilder =
new ExoPlayer.Builder(context)
.setLooper(getApplicationLooper())
@ -767,31 +777,38 @@ public final class CompositionPlayer extends SimpleBasePlayer
EditedMediaItem editedMediaItem = sequence.editedMediaItems.get(i);
checkArgument(editedMediaItem.durationUs != C.TIME_UNSET);
long durationUs = editedMediaItem.getPresentationDurationUs();
// Generate silence for primary sequence.
MediaSource silenceMediaSource =
new ClippingMediaSource(
new SilenceMediaSource(editedMediaItem.durationUs),
editedMediaItem.mediaItem.clippingConfiguration.startPositionUs,
editedMediaItem.mediaItem.clippingConfiguration.endPositionUs);
// The MediaSource that loads the MediaItem
MediaSource mainMediaSource = mediaSourceFactory.createMediaSource(editedMediaItem.mediaItem);
if (editedMediaItem.removeAudio) {
mainMediaSource =
new FilteringMediaSource(
mainMediaSource, ImmutableSet.of(C.TRACK_TYPE_VIDEO, C.TRACK_TYPE_IMAGE));
}
MediaSource silenceGeneratedMediaSource =
createMediaSourceWithSilence(mediaSourceFactory, editedMediaItem);
MediaSource mergingMediaSource = new MergingMediaSource(mainMediaSource, silenceMediaSource);
MediaSource itemMediaSource =
wrapWithVideoEffectsBasedMediaSources(
mergingMediaSource, editedMediaItem.effects.videoEffects, durationUs);
silenceGeneratedMediaSource, editedMediaItem.effects.videoEffects, durationUs);
mediaSourceBuilder.add(
itemMediaSource, /* initialPlaceholderDurationMs= */ usToMs(durationUs));
}
player.setMediaSource(mediaSourceBuilder.build());
}
private static MediaSource createMediaSourceWithSilence(
MediaSource.Factory mediaSourceFactory, EditedMediaItem editedMediaItem) {
// The MediaSource that loads the MediaItem
MediaSource mainMediaSource = mediaSourceFactory.createMediaSource(editedMediaItem.mediaItem);
if (editedMediaItem.removeAudio) {
mainMediaSource =
new FilteringMediaSource(
mainMediaSource, ImmutableSet.of(C.TRACK_TYPE_VIDEO, C.TRACK_TYPE_IMAGE));
}
MediaSource silenceMediaSource =
new ClippingMediaSource(
new SilenceMediaSource(editedMediaItem.durationUs),
editedMediaItem.mediaItem.clippingConfiguration.startPositionUs,
editedMediaItem.mediaItem.clippingConfiguration.endPositionUs);
return new MergingMediaSource(mainMediaSource, silenceMediaSource);
}
private void setSecondaryPlayerSequence(
ExoPlayer player, EditedMediaItemSequence sequence, long primarySequenceDurationUs) {
@ -804,21 +821,17 @@ public final class CompositionPlayer extends SimpleBasePlayer
while (accumulatedDurationUs < primarySequenceDurationUs) {
EditedMediaItem editedMediaItem = sequence.editedMediaItems.get(i);
long itemPresentationDurationUs = editedMediaItem.getPresentationDurationUs();
MediaItem mediaItem = editedMediaItem.mediaItem;
if (accumulatedDurationUs + itemPresentationDurationUs <= primarySequenceDurationUs) {
mediaSourceBuilder.add(
mediaSourceFactory.createMediaSource(mediaItem),
createMediaSourceWithSilence(mediaSourceFactory, editedMediaItem),
/* initialPlaceholderDurationMs= */ usToMs(itemPresentationDurationUs));
accumulatedDurationUs += itemPresentationDurationUs;
} else {
long remainingDurationUs = primarySequenceDurationUs - accumulatedDurationUs;
// TODO: b/289989542 - Handle already clipped, or speed adjusted media.
mediaSourceBuilder.add(
new ClippingMediaSource(
mediaSourceFactory.createMediaSource(mediaItem),
mediaItem.clippingConfiguration.startPositionUs,
mediaItem.clippingConfiguration.startPositionUs + remainingDurationUs),
/* initialPlaceholderDurationMs= */ usToMs(remainingDurationUs));
createMediaSourceWithSilence(
mediaSourceFactory, clipToDuration(editedMediaItem, remainingDurationUs)));
break;
}
i = (i + 1) % sequence.editedMediaItems.size();
@ -826,6 +839,24 @@ public final class CompositionPlayer extends SimpleBasePlayer
player.setMediaSource(mediaSourceBuilder.build());
}
private static EditedMediaItem clipToDuration(EditedMediaItem editedMediaItem, long durationUs) {
MediaItem.ClippingConfiguration clippingConfiguration =
editedMediaItem.mediaItem.clippingConfiguration;
return editedMediaItem
.buildUpon()
.setMediaItem(
editedMediaItem
.mediaItem
.buildUpon()
.setClippingConfiguration(
clippingConfiguration
.buildUpon()
.setEndPositionUs(clippingConfiguration.startPositionUs + durationUs)
.build())
.build())
.build();
}
private MediaSource wrapWithVideoEffectsBasedMediaSources(
MediaSource mediaSource, ImmutableList<Effect> videoEffects, long durationUs) {
MediaSource newMediaSource = mediaSource;
@ -872,7 +903,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
@Override
public Period getPeriod(int periodIndex, Period period, boolean setIds) {
Timeline.Period wrappedPeriod = newTimeline.getPeriod(periodIndex, period, setIds);
Period wrappedPeriod = newTimeline.getPeriod(periodIndex, period, setIds);
wrappedPeriod.durationUs = durationUs;
return wrappedPeriod;
}
@ -1062,7 +1093,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
}
}
private final class PlayerListener implements Player.Listener {
private final class PlayerListener implements Listener {
private final int playerIndex;
public PlayerListener(int playerIndex) {
@ -1092,11 +1123,26 @@ public final class CompositionPlayer extends SimpleBasePlayer
}
}
private void onVideoTrackSelection(boolean selected, int inputIndex) {
videoTracksSelected.put(inputIndex, selected);
if (videoTracksSelected.size() == checkNotNull(composition).sequences.size()) {
int selectedVideoTracks = 0;
for (int i = 0; i < videoTracksSelected.size(); i++) {
if (videoTracksSelected.get(videoTracksSelected.keyAt(i))) {
selectedVideoTracks++;
}
}
checkNotNull(playbackVideoGraphWrapper).setTotalVideoInputCount(selectedVideoTracks);
}
}
/**
* A {@link DefaultTrackSelector} extension to de-select generated audio when the audio from the
* media is playable.
*/
private static final class CompositionTrackSelector extends DefaultTrackSelector {
private final class CompositionTrackSelector extends DefaultTrackSelector {
private static final String SILENCE_AUDIO_TRACK_GROUP_ID = "1:";
private final int sequenceIndex;
@ -1117,44 +1163,41 @@ public final class CompositionPlayer extends SimpleBasePlayer
@RendererCapabilities.AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupports,
Parameters params)
throws ExoPlaybackException {
if (sequenceIndex == 0) {
// Currently silence is only generated for the zero-indexed sequence.
int audioRenderIndex = C.INDEX_UNSET;
for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) {
if (mappedTrackInfo.getRendererType(i) == C.TRACK_TYPE_AUDIO) {
audioRenderIndex = i;
break;
int audioRenderIndex = C.INDEX_UNSET;
for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) {
if (mappedTrackInfo.getRendererType(i) == C.TRACK_TYPE_AUDIO) {
audioRenderIndex = i;
break;
}
}
checkState(audioRenderIndex != C.INDEX_UNSET);
TrackGroupArray audioTrackGroups = mappedTrackInfo.getTrackGroups(audioRenderIndex);
// If there's only one audio TrackGroup, it'll be silence, there's no need to override track
// selection.
if (audioTrackGroups.length > 1) {
boolean mediaAudioIsPlayable = false;
int silenceAudioTrackGroupIndex = C.INDEX_UNSET;
for (int i = 0; i < audioTrackGroups.length; i++) {
if (audioTrackGroups.get(i).id.startsWith(SILENCE_AUDIO_TRACK_GROUP_ID)) {
silenceAudioTrackGroupIndex = i;
continue;
}
// For non-silence tracks
for (int j = 0; j < audioTrackGroups.get(i).length; j++) {
mediaAudioIsPlayable |=
RendererCapabilities.getFormatSupport(
rendererFormatSupports[audioRenderIndex][i][j])
== C.FORMAT_HANDLED;
}
}
checkState(audioRenderIndex != C.INDEX_UNSET);
checkState(silenceAudioTrackGroupIndex != C.INDEX_UNSET);
TrackGroupArray audioTrackGroups = mappedTrackInfo.getTrackGroups(audioRenderIndex);
// If there's only one audio TrackGroup, it'll be silence, there's no need to override track
// selection.
if (audioTrackGroups.length > 1) {
boolean mediaAudioIsPlayable = false;
int silenceAudioTrackGroupIndex = C.INDEX_UNSET;
for (int i = 0; i < audioTrackGroups.length; i++) {
if (audioTrackGroups.get(i).id.startsWith(SILENCE_AUDIO_TRACK_GROUP_ID)) {
silenceAudioTrackGroupIndex = i;
continue;
}
// For non-silence tracks
for (int j = 0; j < audioTrackGroups.get(i).length; j++) {
mediaAudioIsPlayable |=
RendererCapabilities.getFormatSupport(
rendererFormatSupports[audioRenderIndex][i][j])
== C.FORMAT_HANDLED;
}
}
checkState(silenceAudioTrackGroupIndex != C.INDEX_UNSET);
if (mediaAudioIsPlayable) {
// Disable silence if the media's audio track is playable.
int silenceAudioTrackIndex = audioTrackGroups.length - 1;
rendererFormatSupports[audioRenderIndex][silenceAudioTrackIndex][0] =
RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
}
if (mediaAudioIsPlayable) {
// Disable silence if the media's audio track is playable.
int silenceAudioTrackIndex = audioTrackGroups.length - 1;
rendererFormatSupports[audioRenderIndex][silenceAudioTrackIndex][0] =
RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
}
}
@ -1171,15 +1214,19 @@ public final class CompositionPlayer extends SimpleBasePlayer
Parameters params,
@Nullable String selectedAudioLanguage)
throws ExoPlaybackException {
@Nullable
Pair<ExoTrackSelection.Definition, Integer> trackSelection =
super.selectVideoTrack(
mappedTrackInfo,
rendererFormatSupports,
mixedMimeTypeSupports,
params,
selectedAudioLanguage);
if (disableVideoPlayback) {
return null;
trackSelection = null;
}
return super.selectVideoTrack(
mappedTrackInfo,
rendererFormatSupports,
mixedMimeTypeSupports,
params,
selectedAudioLanguage);
onVideoTrackSelection(/* selected= */ trackSelection != null, sequenceIndex);
return trackSelection;
}
@Nullable
@ -1189,10 +1236,15 @@ public final class CompositionPlayer extends SimpleBasePlayer
@RendererCapabilities.Capabilities int[][][] rendererFormatSupports,
Parameters params)
throws ExoPlaybackException {
@Nullable
Pair<ExoTrackSelection.Definition, Integer> trackSelection =
super.selectImageTrack(mappedTrackInfo, rendererFormatSupports, params);
if (disableVideoPlayback) {
return null;
trackSelection = null;
}
return super.selectImageTrack(mappedTrackInfo, rendererFormatSupports, params);
// Images are treated as video tracks.
onVideoTrackSelection(/* selected= */ trackSelection != null, sequenceIndex);
return trackSelection;
}
}
}

View File

@ -663,13 +663,13 @@ public final class ExperimentalFrameExtractor {
@CallSuper
@Override
protected void onReadyToInitializeCodec(MediaCodecInfo codecInfo, Format format)
protected boolean maybeInitializeProcessingPipeline(MediaCodecInfo codecInfo, Format format)
throws ExoPlaybackException {
if (isTransferHdr(format.colorInfo) && toneMapHdrToSdr) {
// Setting the VideoSink format to SDR_BT709_LIMITED tone maps to SDR.
format = format.buildUpon().setColorInfo(SDR_BT709_LIMITED).build();
}
super.onReadyToInitializeCodec(codecInfo, format);
return super.maybeInitializeProcessingPipeline(codecInfo, format);
}
@Override

View File

@ -89,22 +89,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
requestToneMapping);
}
/** Creates a renderers factory that for a player that will only play audio. */
public static SequenceRenderersFactory createForAudio(
Context context,
EditedMediaItemSequence sequence,
PlaybackAudioGraphWrapper playbackAudioGraphWrapper,
int inputIndex) {
return new SequenceRenderersFactory(
context,
sequence,
playbackAudioGraphWrapper,
/* videoSink= */ null,
/* imageDecoderFactory= */ null,
inputIndex,
/* requestToneMapping= */ false);
}
private SequenceRenderersFactory(
Context context,
EditedMediaItemSequence sequence,
@ -410,15 +394,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
super.onEnabled(joining, mayRenderStartOfStream);
this.mayRenderStartOfStream = mayRenderStartOfStream;
videoSink.onRendererEnabled(mayRenderStartOfStream);
if (!videoSink.isInitialized()) {
Format format = new Format.Builder().build();
try {
videoSink.initialize(format);
} catch (VideoSink.VideoSinkException e) {
throw createRendererException(
e, format, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
}
// TODO - b/328444280: Do not set a listener on VideoSink, but MediaCodecVideoRenderer must
// unregister itself as a listener too.
videoSink.setListener(VideoSink.Listener.NO_OP, /* executor= */ (runnable) -> {});
@ -471,6 +446,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoSink.onRendererStarted();
}
@Override
protected boolean maybeInitializeProcessingPipeline() throws ExoPlaybackException {
if (videoSink.isInitialized()) {
return true;
}
Format format = new Format.Builder().build();
try {
return videoSink.initialize(format);
} catch (VideoSink.VideoSinkException e) {
throw createRendererException(
e, format, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
}
@Override
protected void onStopped() {
super.onStopped();
@ -503,6 +492,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
pendingExoPlaybackException = null;
throw exoPlaybackException;
}
super.render(positionUs, elapsedRealtimeUs);
try {
videoSink.render(positionUs, elapsedRealtimeUs);