Fix review comment

Luyuan Chen 2024-03-05 14:05:27 +00:00
parent 0403e5881d
commit 766ff44a2c
15 changed files with 108 additions and 83 deletions

View File

@@ -16,6 +16,7 @@
 package androidx.media3.common;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
@@ -73,20 +74,23 @@ public interface VideoGraph {
    * <p>A underlying processing {@link VideoFrameProcessor} is created every time this method is
    * called.
    *
+   * <p>All inputs must be registered before rendering frames to the underlying
+   * {@link #getProcessor(int) VideoFrameProcessor}.
+   *
    * <p>If the method throws, the caller must call {@link #release}.
    *
-   * @param sequenceIndex The sequence index of the input which can aid ordering of the inputs. The
-   *     index must start from 0.
+   * @param inputIndex The index of the input which could be used to order the inputs.
+   *     The index must start from 0.
    */
-  void registerInput(int sequenceIndex) throws VideoFrameProcessingException;
+  void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;
   /**
    * Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
-   * {@link #registerInput(int)}. If the {@code sequenceIndex} is not {@linkplain
+   * {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain
    * #registerInput(int) registered} before, this method will throw an {@link
    * IllegalStateException}.
    */
-  VideoFrameProcessor getProcessor(int sequenceIndex);
+  VideoFrameProcessor getProcessor(int inputIndex);
   /**
    * Sets the output surface and supporting information.
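For orientation, here is a minimal caller-side sketch of the renamed contract. This is not code from the commit; `videoGraph` is assumed to be some already-constructed VideoGraph implementation. Indices are chosen by the caller, start from 0, and every input is registered before frames are rendered to its processor.

// Illustrative sketch only; `videoGraph` is an assumed, already-constructed VideoGraph.
int inputCount = 2;
try {
  for (int inputIndex = 0; inputIndex < inputCount; inputIndex++) {
    videoGraph.registerInput(inputIndex); // register every input before rendering frames
  }
} catch (VideoFrameProcessingException e) {
  videoGraph.release(); // per the Javadoc above, release if registerInput throws
  throw e;
}
VideoFrameProcessor firstProcessor = videoGraph.getProcessor(/* inputIndex= */ 0);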

View File

@@ -18,6 +18,7 @@ package androidx.media3.effect;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static androidx.media3.common.util.Util.contains;
 import static java.lang.Math.abs;
 import static java.lang.Math.max;
@@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
   private static final String TAG = "DefaultVideoCompositor";
-  private static final int PRIMARY_INPUT_ID = 0;
+  // TODO: b/338579287: Use the first registered index instead of a constant value.
+  private static final int PRIMARY_INPUT_INDEX = 0;
   private final VideoCompositor.Listener listener;
   private final GlTextureProducer.Listener textureOutputListener;
@@ -142,24 +144,26 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   }
   @Override
-  public synchronized void registerInputSource(int sequenceIndex) {
-    inputSources.put(sequenceIndex, new InputSource());
+  public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
+    checkState(!contains(inputSources, inputIndex));
+    inputSources.put(inputIndex, new InputSource());
   }
   @Override
-  public synchronized void signalEndOfInputSource(int inputId) {
-    inputSources.get(inputId).isInputEnded = true;
+  public synchronized void signalEndOfInputSource(int inputIndex) {
+    checkState(contains(inputSources, inputIndex));
+    inputSources.get(inputIndex).isInputEnded = true;
     boolean allInputsEnded = true;
     for (int i = 0; i < inputSources.size(); i++) {
-      if (!inputSources.get(inputSources.keyAt(i)).isInputEnded) {
+      if (!inputSources.valueAt(i).isInputEnded) {
         allInputsEnded = false;
         break;
       }
     }
     this.allInputsEnded = allInputsEnded;
-    if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
-      if (inputId == PRIMARY_INPUT_ID) {
+    if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
+      if (inputIndex == PRIMARY_INPUT_INDEX) {
         releaseExcessFramesInAllSecondaryStreams();
       }
       if (allInputsEnded) {
@@ -167,7 +171,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {
         return;
       }
     }
-    if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
+    if (inputIndex != PRIMARY_INPUT_INDEX
+        && inputSources.get(inputIndex).frameInfos.size() == 1) {
       // When a secondary stream ends input, composite if there was only one pending frame in the
       // stream.
       videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@@ -176,12 +181,13 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   @Override
   public synchronized void queueInputTexture(
-      int inputId,
+      int inputIndex,
       GlTextureProducer textureProducer,
       GlTextureInfo inputTexture,
       ColorInfo colorInfo,
       long presentationTimeUs) {
-    InputSource inputSource = inputSources.get(inputId);
+    checkState(contains(inputSources, inputIndex));
+    InputSource inputSource = inputSources.get(inputIndex);
     checkState(!inputSource.isInputEnded);
     checkStateNotNull(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
     if (configuredColorInfo == null) {
@@ -195,10 +201,10 @@ public final class DefaultVideoCompositor implements VideoCompositor {
             textureProducer,
             inputTexture,
             presentationTimeUs,
-            settings.getOverlaySettings(inputId, presentationTimeUs));
+            settings.getOverlaySettings(inputIndex, presentationTimeUs));
     inputSource.frameInfos.add(inputFrameInfo);
-    if (inputId == PRIMARY_INPUT_ID) {
+    if (inputIndex == PRIMARY_INPUT_INDEX) {
       releaseExcessFramesInAllSecondaryStreams();
     } else {
       releaseExcessFramesInSecondaryStream(inputSource);
@@ -224,11 +230,11 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   }
   private synchronized void releaseExcessFramesInAllSecondaryStreams() {
-    for (int i = 0; i < inputSources.size(); i++) {
-      if (i == PRIMARY_INPUT_ID) {
+    for (int inputIndex = 0; inputIndex < inputSources.size(); inputIndex++) {
+      if (inputIndex == PRIMARY_INPUT_INDEX) {
         continue;
       }
-      releaseExcessFramesInSecondaryStream(inputSources.get(inputSources.keyAt(i)));
+      releaseExcessFramesInSecondaryStream(inputSources.valueAt(inputIndex));
     }
   }
@@ -240,7 +246,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   * began.
   */
  private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
-    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
    // If the primary stream output is ended, all secondary frames can be released.
    if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
      releaseFrames(
@@ -291,7 +297,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
      return;
    }
-    InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
+    InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);
    ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
    for (int i = 0; i < framesToComposite.size(); i++) {
@@ -312,7 +318,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
    textureOutputListener.onTextureRendered(
        /* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);
-    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
    releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
    releaseExcessFramesInAllSecondaryStreams();
@@ -332,18 +338,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
    if (outputTexturePool.freeTextureCount() == 0) {
      return ImmutableList.of();
    }
-    for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-      if (inputSources.get(inputSources.keyAt(inputId)).frameInfos.isEmpty()) {
+    for (int i = 0; i < inputSources.size(); i++) {
+      if (inputSources.valueAt(i).frameInfos.isEmpty()) {
        return ImmutableList.of();
      }
    }
    ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
    InputFrameInfo primaryFrameToComposite =
-        inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
+        inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
    framesToComposite.add(primaryFrameToComposite);
-    for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-      if (inputId == PRIMARY_INPUT_ID) {
+    for (int i = 0; i < inputSources.size(); i++) {
+      if (i == PRIMARY_INPUT_INDEX) {
        continue;
      }
      // Select the secondary streams' frame that would be composited next. The frame selected is
@@ -352,7 +358,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
      // 2. Two or more frames, and at least one frame has timestamp greater than the target
      // timestamp.
      // The smaller timestamp is taken if two timestamps have the same distance from the primary.
-      InputSource secondaryInputSource = inputSources.get(inputSources.keyAt(inputId));
+      InputSource secondaryInputSource = inputSources.valueAt(i);
      if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
        return ImmutableList.of();
      }
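Two mechanical changes above are worth spelling out: the loops switch from inputSources.get(inputSources.keyAt(i)) to inputSources.valueAt(i), and the public methods now fail fast when an index was never registered. The following standalone snippet (illustrative only, not part of the commit) shows why the positional valueAt form is equivalent on android.util.SparseArray:

// Illustrative only: SparseArray keys are kept sorted, so valueAt(i) returns the
// value for the i-th smallest key, i.e. the same object as get(keyAt(i)).
android.util.SparseArray<String> sources = new android.util.SparseArray<>();
sources.put(0, "primary");
sources.put(5, "secondary"); // keys need not be contiguous
for (int i = 0; i < sources.size(); i++) {
  boolean same = sources.valueAt(i) == sources.get(sources.keyAt(i)); // always true
}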

View File

@@ -33,6 +33,7 @@ import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
 import android.opengl.EGLSurface;
 import android.util.SparseArray;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
@@ -211,9 +212,10 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
   }
   @Override
-  public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
-    checkStateNotNull(videoCompositor);
-    videoCompositor.registerInputSource(sequenceIndex);
+  public void registerInput(@IntRange(from = 0) int inputIndex)
+      throws VideoFrameProcessingException {
+    checkState(!contains(preProcessors, inputIndex));
+    checkNotNull(videoCompositor).registerInputSource(inputIndex);
     // Creating a new VideoFrameProcessor for the input.
     VideoFrameProcessor preProcessor =
         videoFrameProcessorFactory
@@ -222,7 +224,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
                 // Texture output to compositor.
                 (textureProducer, texture, presentationTimeUs, syncObject) ->
                     queuePreProcessingOutputToCompositor(
-                        sequenceIndex, textureProducer, texture, presentationTimeUs),
+                        inputIndex, textureProducer, texture, presentationTimeUs),
                 PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
             .build()
             .create(
@@ -253,16 +255,16 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
               @Override
               public void onEnded() {
-                onPreProcessingVideoFrameProcessorEnded(sequenceIndex);
+                onPreProcessingVideoFrameProcessorEnded(inputIndex);
               }
             });
-    preProcessors.put(sequenceIndex, preProcessor);
+    preProcessors.put(inputIndex, preProcessor);
   }
   @Override
-  public VideoFrameProcessor getProcessor(int sequenceIndex) {
-    checkState(contains(preProcessors, sequenceIndex));
-    return preProcessors.get(sequenceIndex);
+  public VideoFrameProcessor getProcessor(int inputIndex) {
+    checkState(contains(preProcessors, inputIndex));
+    return preProcessors.get(inputIndex);
   }
   @Override

View File

@@ -109,6 +109,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
   @Override
   public void renderOutputFrame(long renderTimeNs) {
-    getProcessor(SINGLE_INPUT_INDEX).renderOutputFrame(renderTimeNs);
+    getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
   }
 }

View File

@@ -16,11 +16,13 @@
 package androidx.media3.effect;
+import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
 import android.content.Context;
 import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
@@ -38,9 +40,6 @@ import java.util.concurrent.Executor;
 @UnstableApi
 public abstract class SingleInputVideoGraph implements VideoGraph {
-  /** The index of the only {@linkplain #registerInput(int) registered} input. */
-  public static final int SINGLE_INPUT_INDEX = 0;
   private final Context context;
   private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
   private final ColorInfo outputColorInfo;
@@ -56,6 +55,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   private boolean isEnded;
   private boolean released;
   private volatile boolean hasProducedFrameWithTimestampZero;
+  private int inputIndex;
   /**
    * Creates an instance.
@@ -86,6 +86,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     this.renderFramesAutomatically = renderFramesAutomatically;
     this.presentation = presentation;
     this.initialTimestampOffsetUs = initialTimestampOffsetUs;
+    this.inputIndex = C.INDEX_UNSET;
   }
   /**
@@ -99,9 +100,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   }
   @Override
-  public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
+  public void registerInput(int inputIndex) throws VideoFrameProcessingException {
     checkStateNotNull(videoFrameProcessor == null && !released);
+    checkState(this.inputIndex == C.INDEX_UNSET);
+    this.inputIndex = inputIndex;
     videoFrameProcessor =
         videoFrameProcessorFactory.create(
             context,
@@ -162,7 +165,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   }
   @Override
-  public VideoFrameProcessor getProcessor(int sequenceIndex) {
+  public VideoFrameProcessor getProcessor(int inputIndex) {
+    checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
     return checkStateNotNull(videoFrameProcessor);
   }
@@ -192,6 +196,10 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     released = true;
   }
+  protected int getInputIndex() {
+    return inputIndex;
+  }
   protected long getInitialTimestampOffsetUs() {
     return initialTimestampOffsetUs;
   }
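The net effect of these SingleInputVideoGraph changes: instead of a fixed SINGLE_INPUT_INDEX constant, the graph remembers whichever index is registered (initially C.INDEX_UNSET), and getProcessor must be called with that same index. A hedged usage sketch, not from this commit, assuming `graph` is a concrete SingleInputVideoGraph subclass:

// Sketch only.
graph.registerInput(/* inputIndex= */ 0);               // stored in the new inputIndex field
VideoFrameProcessor processor = graph.getProcessor(0);  // must match the registered index
// A second registerInput(...) call fails checkState(this.inputIndex == C.INDEX_UNSET),
// and getProcessor with any other index fails the new checkArgument.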

View File

@@ -15,6 +15,7 @@
  */
 package androidx.media3.effect;
+import androidx.annotation.IntRange;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
@@ -48,29 +49,33 @@ public interface VideoCompositor extends GlTextureProducer {
   /**
    * Registers a new input source.
    *
-   * @param sequenceIndex The sequence index of the input source which is used to determine the
-   *     order of the input sources. The same index should to be used in {@link #queueInputTexture}.
+   * @param inputIndex The index of the input source which could be used to determine the order of
+   *     the input sources. The same index should to be used in {@link #queueInputTexture}. The
+   *     index must start from 0. All inputs must be registered before
+   *     {@linkplain #queueInputTexture(int, GlTextureProducer, GlTextureInfo, ColorInfo, long) queueing}
+   *     textures.
    */
-  void registerInputSource(int sequenceIndex);
+  void registerInputSource(@IntRange(from = 0) int inputIndex);
   /**
    * Signals that no more frames will come from the upstream {@link GlTextureProducer.Listener}.
    *
-   * @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
+   * @param inputIndex The index of the input source.
   */
-  void signalEndOfInputSource(int inputId);
+  void signalEndOfInputSource(int inputIndex);
   /**
    * Queues an input texture to be composited.
    *
-   * @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
+   * @param inputIndex The index of the input source, the same index used when {@linkplain
+   *     #registerInputSource(int) registering the input source}.
    * @param textureProducer The source from where the {@code inputTexture} is produced.
    * @param inputTexture The {@link GlTextureInfo} to composite.
    * @param colorInfo The {@link ColorInfo} of {@code inputTexture}.
    * @param presentationTimeUs The presentation time of {@code inputTexture}, in microseconds.
   */
  void queueInputTexture(
-      int inputId,
+      int inputIndex,
      GlTextureProducer textureProducer,
      GlTextureInfo inputTexture,
      ColorInfo colorInfo,
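Taken together, the reworded Javadoc describes a strict call order on VideoCompositor. A minimal sketch, not code from this commit, assuming `compositor` is any implementation and `producer`, `texture`, and `colorInfo` come from an upstream GlTextureProducer:

// Illustrative call order only.
compositor.registerInputSource(/* inputIndex= */ 0); // primary input
compositor.registerInputSource(/* inputIndex= */ 1); // secondary input
// All inputs are registered before any texture is queued.
compositor.queueInputTexture(
    /* inputIndex= */ 1, producer, texture, colorInfo, /* presentationTimeUs= */ 0);
compositor.signalEndOfInputSource(/* inputIndex= */ 1);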

View File

@@ -419,7 +419,6 @@ public final class CompositingVideoSinkProvider
       outputColorInfo =
           inputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
     }
-    int videoGraphInputId;
     try {
       videoGraph =
           previewingVideoGraphFactory.create(
@@ -435,12 +434,12 @@ public final class CompositingVideoSinkProvider
         Size size = currentSurfaceAndSize.second;
         maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
       }
-      videoGraphInputId = videoGraph.registerInput();
+      videoGraph.registerInput(/* inputIndex= */ 0);
     } catch (VideoFrameProcessingException e) {
       throw new VideoSink.VideoSinkException(e, sourceFormat);
     }
     state = STATE_INITIALIZED;
-    return videoGraph.getProcessor(videoGraphInputId);
+    return videoGraph.getProcessor(/* inputIndex= */ 0);
   }
   private boolean isInitialized() {
@@ -550,8 +549,6 @@ public final class CompositingVideoSinkProvider
     // reduces decoder timeouts, and consider restoring.
     videoFrameProcessorMaxPendingFrameCount =
         Util.getMaxPendingFramesCountForMediaCodecDecoders(context);
-    videoGraph.registerInput(/* sequenceIndex= */ 0);
-    videoFrameProcessor = videoGraph.getProcessor(/* sequenceIndex= */ 0);
     videoEffects = new ArrayList<>();
     finalBufferPresentationTimeUs = C.TIME_UNSET;

View File

@@ -744,7 +744,8 @@ public final class DefaultVideoCompositorPixelTest {
               textureBitmapReader,
               videoCompositor,
               sharedExecutorService,
-              glObjectsProvider)
+              glObjectsProvider,
+              /* inputIndex= */ i)
           .setEffects(effectsToApply.build())
           .build();
       inputVideoFrameProcessorTestRunners.add(vfpTestRunner);
@@ -855,9 +856,9 @@ public final class DefaultVideoCompositorPixelTest {
       TextureBitmapReader textureBitmapReader,
       VideoCompositor videoCompositor,
       @Nullable ExecutorService executorService,
-      GlObjectsProvider glObjectsProvider) {
-    int sequenceIndex = 0;
-    videoCompositor.registerInputSource(sequenceIndex);
+      GlObjectsProvider glObjectsProvider,
+      int inputIndex) {
+    videoCompositor.registerInputSource(inputIndex);
     DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
         new DefaultVideoFrameProcessor.Factory.Builder()
             .setGlObjectsProvider(glObjectsProvider)
@@ -871,7 +872,7 @@ public final class DefaultVideoCompositorPixelTest {
               textureBitmapReader.readBitmapUnpremultipliedAlpha(
                   outputTexture, presentationTimeUs);
               videoCompositor.queueInputTexture(
-                  sequenceIndex,
+                  inputIndex,
                   outputTextureProducer,
                   outputTexture,
                   ColorInfo.SRGB_BT709_FULL,
@@ -885,7 +886,7 @@ public final class DefaultVideoCompositorPixelTest {
         .setTestId(testId)
         .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
         .setBitmapReader(textureBitmapReader)
-        .setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(sequenceIndex));
+        .setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputIndex));
   }
 }

View File

@@ -97,7 +97,7 @@ import org.checkerframework.dataflow.qual.Pure;
   }
   @Override
-  public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format, int sequenceIndex)
+  public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
       throws ExportException {
     if (!returnedFirstInput) {
       // First input initialized in constructor because output AudioFormat is needed.

View File

@@ -129,7 +129,7 @@ import java.util.concurrent.atomic.AtomicLong;
   }
   @Override
-  public GraphInput getInput(EditedMediaItem item, Format format, int sequenceIndex) {
+  public GraphInput getInput(EditedMediaItem item, Format format, int inputIndex) {
     return this;
   }

View File

@@ -64,11 +64,11 @@ import java.util.List;
    *
    * @param editedMediaItem The initial {@link EditedMediaItem} of the input.
    * @param format The initial {@link Format} of the input.
-   * @param sequenceIndex The index of the input sequence.
+   * @param inputIndex The index of the input.
    * @throws ExportException If an error occurs getting the input.
    */
   public abstract GraphInput getInput(
-      EditedMediaItem editedMediaItem, Format format, int sequenceIndex) throws ExportException;
+      EditedMediaItem editedMediaItem, Format format, int inputIndex) throws ExportException;
   /**
    * Processes the input data and returns whether it may be possible to process more data by calling

View File

@@ -79,9 +79,9 @@ import java.util.concurrent.Executor;
   }
   @Override
-  public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
-    registerInput(sequenceIndex);
+  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
+    registerInput(inputIndex);
     return new VideoFrameProcessingWrapper(
-        getProcessor(sequenceIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
+        getProcessor(inputIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
   }
 }

View File

@@ -106,12 +106,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }
   @Override
-  public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
+  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
     checkState(videoFrameProcessingWrapper == null);
-    registerInput(sequenceIndex);
+    registerInput(inputIndex);
     videoFrameProcessingWrapper =
         new VideoFrameProcessingWrapper(
-            getProcessor(sequenceIndex), getPresentation(), getInitialTimestampOffsetUs());
+            getProcessor(inputIndex), getPresentation(), getInitialTimestampOffsetUs());
     return videoFrameProcessingWrapper;
   }
 }

View File

@@ -70,7 +70,7 @@ import java.util.concurrent.Executor;
    *
    * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
    *
-   * @param sequenceIndex The index of the input sequence, which is used to order the inputs.
+   * @param inputIndex The index of the input, which could be used to order the inputs.
    */
-  GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException;
+  GraphInput createInput(int inputIndex) throws VideoFrameProcessingException;
 }

View File

@@ -33,6 +33,7 @@ import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.util.Pair;
 import android.view.Surface;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.annotation.VisibleForTesting;
 import androidx.media3.common.C;
@@ -154,10 +155,10 @@ import org.checkerframework.dataflow.qual.Pure;
   }
   @Override
-  public GraphInput getInput(EditedMediaItem editedMediaItem, Format format, int sequenceIndex)
+  public GraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
       throws ExportException {
     try {
-      return videoGraph.createInput(sequenceIndex);
+      return videoGraph.createInput(inputIndex);
     } catch (VideoFrameProcessingException e) {
       throw ExportException.createForVideoFrameProcessingException(e);
     }
@@ -543,18 +544,19 @@ import org.checkerframework.dataflow.qual.Pure;
   }
   @Override
-  public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
-    videoGraph.registerInput(sequenceIndex);
+  public void registerInput(@IntRange(from = 0) int inputIndex)
+      throws VideoFrameProcessingException {
+    videoGraph.registerInput(inputIndex);
   }
   @Override
-  public VideoFrameProcessor getProcessor(int sequenceIndex) {
-    return videoGraph.getProcessor(sequenceIndex);
+  public VideoFrameProcessor getProcessor(int inputIndex) {
+    return videoGraph.getProcessor(inputIndex);
   }
   @Override
-  public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
-    return videoGraph.createInput(sequenceIndex);
+  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
+    return videoGraph.createInput(inputIndex);
   }
   @Override