Fix review comment

Luyuan Chen 2024-03-05 14:05:27 +00:00
parent 0403e5881d
commit 766ff44a2c
15 changed files with 108 additions and 83 deletions

View File

@ -16,6 +16,7 @@
package androidx.media3.common;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.util.UnstableApi;
@ -73,20 +74,23 @@ public interface VideoGraph {
* <p>An underlying processing {@link VideoFrameProcessor} is created every time this method is
* called.
*
* <p>All inputs must be registered before rendering frames to the underlying
* {@link #getProcessor(int) VideoFrameProcessor}.
*
* <p>If the method throws, the caller must call {@link #release}.
*
* @param sequenceIndex The sequence index of the input which can aid ordering of the inputs. The
* index must start from 0.
* @param inputIndex The index of the input which could be used to order the inputs.
* The index must start from 0.
*/
void registerInput(int sequenceIndex) throws VideoFrameProcessingException;
void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;
/**
* Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
* {@link #registerInput(int)}. If the {@code sequenceIndex} is not {@linkplain
* {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain
* #registerInput(int) registered} before, this method will throw an {@link
* IllegalStateException}.
*/
VideoFrameProcessor getProcessor(int sequenceIndex);
VideoFrameProcessor getProcessor(int inputIndex);
/**
* Sets the output surface and supporting information.
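To make the register-then-get contract documented above concrete, here is a minimal caller-side sketch of the renamed API. It is an illustration only, not code from this commit: it assumes a two-input graph and an already-constructed `videoGraph`, with surface and listener setup omitted.

```java
// Hypothetical caller of the renamed VideoGraph API (fragment, illustration only).
// All inputs are registered before frames are rendered to the returned VideoFrameProcessors.
videoGraph.registerInput(/* inputIndex= */ 0);
videoGraph.registerInput(/* inputIndex= */ 1);

VideoFrameProcessor primaryProcessor = videoGraph.getProcessor(/* inputIndex= */ 0);
VideoFrameProcessor overlayProcessor = videoGraph.getProcessor(/* inputIndex= */ 1);
// Calling getProcessor with an index that was never registered throws an IllegalStateException.
```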

View File

@ -18,6 +18,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static java.lang.Math.abs;
import static java.lang.Math.max;
@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {
private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
private static final String TAG = "DefaultVideoCompositor";
private static final int PRIMARY_INPUT_ID = 0;
// TODO: b/338579287: Use the first registered index instead of a constant value.
private static final int PRIMARY_INPUT_INDEX = 0;
private final VideoCompositor.Listener listener;
private final GlTextureProducer.Listener textureOutputListener;
@ -142,24 +144,26 @@ public final class DefaultVideoCompositor implements VideoCompositor {
}
@Override
public synchronized void registerInputSource(int sequenceIndex) {
inputSources.put(sequenceIndex, new InputSource());
public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
checkState(!contains(inputSources, inputIndex));
inputSources.put(inputIndex, new InputSource());
}
@Override
public synchronized void signalEndOfInputSource(int inputId) {
inputSources.get(inputId).isInputEnded = true;
public synchronized void signalEndOfInputSource(int inputIndex) {
checkState(contains(inputSources, inputIndex));
inputSources.get(inputIndex).isInputEnded = true;
boolean allInputsEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
if (!inputSources.get(inputSources.keyAt(i)).isInputEnded) {
if (!inputSources.valueAt(i).isInputEnded) {
allInputsEnded = false;
break;
}
}
this.allInputsEnded = allInputsEnded;
if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
if (inputId == PRIMARY_INPUT_ID) {
if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
}
if (allInputsEnded) {
@ -167,7 +171,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {
return;
}
}
if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
if (inputIndex != PRIMARY_INPUT_INDEX
&& inputSources.get(inputIndex).frameInfos.size() == 1) {
// When a secondary stream ends input, composite if there was only one pending frame in the
// stream.
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@ -176,12 +181,13 @@ public final class DefaultVideoCompositor implements VideoCompositor {
@Override
public synchronized void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
long presentationTimeUs) {
InputSource inputSource = inputSources.get(inputId);
checkState(contains(inputSources, inputIndex));
InputSource inputSource = inputSources.get(inputIndex);
checkState(!inputSource.isInputEnded);
checkStateNotNull(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
if (configuredColorInfo == null) {
@ -195,10 +201,10 @@ public final class DefaultVideoCompositor implements VideoCompositor {
textureProducer,
inputTexture,
presentationTimeUs,
settings.getOverlaySettings(inputId, presentationTimeUs));
settings.getOverlaySettings(inputIndex, presentationTimeUs));
inputSource.frameInfos.add(inputFrameInfo);
if (inputId == PRIMARY_INPUT_ID) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
} else {
releaseExcessFramesInSecondaryStream(inputSource);
@ -224,11 +230,11 @@ public final class DefaultVideoCompositor implements VideoCompositor {
}
private synchronized void releaseExcessFramesInAllSecondaryStreams() {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_ID) {
for (int inputIndex = 0; inputIndex < inputSources.size(); inputIndex++) {
if (inputIndex == PRIMARY_INPUT_INDEX) {
continue;
}
releaseExcessFramesInSecondaryStream(inputSources.get(inputSources.keyAt(i)));
releaseExcessFramesInSecondaryStream(inputSources.valueAt(inputIndex));
}
}
@ -240,7 +246,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
* began.
*/
private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
// If the primary stream output is ended, all secondary frames can be released.
if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
releaseFrames(
@ -291,7 +297,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
return;
}
InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);
ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
for (int i = 0; i < framesToComposite.size(); i++) {
@ -312,7 +318,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
textureOutputListener.onTextureRendered(
/* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
releaseExcessFramesInAllSecondaryStreams();
@ -332,18 +338,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
if (outputTexturePool.freeTextureCount() == 0) {
return ImmutableList.of();
}
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputSources.get(inputSources.keyAt(inputId)).frameInfos.isEmpty()) {
for (int i = 0; i < inputSources.size(); i++) {
if (inputSources.valueAt(i).frameInfos.isEmpty()) {
return ImmutableList.of();
}
}
ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
InputFrameInfo primaryFrameToComposite =
inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
framesToComposite.add(primaryFrameToComposite);
for (int inputId = 0; inputId < inputSources.size(); inputId++) {
if (inputId == PRIMARY_INPUT_ID) {
for (int i = 0; i < inputSources.size(); i++) {
if (i == PRIMARY_INPUT_INDEX) {
continue;
}
// Select the secondary streams' frame that would be composited next. The frame selected is
@ -352,7 +358,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
// 2. Two or more frames, and at least one frame has timestamp greater than the target
// timestamp.
// The smaller timestamp is taken if two timestamps have the same distance from the primary.
InputSource secondaryInputSource = inputSources.get(inputSources.keyAt(inputId));
InputSource secondaryInputSource = inputSources.valueAt(i);
if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
return ImmutableList.of();
}
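The selection rule described in the comment above (pick the secondary frame whose timestamp is closest to the primary frame's timestamp; on a tie, take the smaller timestamp) can be sketched in isolation. The following is an illustrative helper under simplified assumptions, using plain timestamp lists instead of InputFrameInfo queues; it is not the actual DefaultVideoCompositor code.

```java
import java.util.Arrays;
import java.util.List;

/** Illustrative sketch of the secondary-frame selection rule; not the media3 implementation. */
final class SecondaryFrameSelection {

  /** Returns the secondary timestamp closest to the primary; ties go to the smaller timestamp. */
  static long selectClosestTimestampUs(List<Long> secondaryTimestampsUs, long primaryTimestampUs) {
    long best = secondaryTimestampsUs.get(0);
    for (long candidate : secondaryTimestampsUs) {
      long candidateDistance = Math.abs(candidate - primaryTimestampUs);
      long bestDistance = Math.abs(best - primaryTimestampUs);
      if (candidateDistance < bestDistance
          || (candidateDistance == bestDistance && candidate < best)) {
        best = candidate;
      }
    }
    return best;
  }

  public static void main(String[] args) {
    // Primary frame at 40us; secondary frames at 30us and 50us are equally close, so 30us wins.
    System.out.println(selectClosestTimestampUs(Arrays.asList(30L, 50L), 40L)); // prints 30
  }
}
```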

View File

@ -33,6 +33,7 @@ import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.SparseArray;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
@ -211,9 +212,10 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
}
@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
checkStateNotNull(videoCompositor);
videoCompositor.registerInputSource(sequenceIndex);
public void registerInput(@IntRange(from = 0) int inputIndex)
throws VideoFrameProcessingException {
checkState(!contains(preProcessors, inputIndex));
checkNotNull(videoCompositor).registerInputSource(inputIndex);
// Creating a new VideoFrameProcessor for the input.
VideoFrameProcessor preProcessor =
videoFrameProcessorFactory
@ -222,7 +224,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
// Texture output to compositor.
(textureProducer, texture, presentationTimeUs, syncObject) ->
queuePreProcessingOutputToCompositor(
sequenceIndex, textureProducer, texture, presentationTimeUs),
inputIndex, textureProducer, texture, presentationTimeUs),
PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
.build()
.create(
@ -253,16 +255,16 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
@Override
public void onEnded() {
onPreProcessingVideoFrameProcessorEnded(sequenceIndex);
onPreProcessingVideoFrameProcessorEnded(inputIndex);
}
});
preProcessors.put(sequenceIndex, preProcessor);
preProcessors.put(inputIndex, preProcessor);
}
@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
checkState(contains(preProcessors, sequenceIndex));
return preProcessors.get(sequenceIndex);
public VideoFrameProcessor getProcessor(int inputIndex) {
checkState(contains(preProcessors, inputIndex));
return preProcessors.get(inputIndex);
}
@Override

View File

@ -109,6 +109,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
@Override
public void renderOutputFrame(long renderTimeNs) {
getProcessor(SINGLE_INPUT_INDEX).renderOutputFrame(renderTimeNs);
getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
}
}

View File

@ -16,11 +16,13 @@
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
@ -38,9 +40,6 @@ import java.util.concurrent.Executor;
@UnstableApi
public abstract class SingleInputVideoGraph implements VideoGraph {
/** The index of the only {@linkplain #registerInput(int) registered} input. */
public static final int SINGLE_INPUT_INDEX = 0;
private final Context context;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final ColorInfo outputColorInfo;
@ -56,6 +55,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private boolean isEnded;
private boolean released;
private volatile boolean hasProducedFrameWithTimestampZero;
private int inputIndex;
/**
* Creates an instance.
@ -86,6 +86,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
this.renderFramesAutomatically = renderFramesAutomatically;
this.presentation = presentation;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.inputIndex = C.INDEX_UNSET;
}
/**
@ -99,9 +100,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
}
@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
public void registerInput(int inputIndex) throws VideoFrameProcessingException {
checkStateNotNull(videoFrameProcessor == null && !released);
checkState(this.inputIndex == C.INDEX_UNSET);
this.inputIndex = inputIndex;
videoFrameProcessor =
videoFrameProcessorFactory.create(
context,
@ -162,7 +165,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
}
@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
public VideoFrameProcessor getProcessor(int inputIndex) {
checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
return checkStateNotNull(videoFrameProcessor);
}
@ -192,6 +196,10 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
released = true;
}
protected int getInputIndex() {
return inputIndex;
}
protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs;
}
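The change above replaces the SINGLE_INPUT_INDEX constant with an index remembered at registration time (C.INDEX_UNSET until registerInput is called). A stripped-down sketch of that pattern, using a plain -1 sentinel in place of C.INDEX_UNSET and a hypothetical class name:

```java
/** Illustrative sketch of the "remember the registered index" pattern; not the media3 class. */
final class SingleInputIndexTracker {
  private static final int INDEX_UNSET = -1; // stands in for C.INDEX_UNSET

  private int inputIndex = INDEX_UNSET;

  /** Records the single registered input index; throws if an input was already registered. */
  void registerInput(int inputIndex) {
    if (this.inputIndex != INDEX_UNSET) {
      throw new IllegalStateException("This graph supports a single input only");
    }
    this.inputIndex = inputIndex;
  }

  /** Returns the registered index, or INDEX_UNSET if registerInput has not been called. */
  int getInputIndex() {
    return inputIndex;
  }

  /** Throws unless the given index matches the registered one. */
  void checkInputIndex(int inputIndex) {
    if (this.inputIndex == INDEX_UNSET || this.inputIndex != inputIndex) {
      throw new IllegalArgumentException("Unknown input index: " + inputIndex);
    }
  }
}
```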

View File

@ -15,6 +15,7 @@
*/
package androidx.media3.effect;
import androidx.annotation.IntRange;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
@ -48,29 +49,33 @@ public interface VideoCompositor extends GlTextureProducer {
/**
* Registers a new input source.
*
* @param sequenceIndex The sequence index of the input source which is used to determine the
* order of the input sources. The same index should to be used in {@link #queueInputTexture}.
* @param inputIndex The index of the input source which could be used to determine the order of
* the input sources. The same index should be used in {@link #queueInputTexture}. The
* index must start from 0. All inputs must be registered before
* {@linkplain #queueInputTexture(int, GlTextureProducer, GlTextureInfo, ColorInfo, long) queueing}
* textures.
*/
void registerInputSource(int sequenceIndex);
void registerInputSource(@IntRange(from = 0) int inputIndex);
/**
* Signals that no more frames will come from the upstream {@link GlTextureProducer.Listener}.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source.
*/
void signalEndOfInputSource(int inputId);
void signalEndOfInputSource(int inputIndex);
/**
* Queues an input texture to be composited.
*
* @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
* @param inputIndex The index of the input source, the same index used when {@linkplain
* #registerInputSource(int) registering the input source}.
* @param textureProducer The source from where the {@code inputTexture} is produced.
* @param inputTexture The {@link GlTextureInfo} to composite.
* @param colorInfo The {@link ColorInfo} of {@code inputTexture}.
* @param presentationTimeUs The presentation time of {@code inputTexture}, in microseconds.
*/
void queueInputTexture(
int inputId,
int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,

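For reference, the call order documented above (register every input, queue textures against the same indices, then signal end of each input) looks roughly like this from a producer's point of view. This is a hedged fragment, not code from this commit: videoCompositor, textureProducer, texture, and colorInfo stand in for objects created elsewhere.

```java
// Hypothetical producer-side usage of VideoCompositor (fragment, illustration only).
videoCompositor.registerInputSource(/* inputIndex= */ 0); // primary stream
videoCompositor.registerInputSource(/* inputIndex= */ 1); // secondary stream

// Queue textures using the same indices that were registered above.
videoCompositor.queueInputTexture(
    /* inputIndex= */ 0, textureProducer, texture, colorInfo, /* presentationTimeUs= */ 0);

// Once an upstream producer has no more frames, signal end of that input.
videoCompositor.signalEndOfInputSource(/* inputIndex= */ 0);
```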
View File

@ -419,7 +419,6 @@ public final class CompositingVideoSinkProvider
outputColorInfo =
inputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
}
int videoGraphInputId;
try {
videoGraph =
previewingVideoGraphFactory.create(
@ -435,12 +434,12 @@ public final class CompositingVideoSinkProvider
Size size = currentSurfaceAndSize.second;
maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
}
videoGraphInputId = videoGraph.registerInput();
videoGraph.registerInput(/* inputIndex= */ 0);
} catch (VideoFrameProcessingException e) {
throw new VideoSink.VideoSinkException(e, sourceFormat);
}
state = STATE_INITIALIZED;
return videoGraph.getProcessor(videoGraphInputId);
return videoGraph.getProcessor(/* inputIndex= */ 0);
}
private boolean isInitialized() {
@ -550,8 +549,6 @@ public final class CompositingVideoSinkProvider
// reduces decoder timeouts, and consider restoring.
videoFrameProcessorMaxPendingFrameCount =
Util.getMaxPendingFramesCountForMediaCodecDecoders(context);
videoGraph.registerInput(/* sequenceIndex= */ 0);
videoFrameProcessor = videoGraph.getProcessor(/* sequenceIndex= */ 0);
videoEffects = new ArrayList<>();
finalBufferPresentationTimeUs = C.TIME_UNSET;

View File

@ -744,7 +744,8 @@ public final class DefaultVideoCompositorPixelTest {
textureBitmapReader,
videoCompositor,
sharedExecutorService,
glObjectsProvider)
glObjectsProvider,
/* inputIndex= */ i)
.setEffects(effectsToApply.build())
.build();
inputVideoFrameProcessorTestRunners.add(vfpTestRunner);
@ -855,9 +856,9 @@ public final class DefaultVideoCompositorPixelTest {
TextureBitmapReader textureBitmapReader,
VideoCompositor videoCompositor,
@Nullable ExecutorService executorService,
GlObjectsProvider glObjectsProvider) {
int sequenceIndex = 0;
videoCompositor.registerInputSource(sequenceIndex);
GlObjectsProvider glObjectsProvider,
int inputIndex) {
videoCompositor.registerInputSource(inputIndex);
DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(glObjectsProvider)
@ -871,7 +872,7 @@ public final class DefaultVideoCompositorPixelTest {
textureBitmapReader.readBitmapUnpremultipliedAlpha(
outputTexture, presentationTimeUs);
videoCompositor.queueInputTexture(
sequenceIndex,
inputIndex,
outputTextureProducer,
outputTexture,
ColorInfo.SRGB_BT709_FULL,
@ -885,7 +886,7 @@ public final class DefaultVideoCompositorPixelTest {
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
.setBitmapReader(textureBitmapReader)
.setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(sequenceIndex));
.setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputIndex));
}
}

View File

@ -97,7 +97,7 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format, int sequenceIndex)
public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
throws ExportException {
if (!returnedFirstInput) {
// First input initialized in constructor because output AudioFormat is needed.

View File

@ -129,7 +129,7 @@ import java.util.concurrent.atomic.AtomicLong;
}
@Override
public GraphInput getInput(EditedMediaItem item, Format format, int sequenceIndex) {
public GraphInput getInput(EditedMediaItem item, Format format, int inputIndex) {
return this;
}

View File

@ -64,11 +64,11 @@ import java.util.List;
*
* @param editedMediaItem The initial {@link EditedMediaItem} of the input.
* @param format The initial {@link Format} of the input.
* @param sequenceIndex The index of the input sequence.
* @param inputIndex The index of the input.
* @throws ExportException If an error occurs getting the input.
*/
public abstract GraphInput getInput(
EditedMediaItem editedMediaItem, Format format, int sequenceIndex) throws ExportException;
EditedMediaItem editedMediaItem, Format format, int inputIndex) throws ExportException;
/**
* Processes the input data and returns whether it may be possible to process more data by calling

View File

@ -79,9 +79,9 @@ import java.util.concurrent.Executor;
}
@Override
public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
registerInput(sequenceIndex);
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
registerInput(inputIndex);
return new VideoFrameProcessingWrapper(
getProcessor(sequenceIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
getProcessor(inputIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
}
}

View File

@ -106,12 +106,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
checkState(videoFrameProcessingWrapper == null);
registerInput(sequenceIndex);
registerInput(inputIndex);
videoFrameProcessingWrapper =
new VideoFrameProcessingWrapper(
getProcessor(sequenceIndex), getPresentation(), getInitialTimestampOffsetUs());
getProcessor(inputIndex), getPresentation(), getInitialTimestampOffsetUs());
return videoFrameProcessingWrapper;
}
}

View File

@ -70,7 +70,7 @@ import java.util.concurrent.Executor;
*
* <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
*
* @param sequenceIndex The index of the input sequence, which is used to order the inputs.
* @param inputIndex The index of the input, which could be used to order the inputs.
*/
GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException;
GraphInput createInput(int inputIndex) throws VideoFrameProcessingException;
}

View File

@ -33,6 +33,7 @@ import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.util.Pair;
import android.view.Surface;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
@ -154,10 +155,10 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public GraphInput getInput(EditedMediaItem editedMediaItem, Format format, int sequenceIndex)
public GraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
throws ExportException {
try {
return videoGraph.createInput(sequenceIndex);
return videoGraph.createInput(inputIndex);
} catch (VideoFrameProcessingException e) {
throw ExportException.createForVideoFrameProcessingException(e);
}
@ -543,18 +544,19 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public void registerInput(int sequenceIndex) throws VideoFrameProcessingException {
videoGraph.registerInput(sequenceIndex);
public void registerInput(@IntRange(from = 0) int inputIndex)
throws VideoFrameProcessingException {
videoGraph.registerInput(inputIndex);
}
@Override
public VideoFrameProcessor getProcessor(int sequenceIndex) {
return videoGraph.getProcessor(sequenceIndex);
public VideoFrameProcessor getProcessor(int inputIndex) {
return videoGraph.getProcessor(inputIndex);
}
@Override
public GraphInput createInput(int sequenceIndex) throws VideoFrameProcessingException {
return videoGraph.createInput(sequenceIndex);
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
return videoGraph.createInput(inputIndex);
}
@Override