Mirror of https://github.com/androidx/media.git

Commit c5964d197c: Merge pull request #1055 from AradiPatrik:z-order-fix
PiperOrigin-RevId: 631616905
@@ -62,6 +62,8 @@
 * Change default SDR color working space from linear colors to electrical
   BT 709 SDR video. Also provides third option to retain the original
   colorspace.
+* Allow defining indeterminate z-order of EditedMediaItemSequences
+  ([#1055](https://github.com/androidx/media/pull/1055)).
 * Muxers:
 * IMA extension:
   * Promote API that is required for apps to play
@@ -16,6 +16,7 @@

 package androidx.media3.common;

+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;

@@ -73,19 +74,22 @@ public interface VideoGraph {
    * <p>A underlying processing {@link VideoFrameProcessor} is created every time this method is
    * called.
    *
+   * <p>All inputs must be registered before rendering frames to the underlying {@link
+   * #getProcessor(int) VideoFrameProcessor}.
+   *
    * <p>If the method throws, the caller must call {@link #release}.
    *
-   * @return The id of the registered input, which can be used to get the underlying {@link
-   *     VideoFrameProcessor} via {@link #getProcessor(int)}.
+   * @param inputIndex The index of the input which could be used to order the inputs. The index
+   *     must start from 0.
    */
-  int registerInput() throws VideoFrameProcessingException;
+  void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;

   /**
    * Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
-   * {@link #registerInput()}. If the {@code inputId} is not {@linkplain #registerInput()
+   * {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain #registerInput(int)
    * registered} before, this method will throw an {@link IllegalStateException}.
    */
-  VideoFrameProcessor getProcessor(int inputId);
+  VideoFrameProcessor getProcessor(int inputIndex);

   /**
    * Sets the output surface and supporting information.
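For orientation, here is a minimal caller-side sketch of the contract introduced above: inputs are registered under caller-chosen indices starting from 0, all registration happens before frames are rendered, and `getProcessor(int)` is keyed by the same index. This is illustrative only; `videoGraph` stands for any already-constructed and initialized `VideoGraph` implementation, and `registerAllInputs` is a hypothetical helper, not part of this change.

```java
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;

final class VideoGraphRegistrationSketch {

  /** Registers inputs 0..inputCount-1 on an already-initialized graph and returns their processors. */
  static VideoFrameProcessor[] registerAllInputs(VideoGraph videoGraph, int inputCount)
      throws VideoFrameProcessingException {
    // All inputs must be registered before rendering frames to any underlying processor,
    // so the registration loop runs to completion first.
    for (int inputIndex = 0; inputIndex < inputCount; inputIndex++) {
      videoGraph.registerInput(inputIndex); // Indices are caller-chosen and start from 0.
    }
    VideoFrameProcessor[] processors = new VideoFrameProcessor[inputCount];
    for (int inputIndex = 0; inputIndex < inputCount; inputIndex++) {
      // The processor is now looked up by the same index, not by a returned id.
      processors[inputIndex] = videoGraph.getProcessor(inputIndex);
    }
    return processors;
  }

  private VideoGraphRegistrationSketch() {}
}
```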
@@ -18,6 +18,7 @@ package androidx.media3.effect;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static androidx.media3.common.util.Util.contains;
 import static java.lang.Math.abs;
 import static java.lang.Math.max;

@@ -26,6 +27,7 @@ import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
 import android.opengl.EGLSurface;
 import android.opengl.GLES20;
+import android.util.SparseArray;
 import androidx.annotation.GuardedBy;
 import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
@@ -45,7 +47,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import java.io.IOException;
 import java.util.ArrayDeque;
-import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Queue;
@@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {

   private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
   private static final String TAG = "DefaultVideoCompositor";
-  private static final int PRIMARY_INPUT_ID = 0;
+  // TODO: b/338579287: Use the first registered index instead of a constant value.
+  private static final int PRIMARY_INPUT_INDEX = 0;

   private final VideoCompositor.Listener listener;
   private final GlTextureProducer.Listener textureOutputListener;
@@ -88,7 +90,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;

   @GuardedBy("this")
-  private final List<InputSource> inputSources;
+  private final SparseArray<InputSource> inputSources;

   @GuardedBy("this")
   private boolean allInputsEnded; // Whether all inputSources have signaled end of input.
@@ -124,7 +126,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     this.settings = settings;
     this.compositorGlProgram = new CompositorGlProgram(context);

-    inputSources = new ArrayList<>();
+    inputSources = new SparseArray<>();
     outputTexturePool =
         new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
     outputTextureTimestamps = new LongArrayQueue(textureOutputCapacity);
@@ -142,25 +144,26 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   }

   @Override
-  public synchronized int registerInputSource() {
-    inputSources.add(new InputSource());
-    return inputSources.size() - 1;
+  public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
+    checkState(!contains(inputSources, inputIndex));
+    inputSources.put(inputIndex, new InputSource());
   }

   @Override
-  public synchronized void signalEndOfInputSource(int inputId) {
-    inputSources.get(inputId).isInputEnded = true;
+  public synchronized void signalEndOfInputSource(int inputIndex) {
+    checkState(contains(inputSources, inputIndex));
+    inputSources.get(inputIndex).isInputEnded = true;
     boolean allInputsEnded = true;
     for (int i = 0; i < inputSources.size(); i++) {
-      if (!inputSources.get(i).isInputEnded) {
+      if (!inputSources.valueAt(i).isInputEnded) {
         allInputsEnded = false;
         break;
       }
     }

     this.allInputsEnded = allInputsEnded;
-    if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
-      if (inputId == PRIMARY_INPUT_ID) {
+    if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
+      if (inputIndex == PRIMARY_INPUT_INDEX) {
         releaseExcessFramesInAllSecondaryStreams();
       }
       if (allInputsEnded) {
@@ -168,7 +171,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
         return;
       }
     }
-    if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
+    if (inputIndex != PRIMARY_INPUT_INDEX && inputSources.get(inputIndex).frameInfos.size() == 1) {
       // When a secondary stream ends input, composite if there was only one pending frame in the
       // stream.
       videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@@ -177,12 +180,13 @@ public final class DefaultVideoCompositor implements VideoCompositor {

   @Override
   public synchronized void queueInputTexture(
-      int inputId,
+      int inputIndex,
       GlTextureProducer textureProducer,
       GlTextureInfo inputTexture,
       ColorInfo colorInfo,
       long presentationTimeUs) {
-    InputSource inputSource = inputSources.get(inputId);
+    checkState(contains(inputSources, inputIndex));
+    InputSource inputSource = inputSources.get(inputIndex);
     checkState(!inputSource.isInputEnded);
     checkStateNotNull(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
     if (configuredColorInfo == null) {
@@ -196,10 +200,10 @@ public final class DefaultVideoCompositor implements VideoCompositor {
             textureProducer,
             inputTexture,
             presentationTimeUs,
-            settings.getOverlaySettings(inputId, presentationTimeUs));
+            settings.getOverlaySettings(inputIndex, presentationTimeUs));
     inputSource.frameInfos.add(inputFrameInfo);

-    if (inputId == PRIMARY_INPUT_ID) {
+    if (inputIndex == PRIMARY_INPUT_INDEX) {
       releaseExcessFramesInAllSecondaryStreams();
     } else {
       releaseExcessFramesInSecondaryStream(inputSource);
@@ -225,11 +229,11 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   }

   private synchronized void releaseExcessFramesInAllSecondaryStreams() {
-    for (int i = 0; i < inputSources.size(); i++) {
-      if (i == PRIMARY_INPUT_ID) {
+    for (int inputIndex = 0; inputIndex < inputSources.size(); inputIndex++) {
+      if (inputIndex == PRIMARY_INPUT_INDEX) {
         continue;
       }
-      releaseExcessFramesInSecondaryStream(inputSources.get(i));
+      releaseExcessFramesInSecondaryStream(inputSources.valueAt(inputIndex));
     }
   }

@@ -241,7 +245,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
    * began.
    */
   private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
-    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
     // If the primary stream output is ended, all secondary frames can be released.
     if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
       releaseFrames(
@@ -292,7 +296,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
       return;
     }

-    InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
+    InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);

     ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
     for (int i = 0; i < framesToComposite.size(); i++) {
@@ -313,7 +317,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     textureOutputListener.onTextureRendered(
         /* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);

-    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+    InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
     releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
     releaseExcessFramesInAllSecondaryStreams();

@@ -333,18 +337,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     if (outputTexturePool.freeTextureCount() == 0) {
       return ImmutableList.of();
     }
-    for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-      if (inputSources.get(inputId).frameInfos.isEmpty()) {
+    for (int i = 0; i < inputSources.size(); i++) {
+      if (inputSources.valueAt(i).frameInfos.isEmpty()) {
         return ImmutableList.of();
       }
     }
     ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
     InputFrameInfo primaryFrameToComposite =
-        inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
+        inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
     framesToComposite.add(primaryFrameToComposite);

-    for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-      if (inputId == PRIMARY_INPUT_ID) {
+    for (int i = 0; i < inputSources.size(); i++) {
+      if (i == PRIMARY_INPUT_INDEX) {
         continue;
       }
       // Select the secondary streams' frame that would be composited next. The frame selected is
@@ -353,7 +357,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
       // 2. Two or more frames, and at least one frame has timestamp greater than the target
       // timestamp.
       // The smaller timestamp is taken if two timestamps have the same distance from the primary.
-      InputSource secondaryInputSource = inputSources.get(inputId);
+      InputSource secondaryInputSource = inputSources.valueAt(i);
       if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
         return ImmutableList.of();
       }
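The data-structure switch above, from an `ArrayList` indexed by arrival order to a `SparseArray` keyed by the caller-supplied index, is the heart of the change. The following is a standalone sketch of the pattern; `IndexedInputRegistry` and `InputState` are hypothetical names standing in for the compositor's internal `InputSource` bookkeeping and are not part of the commit.

```java
import android.util.SparseArray;

final class IndexedInputRegistry {

  static final class InputState {
    boolean inputEnded;
  }

  private final SparseArray<InputState> inputs = new SparseArray<>();

  /** Registers a source under a caller-chosen key, rejecting duplicate registration. */
  void register(int inputIndex) {
    // A SparseArray contains a key iff indexOfKey(key) >= 0; this mirrors the
    // checkState(!contains(inputSources, inputIndex)) guard in the diff.
    if (inputs.indexOfKey(inputIndex) >= 0) {
      throw new IllegalStateException("Input " + inputIndex + " is already registered");
    }
    inputs.put(inputIndex, new InputState());
  }

  /** Returns whether every registered source has signaled end of input. */
  boolean allInputsEnded() {
    // Positional iteration over a SparseArray uses valueAt(i); position i is unrelated to
    // the key, which is why the loops above change from get(i) to valueAt(i).
    for (int i = 0; i < inputs.size(); i++) {
      if (!inputs.valueAt(i).inputEnded) {
        return false;
      }
    }
    return true;
  }
}
```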
@@ -33,6 +33,7 @@ import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
 import android.opengl.EGLSurface;
 import android.util.SparseArray;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
@@ -75,7 +76,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
   private final Executor listenerExecutor;
   private final VideoCompositorSettings videoCompositorSettings;
   private final List<Effect> compositionEffects;
-  private final List<VideoFrameProcessor> preProcessors;
+  private final SparseArray<VideoFrameProcessor> preProcessors;

   private final ExecutorService sharedExecutorService;

@@ -114,7 +115,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
     this.compositionEffects = new ArrayList<>(compositionEffects);
     this.initialTimestampOffsetUs = initialTimestampOffsetUs;
     lastRenderedPresentationTimeUs = C.TIME_UNSET;
-    preProcessors = new ArrayList<>();
+    preProcessors = new SparseArray<>();
     sharedExecutorService = newSingleThreadScheduledExecutor(SHARED_EXECUTOR_NAME);
     glObjectsProvider = new SingleContextGlObjectsProvider();
     // TODO - b/289986435: Support injecting VideoFrameProcessor.Factory.
@@ -136,7 +137,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
   @Override
   public void initialize() throws VideoFrameProcessingException {
     checkState(
-        preProcessors.isEmpty()
+        preProcessors.size() == 0
             && videoCompositor == null
             && compositionVideoFrameProcessor == null
             && !released);
@@ -211,10 +212,10 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
   }

   @Override
-  public int registerInput() throws VideoFrameProcessingException {
-    checkStateNotNull(videoCompositor);
-
-    int videoCompositorInputId = videoCompositor.registerInputSource();
+  public void registerInput(@IntRange(from = 0) int inputIndex)
+      throws VideoFrameProcessingException {
+    checkState(!contains(preProcessors, inputIndex));
+    checkNotNull(videoCompositor).registerInputSource(inputIndex);
     // Creating a new VideoFrameProcessor for the input.
     VideoFrameProcessor preProcessor =
         videoFrameProcessorFactory
@@ -223,7 +224,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
                 // Texture output to compositor.
                 (textureProducer, texture, presentationTimeUs, syncObject) ->
                     queuePreProcessingOutputToCompositor(
-                        videoCompositorInputId, textureProducer, texture, presentationTimeUs),
+                        inputIndex, textureProducer, texture, presentationTimeUs),
                 PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
             .build()
             .create(
@@ -254,17 +255,16 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {

               @Override
               public void onEnded() {
-                onPreProcessingVideoFrameProcessorEnded(videoCompositorInputId);
+                onPreProcessingVideoFrameProcessorEnded(inputIndex);
               }
             });
-    preProcessors.add(preProcessor);
-    return videoCompositorInputId;
+    preProcessors.put(inputIndex, preProcessor);
   }

   @Override
-  public VideoFrameProcessor getProcessor(int inputId) {
-    checkState(inputId < preProcessors.size());
-    return preProcessors.get(inputId);
+  public VideoFrameProcessor getProcessor(int inputIndex) {
+    checkState(contains(preProcessors, inputIndex));
+    return preProcessors.get(inputIndex);
   }

   @Override
@@ -285,7 +285,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {

     // Needs to release the frame processors before their internal executor services are released.
     for (int i = 0; i < preProcessors.size(); i++) {
-      preProcessors.get(i).release();
+      preProcessors.get(preProcessors.keyAt(i)).release();
     }
     preProcessors.clear();

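A side note on the release loop in the last hunk: with a `SparseArray`, `get(keyAt(i))` and `valueAt(i)` resolve to the same element, so iterating by position and re-resolving the key (as the diff does) is equivalent to reading values positionally. A tiny illustrative sketch, with hypothetical names:

```java
import android.util.SparseArray;

final class SparseArrayIterationSketch {

  interface Releasable {
    void release();
  }

  /** Releases every value in the array; get(keyAt(i)) is interchangeable with valueAt(i). */
  static void releaseAll(SparseArray<? extends Releasable> items) {
    for (int i = 0; i < items.size(); i++) {
      items.get(items.keyAt(i)).release();
    }
    items.clear();
  }

  private SparseArrayIterationSketch() {}
}
```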
@@ -109,6 +109,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph

   @Override
   public void renderOutputFrame(long renderTimeNs) {
-    getProcessor(SINGLE_INPUT_INDEX).renderOutputFrame(renderTimeNs);
+    getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
   }
 }
@@ -16,11 +16,13 @@

 package androidx.media3.effect;

+import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;

 import android.content.Context;
 import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
@@ -38,9 +40,6 @@ import java.util.concurrent.Executor;
 @UnstableApi
 public abstract class SingleInputVideoGraph implements VideoGraph {

-  /** The ID {@link #registerInput()} returns. */
-  public static final int SINGLE_INPUT_INDEX = 0;
-
   private final Context context;
   private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
   private final ColorInfo outputColorInfo;
@@ -56,6 +55,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   private boolean isEnded;
   private boolean released;
   private volatile boolean hasProducedFrameWithTimestampZero;
+  private int inputIndex;

   /**
    * Creates an instance.
@@ -86,6 +86,7 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     this.renderFramesAutomatically = renderFramesAutomatically;
     this.presentation = presentation;
     this.initialTimestampOffsetUs = initialTimestampOffsetUs;
+    this.inputIndex = C.INDEX_UNSET;
   }

   /**
@@ -99,9 +100,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   }

   @Override
-  public int registerInput() throws VideoFrameProcessingException {
+  public void registerInput(int inputIndex) throws VideoFrameProcessingException {
     checkStateNotNull(videoFrameProcessor == null && !released);
+    checkState(this.inputIndex == C.INDEX_UNSET);

+    this.inputIndex = inputIndex;
     videoFrameProcessor =
         videoFrameProcessorFactory.create(
             context,
@@ -159,11 +162,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     if (outputSurfaceInfo != null) {
       videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
     }
-    return SINGLE_INPUT_INDEX;
   }

   @Override
-  public VideoFrameProcessor getProcessor(int inputId) {
+  public VideoFrameProcessor getProcessor(int inputIndex) {
+    checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
     return checkStateNotNull(videoFrameProcessor);
   }

@@ -193,6 +196,10 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     released = true;
   }

+  protected int getInputIndex() {
+    return inputIndex;
+  }
+
   protected long getInitialTimestampOffsetUs() {
     return initialTimestampOffsetUs;
   }
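The single-input graph now remembers the one index it was handed instead of exporting a fixed `SINGLE_INPUT_INDEX` constant, using `C.INDEX_UNSET` (-1) as the sentinel for "nothing registered yet". A reduced sketch of that bookkeeping, with hypothetical names:

```java
final class SingleSlotIndexSketch {

  // Stand-in for androidx.media3.common.C.INDEX_UNSET, which is -1.
  private static final int INDEX_UNSET = -1;

  private int inputIndex = INDEX_UNSET;

  /** Records the single registered index; a second registration is a programming error. */
  void register(int inputIndex) {
    if (this.inputIndex != INDEX_UNSET) {
      throw new IllegalStateException("An input is already registered");
    }
    this.inputIndex = inputIndex;
  }

  /** Validates a lookup against the registered index, as getProcessor(int) now does. */
  int checkIndex(int requestedIndex) {
    if (inputIndex == INDEX_UNSET || inputIndex != requestedIndex) {
      throw new IllegalArgumentException("Unregistered input index: " + requestedIndex);
    }
    return inputIndex;
  }
}
```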
@@ -15,6 +15,7 @@
  */
 package androidx.media3.effect;

+import androidx.annotation.IntRange;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
@@ -46,29 +47,35 @@ public interface VideoCompositor extends GlTextureProducer {
   }

   /**
-   * Registers a new input source, and returns a unique {@code inputId} corresponding to this
-   * source, to be used in {@link #queueInputTexture}.
+   * Registers a new input source.
+   *
+   * @param inputIndex The index of the input source which could be used to determine the order of
+   *     the input sources. The same index should to be used in {@link #queueInputTexture}. The
+   *     index must start from 0. All inputs must be registered before {@linkplain
+   *     #queueInputTexture(int, GlTextureProducer, GlTextureInfo, ColorInfo, long) queueing}
+   *     textures.
    */
-  int registerInputSource();
+  void registerInputSource(@IntRange(from = 0) int inputIndex);

   /**
    * Signals that no more frames will come from the upstream {@link GlTextureProducer.Listener}.
    *
-   * @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
+   * @param inputIndex The index of the input source.
    */
-  void signalEndOfInputSource(int inputId);
+  void signalEndOfInputSource(int inputIndex);

   /**
    * Queues an input texture to be composited.
    *
-   * @param inputId The identifier for an input source, returned from {@link #registerInputSource}.
+   * @param inputIndex The index of the input source, the same index used when {@linkplain
+   *     #registerInputSource(int) registering the input source}.
    * @param textureProducer The source from where the {@code inputTexture} is produced.
    * @param inputTexture The {@link GlTextureInfo} to composite.
    * @param colorInfo The {@link ColorInfo} of {@code inputTexture}.
    * @param presentationTimeUs The presentation time of {@code inputTexture}, in microseconds.
    */
   void queueInputTexture(
-      int inputId,
+      int inputIndex,
       GlTextureProducer textureProducer,
       GlTextureInfo inputTexture,
       ColorInfo colorInfo,
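Read together, the `VideoCompositor` javadoc above pins down the client-side protocol: register every source under its index before queueing, then reuse that same index for `queueInputTexture` and `signalEndOfInputSource`. A minimal caller sketch, illustrative only; the per-frame and end-of-stream calls are shown as comments because they need a live `GlTextureProducer` and `GlTextureInfo`:

```java
import androidx.media3.effect.VideoCompositor;

final class CompositorClientSketch {

  /** Registers sources 0..sourceCount-1 before any texture is queued. */
  static void registerSources(VideoCompositor compositor, int sourceCount) {
    for (int inputIndex = 0; inputIndex < sourceCount; inputIndex++) {
      compositor.registerInputSource(inputIndex);
    }
    // Per decoded frame, later, on the same inputIndex:
    //   compositor.queueInputTexture(inputIndex, producer, texture, colorInfo, presentationTimeUs);
    // Once a stream is exhausted:
    //   compositor.signalEndOfInputSource(inputIndex);
  }

  private CompositorClientSketch() {}
}
```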
@@ -419,7 +419,6 @@ public final class CompositingVideoSinkProvider
         outputColorInfo =
             inputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
       }
-      int videoGraphInputId;
       try {
         videoGraph =
             previewingVideoGraphFactory.create(
@@ -435,12 +434,12 @@ public final class CompositingVideoSinkProvider
           Size size = currentSurfaceAndSize.second;
           maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
         }
-        videoGraphInputId = videoGraph.registerInput();
+        videoGraph.registerInput(/* inputIndex= */ 0);
       } catch (VideoFrameProcessingException e) {
         throw new VideoSink.VideoSinkException(e, sourceFormat);
       }
       state = STATE_INITIALIZED;
-      return videoGraph.getProcessor(videoGraphInputId);
+      return videoGraph.getProcessor(/* inputIndex= */ 0);
     }

     private boolean isInitialized() {
@@ -744,7 +744,8 @@ public final class DefaultVideoCompositorPixelTest {
               textureBitmapReader,
               videoCompositor,
               sharedExecutorService,
-              glObjectsProvider)
+              glObjectsProvider,
+              /* inputIndex= */ i)
           .setEffects(effectsToApply.build())
           .build();
       inputVideoFrameProcessorTestRunners.add(vfpTestRunner);
@@ -855,8 +856,9 @@ public final class DefaultVideoCompositorPixelTest {
       TextureBitmapReader textureBitmapReader,
       VideoCompositor videoCompositor,
       @Nullable ExecutorService executorService,
-      GlObjectsProvider glObjectsProvider) {
-    int inputId = videoCompositor.registerInputSource();
+      GlObjectsProvider glObjectsProvider,
+      int inputIndex) {
+    videoCompositor.registerInputSource(inputIndex);
     DefaultVideoFrameProcessor.Factory.Builder defaultVideoFrameProcessorFactoryBuilder =
         new DefaultVideoFrameProcessor.Factory.Builder()
             .setGlObjectsProvider(glObjectsProvider)
@@ -870,7 +872,7 @@ public final class DefaultVideoCompositorPixelTest {
               textureBitmapReader.readBitmapUnpremultipliedAlpha(
                   outputTexture, presentationTimeUs);
               videoCompositor.queueInputTexture(
-                  inputId,
+                  inputIndex,
                   outputTextureProducer,
                   outputTexture,
                   ColorInfo.SRGB_BT709_FULL,
@@ -884,7 +886,7 @@ public final class DefaultVideoCompositorPixelTest {
         .setTestId(testId)
         .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactoryBuilder.build())
         .setBitmapReader(textureBitmapReader)
-        .setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputId));
+        .setOnEndedListener(() -> videoCompositor.signalEndOfInputSource(inputIndex));
   }
 }

@@ -97,7 +97,7 @@ import org.checkerframework.dataflow.qual.Pure;
   }

   @Override
-  public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format)
+  public AudioGraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
       throws ExportException {
     if (!returnedFirstInput) {
       // First input initialized in constructor because output AudioFormat is needed.
@@ -129,7 +129,7 @@ import java.util.concurrent.atomic.AtomicLong;
   }

   @Override
-  public GraphInput getInput(EditedMediaItem item, Format format) {
+  public GraphInput getInput(EditedMediaItem item, Format format, int inputIndex) {
     return this;
   }

@@ -64,10 +64,11 @@ import java.util.List;
    *
    * @param editedMediaItem The initial {@link EditedMediaItem} of the input.
    * @param format The initial {@link Format} of the input.
+   * @param inputIndex The index of the input.
    * @throws ExportException If an error occurs getting the input.
    */
-  public abstract GraphInput getInput(EditedMediaItem editedMediaItem, Format format)
-      throws ExportException;
+  public abstract GraphInput getInput(
+      EditedMediaItem editedMediaItem, Format format, int inputIndex) throws ExportException;

   /**
    * Processes the input data and returns whether it may be possible to process more data by calling
@@ -637,7 +637,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }

     GraphInput sampleExporterInput =
-        sampleExporter.getInput(firstEditedMediaItem, assetLoaderOutputFormat);
+        sampleExporter.getInput(firstEditedMediaItem, assetLoaderOutputFormat, sequenceIndex);
     OnMediaItemChangedListener onMediaItemChangedListener =
         (editedMediaItem, durationUs, decodedFormat, isLast) -> {
           onMediaItemChanged(trackType, durationUs, isLast);
@@ -79,9 +79,9 @@ import java.util.concurrent.Executor;
   }

   @Override
-  public GraphInput createInput() throws VideoFrameProcessingException {
-    int inputId = registerInput();
+  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
+    registerInput(inputIndex);
     return new VideoFrameProcessingWrapper(
-        getProcessor(inputId), /* presentation= */ null, getInitialTimestampOffsetUs());
+        getProcessor(inputIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
   }
 }
@@ -106,12 +106,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public GraphInput createInput() throws VideoFrameProcessingException {
+  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
     checkState(videoFrameProcessingWrapper == null);
-    int inputId = registerInput();
+    registerInput(inputIndex);
     videoFrameProcessingWrapper =
         new VideoFrameProcessingWrapper(
-            getProcessor(inputId), getPresentation(), getInitialTimestampOffsetUs());
+            getProcessor(inputIndex), getPresentation(), getInitialTimestampOffsetUs());
     return videoFrameProcessingWrapper;
   }
 }
@@ -69,6 +69,8 @@ import java.util.concurrent.Executor;
    * <p>This method must called exactly once for every input stream.
    *
    * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
+   *
+   * @param inputIndex The index of the input, which could be used to order the inputs.
    */
-  GraphInput createInput() throws VideoFrameProcessingException;
+  GraphInput createInput(int inputIndex) throws VideoFrameProcessingException;
 }
@@ -33,6 +33,7 @@ import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.util.Pair;
 import android.view.Surface;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.annotation.VisibleForTesting;
 import androidx.media3.common.C;
@@ -153,10 +154,10 @@ import org.checkerframework.dataflow.qual.Pure;
   }

   @Override
-  public GraphInput getInput(EditedMediaItem editedMediaItem, Format format)
+  public GraphInput getInput(EditedMediaItem editedMediaItem, Format format, int inputIndex)
       throws ExportException {
     try {
-      return videoGraph.createInput();
+      return videoGraph.createInput(inputIndex);
     } catch (VideoFrameProcessingException e) {
       throw ExportException.createForVideoFrameProcessingException(e);
     }
@@ -540,18 +541,19 @@ import org.checkerframework.dataflow.qual.Pure;
    }

    @Override
-    public int registerInput() throws VideoFrameProcessingException {
-      return videoGraph.registerInput();
+    public void registerInput(@IntRange(from = 0) int inputIndex)
+        throws VideoFrameProcessingException {
+      videoGraph.registerInput(inputIndex);
    }

    @Override
-    public VideoFrameProcessor getProcessor(int inputId) {
-      return videoGraph.getProcessor(inputId);
+    public VideoFrameProcessor getProcessor(int inputIndex) {
+      return videoGraph.getProcessor(inputIndex);
    }

    @Override
-    public GraphInput createInput() throws VideoFrameProcessingException {
-      return videoGraph.createInput();
+    public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
+      return videoGraph.createInput(inputIndex);
    }

    @Override