Use VideoGraph as top level component

...and reduce the number of classes/interfaces

PiperOrigin-RevId: 735451687
This commit is contained in:
claincly 2025-03-10 11:28:28 -07:00 committed by Copybara-Service
parent ce59680d0f
commit 44b3a43652
21 changed files with 779 additions and 1245 deletions

View File

@@ -1,80 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.common;
import android.content.Context;
import androidx.media3.common.util.UnstableApi;
import java.util.List;
import java.util.concurrent.Executor;
/** A {@link VideoGraph} specialized for preview use cases. */
@UnstableApi
public interface PreviewingVideoGraph extends VideoGraph {

  /** Factory for {@link PreviewingVideoGraph} instances. */
  interface Factory {

    /**
     * Creates a new {@link PreviewingVideoGraph}.
     *
     * @param context A {@link Context}.
     * @param outputColorInfo The {@link ColorInfo} that output frames are produced in.
     * @param debugViewProvider A {@link DebugViewProvider}.
     * @param listener A {@link Listener} to receive graph events.
     * @param listenerExecutor The {@link Executor} used to invoke the {@code listener}.
     * @param videoCompositorSettings The {@link VideoCompositorSettings}.
     * @param compositionEffects The {@linkplain Effect effects} applied to the overall
     *     composition.
     * @param initialTimestampOffsetUs The offset applied to the first frame's timestamp, in
     *     microseconds.
     * @return The newly created instance.
     * @throws VideoFrameProcessingException If creating the underlying {@link
     *     VideoFrameProcessor} fails.
     */
    PreviewingVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs)
        throws VideoFrameProcessingException;

    /**
     * Returns whether the created {@link VideoGraph} supports {@linkplain #registerInput
     * registering} more than one input.
     */
    boolean supportsMultipleInputs();
  }

  /**
   * Renders the oldest output frame that is {@linkplain
   * Listener#onOutputFrameAvailableForRendering(long) available for rendering} but not yet
   * rendered, using the given {@code renderTimeNs}.
   *
   * <p>Depending on {@code renderTimeNs}, the frame is either rendered to the {@linkplain
   * #setOutputSurfaceInfo output surface} or dropped.
   *
   * <p>Implementations may forward {@code renderTimeNs} to {@link
   * android.opengl.EGLExt#eglPresentationTimeANDROID}.
   *
   * @param renderTimeNs The render time for the frame, in nanoseconds; it may lie before or after
   *     the current system time. Pass {@link VideoFrameProcessor#DROP_OUTPUT_FRAME} to drop the
   *     frame, or {@link VideoFrameProcessor#RENDER_OUTPUT_FRAME_IMMEDIATELY} to render it
   *     immediately.
   */
  void renderOutputFrame(long renderTimeNs);
}

View File

@ -288,7 +288,7 @@ public interface VideoFrameProcessor {
* *
* <p>After registering the first input stream, this method must only be called after the last * <p>After registering the first input stream, this method must only be called after the last
* frame of the already-registered input stream has been {@linkplain #registerInputFrame * frame of the already-registered input stream has been {@linkplain #registerInputFrame
* registered}, last bitmap {@link #queueInputBitmap queued} or last texture id {@linkplain * registered}, last bitmap {@linkplain #queueInputBitmap queued} or last texture id {@linkplain
* #queueInputTexture queued}. * #queueInputTexture queued}.
* *
* <p>This method blocks the calling thread until the previous calls to this method finish, that * <p>This method blocks the calling thread until the previous calls to this method finish, that

View File

@ -16,16 +16,61 @@
package androidx.media3.common; package androidx.media3.common;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.IntRange; import androidx.annotation.IntRange;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.VideoFrameProcessor.InputType;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import java.util.List;
import java.util.concurrent.Executor;
/** Represents a graph for processing raw video frames. */ /** Represents a graph for processing raw video frames. */
@UnstableApi @UnstableApi
public interface VideoGraph { public interface VideoGraph {
/** A factory for {@link VideoGraph} instances. */
interface Factory {
/**
* Creates a new {@link VideoGraph} instance.
*
* @param context A {@link Context}.
* @param outputColorInfo The {@link ColorInfo} for the output frames.
* @param debugViewProvider A {@link DebugViewProvider}.
* @param listener A {@link Listener}.
* @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
* @param videoCompositorSettings The {@link VideoCompositorSettings} to apply to the
* composition.
* @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
* @param initialTimestampOffsetUs The timestamp offset for the first frame, in microseconds.
* @param renderFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain VideoGraph#setOutputSurfaceInfo(SurfaceInfo) output surface}
* automatically as the instance is done processing them. If {@code false}, the instance
* will block until {@code VideoGraph#renderOutputFrameWithMediaPresentationTime()} is
* called, to render the frame.
* @return A new instance.
*/
VideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically);
/**
* Returns whether the {@linkplain #create created} {@link VideoGraph} supports multiple video
* {@linkplain VideoGraph#registerInputStream inputs}.
*/
boolean supportsMultipleInputs();
}
/** Listener for video frame processing events. */ /** Listener for video frame processing events. */
@UnstableApi
interface Listener { interface Listener {
/** /**
* Called when the output size changes. * Called when the output size changes.
@ -79,11 +124,8 @@ public interface VideoGraph {
/** /**
* Registers a new input to the {@code VideoGraph}. * Registers a new input to the {@code VideoGraph}.
* *
* <p>A underlying processing {@link VideoFrameProcessor} is created every time this method is * <p>All inputs must be registered before rendering frames by calling {@link
* called. * #registerInputFrame}, {@link #queueInputBitmap} or {@link #queueInputTexture}.
*
* <p>All inputs must be registered before rendering frames to the underlying {@link
* #getProcessor(int) VideoFrameProcessor}.
* *
* <p>If the method throws, the caller must call {@link #release}. * <p>If the method throws, the caller must call {@link #release}.
* *
@ -92,13 +134,6 @@ public interface VideoGraph {
*/ */
void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException; void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;
/**
* Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
* {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain #registerInput(int)
* registered} before, this method will throw an {@link IllegalStateException}.
*/
VideoFrameProcessor getProcessor(int inputIndex);
/** /**
* Sets the output surface and supporting information. * Sets the output surface and supporting information.
* *
@ -116,11 +151,111 @@ public interface VideoGraph {
*/ */
void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo); void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo);
/**
* Sets a listener that's called when the {@linkplain #getInputSurface input surface} is ready to
* use at {@code inputIndex}.
*/
void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener);
/** Returns the input {@link Surface} at {@code inputIndex}. */
Surface getInputSurface(int inputIndex);
/** Sets the {@link OnInputFrameProcessedListener} at {@code inputIndex}. */
void setOnInputFrameProcessedListener(int inputIndex, OnInputFrameProcessedListener listener);
/**
* Informs the graph that a new input stream will be queued to the graph input corresponding to
* {@code inputIndex}.
*
* <p>After registering the first input stream, this method must only be called for the same index
* after the last frame of the already-registered input stream has been {@linkplain
* #registerInputFrame registered}, last bitmap {@linkplain #queueInputBitmap queued} or last
* texture id {@linkplain #queueInputTexture queued}.
*
* <p>This method blocks the calling thread until the previous input stream corresponding to the
* same {@code inputIndex} has been fully registered internally.
*
* @param inputIndex The index of the input for which a new input stream should be registered.
* This index must start from 0.
* @param inputType The {@link InputType} of the new input stream.
* @param format The {@link Format} of the new input stream. The {@link Format#colorInfo}, the
* {@link Format#width}, the {@link Format#height} and the {@link
* Format#pixelWidthHeightRatio} must be set.
* @param effects The list of {@link Effect effects} to apply to the new input stream.
* @param offsetToAddUs The offset that must be added to the frame presentation timestamps, in
* microseconds. This offset is not part of the input timestamps. It is added to the frame
* timestamps before processing, and is retained in the output timestamps.
*/
void registerInputStream(
int inputIndex,
@InputType int inputType,
Format format,
List<Effect> effects,
long offsetToAddUs);
/**
* Returns the number of pending input frames at {@code inputIndex} that has not been processed
* yet.
*/
int getPendingInputFrameCount(int inputIndex);
/**
* Registers a new input frame at {@code inputIndex}.
*
* @see VideoFrameProcessor#registerInputFrame()
*/
boolean registerInputFrame(int inputIndex);
/**
* Queues the input {@link Bitmap} at {@code inputIndex}.
*
* @see VideoFrameProcessor#queueInputBitmap(Bitmap, TimestampIterator)
*/
boolean queueInputBitmap(int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator);
/**
* Queues the input texture at {@code inputIndex}.
*
* @see VideoFrameProcessor#queueInputTexture(int, long)
*/
boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs);
/**
* Renders the output frame from the {@code VideoGraph}.
*
* <p>This method must be called only for frames that have become {@linkplain
* Listener#onOutputFrameAvailableForRendering(long) available}, calling the method renders the
* frame that becomes available the earliest but not yet rendered.
*
* @see VideoFrameProcessor#renderOutputFrame(long)
*/
void renderOutputFrame(long renderTimeNs);
/**
* Updates an {@linkplain Listener#onOutputFrameAvailableForRendering available frame} with the
* modified effects.
*/
void redraw();
/** /**
* Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp. * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp.
*/ */
boolean hasProducedFrameWithTimestampZero(); boolean hasProducedFrameWithTimestampZero();
/**
* Flushes the {@linkplain #registerInput inputs} of the {@code VideoGraph}.
*
* @see VideoFrameProcessor#flush()
*/
void flush();
/**
* Informs that no further inputs should be accepted at {@code inputIndex}.
*
* @see VideoFrameProcessor#signalEndOfInput()
*/
void signalEndOfInput(int inputIndex);
/** /**
* Releases the associated resources. * Releases the associated resources.
* *

View File

@ -31,10 +31,12 @@ import static androidx.media3.effect.DebugTraceUtil.EVENT_OUTPUT_TEXTURE_RENDERE
import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import android.util.SparseArray; import android.util.SparseArray;
import android.view.Surface;
import androidx.annotation.IntRange; import androidx.annotation.IntRange;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
@ -44,6 +46,7 @@ import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider; import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
@ -51,6 +54,7 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.GlUtil.GlException; import androidx.media3.common.util.GlUtil.GlException;
import androidx.media3.common.util.Log; import androidx.media3.common.util.Log;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.util.ArrayDeque; import java.util.ArrayDeque;
@ -64,7 +68,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** A {@link VideoGraph} that handles multiple input streams. */ /** A {@link VideoGraph} that handles multiple input streams. */
@UnstableApi @UnstableApi
public abstract class MultipleInputVideoGraph implements VideoGraph { public final class MultipleInputVideoGraph implements VideoGraph {
private static final String TAG = "MultiInputVG"; private static final String TAG = "MultiInputVG";
private static final String SHARED_EXECUTOR_NAME = "Effect:MultipleInputVideoGraph:Thread"; private static final String SHARED_EXECUTOR_NAME = "Effect:MultipleInputVideoGraph:Thread";
@ -90,7 +94,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
private final Queue<TimedGlTextureInfo> compositorOutputTextures; private final Queue<TimedGlTextureInfo> compositorOutputTextures;
private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases; private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases;
private final long initialTimestampOffsetUs;
private final boolean renderFramesAutomatically; private final boolean renderFramesAutomatically;
@Nullable private VideoFrameProcessor compositionVideoFrameProcessor; @Nullable private VideoFrameProcessor compositionVideoFrameProcessor;
@ -104,7 +107,52 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
private volatile boolean hasProducedFrameWithTimestampZero; private volatile boolean hasProducedFrameWithTimestampZero;
protected MultipleInputVideoGraph( /** A {@link VideoGraph.Factory} for {@link MultipleInputVideoGraph}. */
public static final class Factory implements VideoGraph.Factory {
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/**
* A {@code Factory} for {@link MultipleInputVideoGraph} that uses a {@link
* DefaultVideoFrameProcessor.Factory}.
*/
public Factory() {
this(new DefaultVideoFrameProcessor.Factory.Builder().build());
}
public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
}
@Override
public MultipleInputVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
return new MultipleInputVideoGraph(
context,
videoFrameProcessorFactory,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
renderFramesAutomatically);
}
@Override
public boolean supportsMultipleInputs() {
return true;
}
}
private MultipleInputVideoGraph(
Context context, Context context,
VideoFrameProcessor.Factory videoFrameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@ -113,7 +161,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) { boolean renderFramesAutomatically) {
checkArgument(videoFrameProcessorFactory instanceof DefaultVideoFrameProcessor.Factory); checkArgument(videoFrameProcessorFactory instanceof DefaultVideoFrameProcessor.Factory);
this.context = context; this.context = context;
@ -123,7 +170,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
this.listenerExecutor = listenerExecutor; this.listenerExecutor = listenerExecutor;
this.videoCompositorSettings = videoCompositorSettings; this.videoCompositorSettings = videoCompositorSettings;
this.compositionEffects = new ArrayList<>(compositionEffects); this.compositionEffects = new ArrayList<>(compositionEffects);
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.renderFramesAutomatically = renderFramesAutomatically; this.renderFramesAutomatically = renderFramesAutomatically;
lastRenderedPresentationTimeUs = C.TIME_UNSET; lastRenderedPresentationTimeUs = C.TIME_UNSET;
preProcessors = new SparseArray<>(); preProcessors = new SparseArray<>();
@ -268,12 +314,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
preProcessors.put(inputIndex, preProcessor); preProcessors.put(inputIndex, preProcessor);
} }
@Override
public VideoFrameProcessor getProcessor(int inputIndex) {
checkState(contains(preProcessors, inputIndex));
return preProcessors.get(inputIndex);
}
@Override @Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
checkNotNull(compositionVideoFrameProcessor).setOutputSurfaceInfo(outputSurfaceInfo); checkNotNull(compositionVideoFrameProcessor).setOutputSurfaceInfo(outputSurfaceInfo);
@ -284,6 +324,75 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
return hasProducedFrameWithTimestampZero; return hasProducedFrameWithTimestampZero;
} }
@Override
public boolean queueInputBitmap(
int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator) {
return getProcessor(inputIndex).queueInputBitmap(inputBitmap, timestampIterator);
}
@Override
public boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs) {
return getProcessor(inputIndex).queueInputTexture(textureId, presentationTimeUs);
}
@Override
public void setOnInputFrameProcessedListener(
int inputIndex, OnInputFrameProcessedListener listener) {
getProcessor(inputIndex).setOnInputFrameProcessedListener(listener);
}
@Override
public void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener) {
getProcessor(inputIndex).setOnInputSurfaceReadyListener(listener);
}
@Override
public Surface getInputSurface(int inputIndex) {
return getProcessor(inputIndex).getInputSurface();
}
@Override
public void registerInputStream(
int inputIndex,
@VideoFrameProcessor.InputType int inputType,
Format format,
List<Effect> effects,
long offsetToAddUs) {
getProcessor(inputIndex).registerInputStream(inputType, format, effects, offsetToAddUs);
}
@Override
public boolean registerInputFrame(int inputIndex) {
return getProcessor(inputIndex).registerInputFrame();
}
@Override
public int getPendingInputFrameCount(int inputIndex) {
return getProcessor(inputIndex).getPendingInputFrameCount();
}
@Override
public void renderOutputFrame(long renderTimeNs) {
checkNotNull(compositionVideoFrameProcessor).renderOutputFrame(renderTimeNs);
}
@Override
public void redraw() {
throw new UnsupportedOperationException();
}
@Override
public void flush() {
for (int i = 0; i < preProcessors.size(); i++) {
preProcessors.get(preProcessors.keyAt(i)).flush();
}
}
@Override
public void signalEndOfInput(int inputIndex) {
getProcessor(inputIndex).signalEndOfInput();
}
@Override @Override
public void release() { public void release() {
if (released) { if (released) {
@ -327,12 +436,9 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
released = true; released = true;
} }
protected VideoFrameProcessor getCompositionVideoFrameProcessor() { private VideoFrameProcessor getProcessor(int inputIndex) {
return checkStateNotNull(compositionVideoFrameProcessor); checkState(contains(preProcessors, inputIndex));
} return preProcessors.get(inputIndex);
protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs;
} }
// This method is called on the sharedExecutorService. // This method is called on the sharedExecutorService.

View File

@@ -1,104 +0,0 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.UnstableApi;
import java.util.List;
import java.util.concurrent.Executor;
/**
 * A {@linkplain PreviewingVideoGraph previewing} specific implementation of {@link
 * MultipleInputVideoGraph}.
 */
@UnstableApi
public final class PreviewingMultipleInputVideoGraph extends MultipleInputVideoGraph
    implements PreviewingVideoGraph {

  /** A factory for creating a {@link PreviewingMultipleInputVideoGraph}. */
  public static final class Factory implements PreviewingVideoGraph.Factory {

    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;

    /**
     * Creates a new factory that uses the {@link DefaultVideoFrameProcessor.Factory} with its
     * default values.
     */
    public Factory() {
      this(new DefaultVideoFrameProcessor.Factory.Builder().build());
    }

    /**
     * Creates an instance that uses the supplied {@code videoFrameProcessorFactory} to create
     * {@link VideoFrameProcessor} instances.
     *
     * <p>Provided for parity with {@link PreviewingSingleInputVideoGraph.Factory}.
     */
    public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
      this.videoFrameProcessorFactory = videoFrameProcessorFactory;
    }

    @Override
    public PreviewingVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs) {
      return new PreviewingMultipleInputVideoGraph(
          context,
          videoFrameProcessorFactory,
          outputColorInfo,
          debugViewProvider,
          listener,
          listenerExecutor,
          videoCompositorSettings,
          compositionEffects,
          initialTimestampOffsetUs);
    }

    @Override
    public boolean supportsMultipleInputs() {
      // Compositing multiple inputs is the purpose of this graph.
      return true;
    }
  }

  private PreviewingMultipleInputVideoGraph(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo outputColorInfo,
      DebugViewProvider debugViewProvider,
      Listener listener,
      Executor listenerExecutor,
      VideoCompositorSettings videoCompositorSettings,
      List<Effect> compositionEffects,
      long initialTimestampOffsetUs) {
    super(
        context,
        videoFrameProcessorFactory,
        outputColorInfo,
        debugViewProvider,
        listener,
        listenerExecutor,
        videoCompositorSettings,
        compositionEffects,
        initialTimestampOffsetUs,
        // Previewing needs explicit control over frame render timing.
        /* renderFramesAutomatically= */ false);
  }

  @Override
  public void renderOutputFrame(long renderTimeNs) {
    // Rendering happens on the final (composition) processor, after compositing.
    getCompositionVideoFrameProcessor().renderOutputFrame(renderTimeNs);
  }
}

View File

@@ -1,110 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.UnstableApi;
import java.util.List;
import java.util.concurrent.Executor;
/**
 * A {@link PreviewingVideoGraph} implementation built on {@link SingleInputVideoGraph}.
 */
@UnstableApi
public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
    implements PreviewingVideoGraph {

  /** Factory for {@link PreviewingSingleInputVideoGraph} instances. */
  public static final class Factory implements PreviewingVideoGraph.Factory {

    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;

    /**
     * Creates a factory backed by a default-configured {@link
     * DefaultVideoFrameProcessor.Factory}.
     */
    public Factory() {
      this.videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory.Builder().build();
    }

    /**
     * Creates a factory that obtains {@link VideoFrameProcessor} instances from the given {@code
     * videoFrameProcessorFactory}.
     */
    public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
      this.videoFrameProcessorFactory = videoFrameProcessorFactory;
    }

    @Override
    public PreviewingVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs) {
      return new PreviewingSingleInputVideoGraph(
          context,
          videoFrameProcessorFactory,
          outputColorInfo,
          debugViewProvider,
          listener,
          listenerExecutor,
          initialTimestampOffsetUs);
    }

    @Override
    public boolean supportsMultipleInputs() {
      // This graph wraps a single VideoFrameProcessor and so accepts one input only.
      return false;
    }
  }

  private PreviewingSingleInputVideoGraph(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo outputColorInfo,
      DebugViewProvider debugViewProvider,
      Listener listener,
      Executor listenerExecutor,
      long initialTimestampOffsetUs) {
    super(
        context,
        videoFrameProcessorFactory,
        outputColorInfo,
        listener,
        debugViewProvider,
        listenerExecutor,
        VideoCompositorSettings.DEFAULT,
        // Previewing needs frame render timing.
        /* renderFramesAutomatically= */ false,
        initialTimestampOffsetUs);
  }

  @Override
  public void renderOutputFrame(long renderTimeNs) {
    getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
  }
}

View File

@ -16,27 +16,34 @@
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
/** A {@link VideoGraph} that handles one input stream. */ /** A {@link VideoGraph} that handles one input stream. */
@UnstableApi @UnstableApi
public abstract class SingleInputVideoGraph implements VideoGraph { public class SingleInputVideoGraph implements VideoGraph {
private final Context context; private final Context context;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory; private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
@ -44,8 +51,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private final Listener listener; private final Listener listener;
private final DebugViewProvider debugViewProvider; private final DebugViewProvider debugViewProvider;
private final Executor listenerExecutor; private final Executor listenerExecutor;
private final List<Effect> compositionEffects;
private final boolean renderFramesAutomatically; private final boolean renderFramesAutomatically;
private final long initialTimestampOffsetUs;
@Nullable private VideoFrameProcessor videoFrameProcessor; @Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private SurfaceInfo outputSurfaceInfo; @Nullable private SurfaceInfo outputSurfaceInfo;
@ -53,6 +60,51 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private volatile boolean hasProducedFrameWithTimestampZero; private volatile boolean hasProducedFrameWithTimestampZero;
private int inputIndex; private int inputIndex;
/** A {@link VideoGraph.Factory} for {@link SingleInputVideoGraph}. */
public static final class Factory implements VideoGraph.Factory {
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/**
* A {@code Factory} for {@link SingleInputVideoGraph} that uses a {@link
* DefaultVideoFrameProcessor.Factory}.
*/
public Factory() {
this(new DefaultVideoFrameProcessor.Factory.Builder().build());
}
public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
}
@Override
public SingleInputVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
return new SingleInputVideoGraph(
context,
videoFrameProcessorFactory,
outputColorInfo,
listener,
compositionEffects,
debugViewProvider,
listenerExecutor,
videoCompositorSettings,
renderFramesAutomatically);
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
}
/** /**
* Creates an instance. * Creates an instance.
* *
@ -63,11 +115,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
VideoFrameProcessor.Factory videoFrameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
Listener listener, Listener listener,
List<Effect> compositionEffects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
boolean renderFramesAutomatically, boolean renderFramesAutomatically) {
long initialTimestampOffsetUs) {
checkState( checkState(
VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings), VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings),
"SingleInputVideoGraph does not use VideoCompositor, and therefore cannot apply" "SingleInputVideoGraph does not use VideoCompositor, and therefore cannot apply"
@ -78,8 +130,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
this.listener = listener; this.listener = listener;
this.debugViewProvider = debugViewProvider; this.debugViewProvider = debugViewProvider;
this.listenerExecutor = listenerExecutor; this.listenerExecutor = listenerExecutor;
this.compositionEffects = compositionEffects;
this.renderFramesAutomatically = renderFramesAutomatically; this.renderFramesAutomatically = renderFramesAutomatically;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.inputIndex = C.INDEX_UNSET; this.inputIndex = C.INDEX_UNSET;
} }
@ -146,12 +198,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
} }
} }
@Override
public VideoFrameProcessor getProcessor(int inputIndex) {
checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
return checkStateNotNull(videoFrameProcessor);
}
@Override @Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
this.outputSurfaceInfo = outputSurfaceInfo; this.outputSurfaceInfo = outputSurfaceInfo;
@ -165,6 +211,88 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
return hasProducedFrameWithTimestampZero; return hasProducedFrameWithTimestampZero;
} }
@Override
public boolean queueInputBitmap(
int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator) {
checkStateNotNull(videoFrameProcessor);
return videoFrameProcessor.queueInputBitmap(inputBitmap, timestampIterator);
}
@Override
public boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs) {
checkStateNotNull(videoFrameProcessor);
return videoFrameProcessor.queueInputTexture(textureId, presentationTimeUs);
}
@Override
public void setOnInputFrameProcessedListener(
int inputIndex, OnInputFrameProcessedListener listener) {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.setOnInputFrameProcessedListener(listener);
}
@Override
public void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener) {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.setOnInputSurfaceReadyListener(listener);
}
@Override
public Surface getInputSurface(int inputIndex) {
checkStateNotNull(videoFrameProcessor);
return videoFrameProcessor.getInputSurface();
}
@Override
public void registerInputStream(
int inputIndex,
@VideoFrameProcessor.InputType int inputType,
Format format,
List<Effect> effects,
long offsetToAddUs) {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.registerInputStream(
inputType,
format,
new ImmutableList.Builder<Effect>().addAll(effects).addAll(compositionEffects).build(),
offsetToAddUs);
}
@Override
public boolean registerInputFrame(int inputIndex) {
checkStateNotNull(videoFrameProcessor);
return videoFrameProcessor.registerInputFrame();
}
@Override
public int getPendingInputFrameCount(int inputIndex) {
checkStateNotNull(videoFrameProcessor);
return videoFrameProcessor.getPendingInputFrameCount();
}
@Override
public void renderOutputFrame(long renderTimeNs) {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.renderOutputFrame(renderTimeNs);
}
@Override
public void redraw() {
throw new UnsupportedOperationException();
}
@Override
public void flush() {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.flush();
}
@Override
public void signalEndOfInput(int inputIndex) {
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.signalEndOfInput();
}
@Override @Override
public void release() { public void release() {
if (released) { if (released) {
@ -177,12 +305,4 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
} }
released = true; released = true;
} }
protected int getInputIndex() {
return inputIndex;
}
protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs;
}
} }

View File

@ -71,8 +71,8 @@
} }
# Constructors and methods accessed via reflection in PlaybackVideoGraphWrapper # Constructors and methods accessed via reflection in PlaybackVideoGraphWrapper
-dontnote androidx.media3.effect.PreviewingSingleInputVideoGraph$Factory -dontnote androidx.media3.effect.SingleInputVideoGraph$Factory
-keepclasseswithmembers class androidx.media3.effect.PreviewingSingleInputVideoGraph$Factory { -keepclasseswithmembers class androidx.media3.effect.SingleInputVideoGraph$Factory {
<init>(androidx.media3.common.VideoFrameProcessor$Factory); <init>(androidx.media3.common.VideoFrameProcessor$Factory);
} }
-dontnote androidx.media3.effect.DefaultVideoFrameProcessor$Factory$Builder -dontnote androidx.media3.effect.DefaultVideoFrameProcessor$Factory$Builder

View File

@ -1278,7 +1278,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
verifyApplicationThread(); verifyApplicationThread();
try { try {
// LINT.IfChange(set_video_effects) // LINT.IfChange(set_video_effects)
Class.forName("androidx.media3.effect.PreviewingSingleInputVideoGraph$Factory") Class.forName("androidx.media3.effect.SingleInputVideoGraph$Factory")
.getConstructor(VideoFrameProcessor.Factory.class); .getConstructor(VideoFrameProcessor.Factory.class);
// LINT.ThenChange(video/PlaybackVideoGraphWrapper.java) // LINT.ThenChange(video/PlaybackVideoGraphWrapper.java)
} catch (ClassNotFoundException | NoSuchMethodException e) { } catch (ClassNotFoundException | NoSuchMethodException e) {

View File

@ -19,7 +19,6 @@ import static androidx.media3.common.VideoFrameProcessor.DROP_OUTPUT_FRAME;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.castNonNull;
import static androidx.media3.common.util.Util.contains; import static androidx.media3.common.util.Util.contains;
import static androidx.media3.common.util.Util.getMaxPendingFramesCountForMediaCodecDecoders; import static androidx.media3.common.util.Util.getMaxPendingFramesCountForMediaCodecDecoders;
import static androidx.media3.exoplayer.video.VideoSink.INPUT_TYPE_SURFACE; import static androidx.media3.exoplayer.video.VideoSink.INPUT_TYPE_SURFACE;
@ -41,7 +40,6 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
@ -67,7 +65,6 @@ import java.lang.annotation.Target;
import java.util.List; import java.util.List;
import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** /**
@ -125,7 +122,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final VideoFrameReleaseControl videoFrameReleaseControl; private final VideoFrameReleaseControl videoFrameReleaseControl;
private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory; private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory;
private PreviewingVideoGraph.@MonotonicNonNull Factory previewingVideoGraphFactory; private VideoGraph.@MonotonicNonNull Factory videoGraphFactory;
private List<Effect> compositionEffects; private List<Effect> compositionEffects;
private VideoCompositorSettings compositorSettings; private VideoCompositorSettings compositorSettings;
private Clock clock; private Clock clock;
@ -159,18 +156,17 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
} }
/** /**
* Sets the {@link PreviewingVideoGraph.Factory} that will be used for creating {@link * Sets the {@link VideoGraph.Factory} that will be used for creating {@link VideoGraph}
* PreviewingVideoGraph} instances. * instances.
* *
* <p>By default, the {@code PreviewingSingleInputVideoGraph.Factory} will be used. * <p>By default, the {@code SingleInputVideoGraph.Factory} will be used.
* *
* @param previewingVideoGraphFactory The {@link PreviewingVideoGraph.Factory}. * @param videoGraphFactory The {@link VideoGraph.Factory}.
* @return This builder, for convenience. * @return This builder, for convenience.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
public Builder setPreviewingVideoGraphFactory( public Builder setVideoGraphFactory(VideoGraph.Factory videoGraphFactory) {
PreviewingVideoGraph.Factory previewingVideoGraphFactory) { this.videoGraphFactory = videoGraphFactory;
this.previewingVideoGraphFactory = previewingVideoGraphFactory;
return this; return this;
} }
@ -235,12 +231,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
public PlaybackVideoGraphWrapper build() { public PlaybackVideoGraphWrapper build() {
checkState(!built); checkState(!built);
if (previewingVideoGraphFactory == null) { if (videoGraphFactory == null) {
if (videoFrameProcessorFactory == null) { if (videoFrameProcessorFactory == null) {
videoFrameProcessorFactory = new ReflectiveDefaultVideoFrameProcessorFactory(); videoFrameProcessorFactory = new ReflectiveDefaultVideoFrameProcessorFactory();
} }
previewingVideoGraphFactory = videoGraphFactory = new ReflectiveSingleInputVideoGraphFactory(videoFrameProcessorFactory);
new ReflectivePreviewingSingleInputVideoGraphFactory(videoFrameProcessorFactory);
} }
PlaybackVideoGraphWrapper playbackVideoGraphWrapper = new PlaybackVideoGraphWrapper(this); PlaybackVideoGraphWrapper playbackVideoGraphWrapper = new PlaybackVideoGraphWrapper(this);
built = true; built = true;
@ -270,7 +265,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
*/ */
private final TimedValueQueue<Long> streamStartPositionsUs; private final TimedValueQueue<Long> streamStartPositionsUs;
private final PreviewingVideoGraph.Factory previewingVideoGraphFactory; private final VideoGraph.Factory videoGraphFactory;
private final SparseArray<InputVideoSink> inputVideoSinks; private final SparseArray<InputVideoSink> inputVideoSinks;
private final List<Effect> compositionEffects; private final List<Effect> compositionEffects;
private final VideoCompositorSettings compositorSettings; private final VideoCompositorSettings compositorSettings;
@ -282,7 +277,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private Format videoGraphOutputFormat; private Format videoGraphOutputFormat;
private @MonotonicNonNull HandlerWrapper handler; private @MonotonicNonNull HandlerWrapper handler;
private @MonotonicNonNull PreviewingVideoGraph videoGraph; private @MonotonicNonNull VideoGraph videoGraph;
private long outputStreamStartPositionUs; private long outputStreamStartPositionUs;
@Nullable private Pair<Surface, Size> currentSurfaceAndSize; @Nullable private Pair<Surface, Size> currentSurfaceAndSize;
private int pendingFlushCount; private int pendingFlushCount;
@ -313,7 +308,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private PlaybackVideoGraphWrapper(Builder builder) { private PlaybackVideoGraphWrapper(Builder builder) {
context = builder.context; context = builder.context;
streamStartPositionsUs = new TimedValueQueue<>(); streamStartPositionsUs = new TimedValueQueue<>();
previewingVideoGraphFactory = checkStateNotNull(builder.previewingVideoGraphFactory); videoGraphFactory = checkStateNotNull(builder.videoGraphFactory);
inputVideoSinks = new SparseArray<>(); inputVideoSinks = new SparseArray<>();
compositionEffects = builder.compositionEffects; compositionEffects = builder.compositionEffects;
compositorSettings = builder.compositorSettings; compositorSettings = builder.compositorSettings;
@ -469,8 +464,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// Internal methods // Internal methods
@Nullable private boolean registerInput(Format sourceFormat, int inputIndex)
private VideoFrameProcessor registerInput(Format sourceFormat, int inputIndex)
throws VideoSink.VideoSinkException { throws VideoSink.VideoSinkException {
if (inputIndex == PRIMARY_SEQUENCE_INDEX) { if (inputIndex == PRIMARY_SEQUENCE_INDEX) {
checkState(state == STATE_CREATED); checkState(state == STATE_CREATED);
@ -493,7 +487,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
handler = clock.createHandler(checkStateNotNull(Looper.myLooper()), /* callback= */ null); handler = clock.createHandler(checkStateNotNull(Looper.myLooper()), /* callback= */ null);
try { try {
videoGraph = videoGraph =
previewingVideoGraphFactory.create( videoGraphFactory.create(
context, context,
outputColorInfo, outputColorInfo,
DebugViewProvider.NONE, DebugViewProvider.NONE,
@ -501,12 +495,12 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
/* listenerExecutor= */ handler::post, /* listenerExecutor= */ handler::post,
compositorSettings, compositorSettings,
compositionEffects, compositionEffects,
/* initialTimestampOffsetUs= */ 0); /* initialTimestampOffsetUs= */ 0,
/* renderFramesAutomatically= */ false);
videoGraph.initialize(); videoGraph.initialize();
} catch (VideoFrameProcessingException e) { } catch (VideoFrameProcessingException e) {
throw new VideoSink.VideoSinkException(e, sourceFormat); throw new VideoSink.VideoSinkException(e, sourceFormat);
} }
if (currentSurfaceAndSize != null) { if (currentSurfaceAndSize != null) {
Surface surface = currentSurfaceAndSize.first; Surface surface = currentSurfaceAndSize.first;
Size size = currentSurfaceAndSize.second; Size size = currentSurfaceAndSize.second;
@ -517,7 +511,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
} else { } else {
if (!isInitialized()) { if (!isInitialized()) {
// Make sure the primary sequence is initialized first. // Make sure the primary sequence is initialized first.
return null; return false;
} }
} }
@ -529,7 +523,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
registeredVideoInputCount++; registeredVideoInputCount++;
defaultVideoSink.setListener( defaultVideoSink.setListener(
new DefaultVideoSinkListener(), /* executor= */ checkNotNull(handler)::post); new DefaultVideoSinkListener(), /* executor= */ checkNotNull(handler)::post);
return videoGraph.getProcessor(inputIndex); return true;
} }
private boolean isInitialized() { private boolean isInitialized() {
@ -632,7 +626,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final int inputIndex; private final int inputIndex;
private ImmutableList<Effect> videoEffects; private ImmutableList<Effect> videoEffects;
@Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private Format inputFormat; @Nullable private Format inputFormat;
private @InputType int inputType; private @InputType int inputType;
private long inputBufferTimestampAdjustmentUs; private long inputBufferTimestampAdjustmentUs;
@ -645,6 +638,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private VideoSink.Listener listener; private VideoSink.Listener listener;
private Executor listenerExecutor; private Executor listenerExecutor;
private boolean signaledEndOfStream; private boolean signaledEndOfStream;
private boolean isInitialized;
/** Creates a new instance. */ /** Creates a new instance. */
public InputVideoSink(Context context, int inputIndex) { public InputVideoSink(Context context, int inputIndex) {
@ -684,26 +678,25 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Override @Override
public boolean initialize(Format sourceFormat) throws VideoSinkException { public boolean initialize(Format sourceFormat) throws VideoSinkException {
checkState(!isInitialized()); checkState(!isInitialized());
videoFrameProcessor = PlaybackVideoGraphWrapper.this.registerInput(sourceFormat, inputIndex); isInitialized = PlaybackVideoGraphWrapper.this.registerInput(sourceFormat, inputIndex);
return videoFrameProcessor != null; return isInitialized;
} }
@Override @Override
@EnsuresNonNullIf(result = true, expression = "videoFrameProcessor")
public boolean isInitialized() { public boolean isInitialized() {
return videoFrameProcessor != null; return isInitialized;
} }
@Override @Override
public void redraw() { public void redraw() {
checkState(isInitialized()); checkState(isInitialized());
castNonNull(videoFrameProcessor).redraw(); checkNotNull(videoGraph).redraw();
} }
@Override @Override
public void flush(boolean resetPosition) { public void flush(boolean resetPosition) {
if (isInitialized()) { if (isInitialized()) {
videoFrameProcessor.flush(); checkNotNull(videoGraph).flush();
} }
lastBufferPresentationTimeUs = C.TIME_UNSET; lastBufferPresentationTimeUs = C.TIME_UNSET;
PlaybackVideoGraphWrapper.this.flush(resetPosition); PlaybackVideoGraphWrapper.this.flush(resetPosition);
@ -735,7 +728,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
return; return;
} }
if (isInitialized()) { if (isInitialized()) {
videoFrameProcessor.signalEndOfInput(); checkNotNull(videoGraph).signalEndOfInput(inputIndex);
signaledEndOfStream = true; signaledEndOfStream = true;
} }
} }
@ -767,7 +760,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
checkState(isInitialized()); checkState(isInitialized());
return checkStateNotNull(videoFrameProcessor).getInputSurface(); return checkNotNull(videoGraph).getInputSurface(inputIndex);
} }
@Override @Override
@ -841,11 +834,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
if (!shouldRenderToInputVideoSink()) { if (!shouldRenderToInputVideoSink()) {
return false; return false;
} }
if (checkStateNotNull(videoFrameProcessor).getPendingInputFrameCount() if (checkNotNull(videoGraph).getPendingInputFrameCount(inputIndex)
>= videoFrameProcessorMaxPendingFrameCount) { >= videoFrameProcessorMaxPendingFrameCount) {
return false; return false;
} }
if (!checkStateNotNull(videoFrameProcessor).registerInputFrame()) { if (!checkNotNull(videoGraph).registerInputFrame(inputIndex)) {
return false; return false;
} }
@ -871,7 +864,8 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) { public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
checkState(isInitialized()); checkState(isInitialized());
if (!shouldRenderToInputVideoSink() if (!shouldRenderToInputVideoSink()
|| !checkNotNull(videoFrameProcessor).queueInputBitmap(inputBitmap, timestampIterator)) { || !checkNotNull(videoGraph)
.queueInputBitmap(inputIndex, inputBitmap, timestampIterator)) {
return false; return false;
} }
@ -947,7 +941,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
* <p>Effects are pending until a new input stream is registered. * <p>Effects are pending until a new input stream is registered.
*/ */
private void setPendingVideoEffects(List<Effect> newVideoEffects) { private void setPendingVideoEffects(List<Effect> newVideoEffects) {
if (previewingVideoGraphFactory.supportsMultipleInputs()) { if (videoGraphFactory.supportsMultipleInputs()) {
this.videoEffects = ImmutableList.copyOf(newVideoEffects); this.videoEffects = ImmutableList.copyOf(newVideoEffects);
} else { } else {
this.videoEffects = this.videoEffects =
@ -964,9 +958,9 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
.buildUpon() .buildUpon()
.setColorInfo(getAdjustedInputColorInfo(inputFormat.colorInfo)) .setColorInfo(getAdjustedInputColorInfo(inputFormat.colorInfo))
.build(); .build();
checkStateNotNull(videoFrameProcessor) checkNotNull(videoGraph)
.registerInputStream( .registerInputStream(
inputType, adjustedInputFormat, videoEffects, /* offsetToAddUs= */ 0); inputIndex, inputType, adjustedInputFormat, videoEffects, /* offsetToAddUs= */ 0);
} }
} }
@ -1002,22 +996,18 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
} }
} }
/** /** Delays reflection for loading a {@link VideoGraph.Factory SingleInputVideoGraph} instance. */
* Delays reflection for loading a {@linkplain PreviewingVideoGraph.Factory private static final class ReflectiveSingleInputVideoGraphFactory implements VideoGraph.Factory {
* PreviewingSingleInputVideoGraph} instance.
*/
private static final class ReflectivePreviewingSingleInputVideoGraphFactory
implements PreviewingVideoGraph.Factory {
private final VideoFrameProcessor.Factory videoFrameProcessorFactory; private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
public ReflectivePreviewingSingleInputVideoGraphFactory( public ReflectiveSingleInputVideoGraphFactory(
VideoFrameProcessor.Factory videoFrameProcessorFactory) { VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory; this.videoFrameProcessorFactory = videoFrameProcessorFactory;
} }
@Override @Override
public PreviewingVideoGraph create( public VideoGraph create(
Context context, Context context,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@ -1025,32 +1015,34 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs) long initialTimestampOffsetUs,
throws VideoFrameProcessingException { boolean renderFramesAutomatically) {
VideoGraph.Factory factory;
try { try {
// LINT.IfChange // LINT.IfChange
Class<?> previewingSingleInputVideoGraphFactoryClass = Class<?> singleInputVideoGraphFactoryClass =
Class.forName("androidx.media3.effect.PreviewingSingleInputVideoGraph$Factory"); Class.forName("androidx.media3.effect.SingleInputVideoGraph$Factory");
PreviewingVideoGraph.Factory factory = factory =
(PreviewingVideoGraph.Factory) (VideoGraph.Factory)
previewingSingleInputVideoGraphFactoryClass singleInputVideoGraphFactoryClass
.getConstructor(VideoFrameProcessor.Factory.class) .getConstructor(VideoFrameProcessor.Factory.class)
.newInstance(videoFrameProcessorFactory); .newInstance(videoFrameProcessorFactory);
// LINT.ThenChange( // LINT.ThenChange(
// ../../../../../../../proguard-rules.txt, // ../../../../../../../proguard-rules.txt,
// ../ExoPlayerImpl.java:set_video_effects) // ../ExoPlayerImpl.java:set_video_effects)
return factory.create(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs);
} catch (Exception e) { } catch (Exception e) {
throw VideoFrameProcessingException.from(e); throw new IllegalStateException(e);
} }
return factory.create(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs,
renderFramesAutomatically);
} }
@Override @Override

View File

@ -17,24 +17,21 @@ package androidx.media3.exoplayer.video;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.TimestampIterator;
import androidx.test.core.app.ApplicationProvider; import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -42,6 +39,7 @@ import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito; import org.mockito.Mockito;
/** Unit test for {@link PlaybackVideoGraphWrapper}. */ /** Unit test for {@link PlaybackVideoGraphWrapper}. */
@ -61,7 +59,7 @@ public final class PlaybackVideoGraphWrapperTest {
@Test @Test
public void initializeSink_calledTwice_throws() throws VideoSink.VideoSinkException { public void initializeSink_calledTwice_throws() throws VideoSink.VideoSinkException {
PlaybackVideoGraphWrapper playbackVideoGraphWrapper = PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(new FakeVideoFrameProcessor()); createPlaybackVideoGraphWrapper(new TestVideoGraphFactory());
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0); VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
sink.initialize(new Format.Builder().build()); sink.initialize(new Format.Builder().build());
@ -73,27 +71,27 @@ public final class PlaybackVideoGraphWrapperTest {
ImmutableList<Effect> firstEffects = ImmutableList.of(Mockito.mock(Effect.class)); ImmutableList<Effect> firstEffects = ImmutableList.of(Mockito.mock(Effect.class));
ImmutableList<Effect> secondEffects = ImmutableList<Effect> secondEffects =
ImmutableList.of(Mockito.mock(Effect.class), Mockito.mock(Effect.class)); ImmutableList.of(Mockito.mock(Effect.class), Mockito.mock(Effect.class));
FakeVideoFrameProcessor videoFrameProcessor = new FakeVideoFrameProcessor(); TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper = PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(videoFrameProcessor); createPlaybackVideoGraphWrapper(testVideoGraphFactory);
Format format = new Format.Builder().build(); Format format = new Format.Builder().build();
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0); VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
sink.initialize(format); sink.initialize(format);
sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, firstEffects); sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, firstEffects);
assertThat(videoFrameProcessor.registeredEffects).isEqualTo(firstEffects);
sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, secondEffects); sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, secondEffects);
assertThat(videoFrameProcessor.registeredEffects).isEqualTo(secondEffects);
sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, ImmutableList.of()); sink.onInputStreamChanged(VideoSink.INPUT_TYPE_SURFACE, format, ImmutableList.of());
assertThat(videoFrameProcessor.registeredEffects).isEmpty(); testVideoGraphFactory.verifyRegisteredEffectsMatches(/* invocationTimes= */ 3);
assertThat(testVideoGraphFactory.getCapturedEffects())
.isEqualTo(ImmutableList.of(firstEffects, secondEffects, ImmutableList.of()));
} }
private static PlaybackVideoGraphWrapper createPlaybackVideoGraphWrapper( private static PlaybackVideoGraphWrapper createPlaybackVideoGraphWrapper(
VideoFrameProcessor videoFrameProcessor) { VideoGraph.Factory videoGraphFactory) {
Context context = ApplicationProvider.getApplicationContext(); Context context = ApplicationProvider.getApplicationContext();
return new PlaybackVideoGraphWrapper.Builder(context, createVideoFrameReleaseControl()) return new PlaybackVideoGraphWrapper.Builder(context, createVideoFrameReleaseControl())
.setPreviewingVideoGraphFactory(new TestPreviewingVideoGraphFactory(videoFrameProcessor)) .setVideoGraphFactory(videoGraphFactory)
.build(); .build();
} }
@ -126,79 +124,16 @@ public final class PlaybackVideoGraphWrapperTest {
context, frameTimingEvaluator, /* allowedJoiningTimeMs= */ 0); context, frameTimingEvaluator, /* allowedJoiningTimeMs= */ 0);
} }
private static class FakeVideoFrameProcessor implements VideoFrameProcessor { private static class TestVideoGraphFactory implements VideoGraph.Factory {
List<Effect> registeredEffects = ImmutableList.of();
@Override
public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
return false;
}
@Override
public boolean queueInputTexture(int textureId, long presentationTimeUs) {
return false;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {}
@Override
public void setOnInputSurfaceReadyListener(Runnable listener) {}
@Override
public Surface getInputSurface() {
return null;
}
@Override
public void redraw() {}
@Override
public void registerInputStream(
@InputType int inputType, Format format, List<Effect> effects, long offsetToAddUs) {
registeredEffects = effects;
}
@Override
public boolean registerInputFrame() {
return true;
}
@Override
public int getPendingInputFrameCount() {
return 0;
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {}
@Override
public void renderOutputFrame(long renderTimeNs) {}
@Override
public void signalEndOfInput() {}
@Override
public void flush() {}
@Override
public void release() {}
}
private static class TestPreviewingVideoGraphFactory implements PreviewingVideoGraph.Factory {
// Using a mock but we don't assert mock interactions. If needed to assert interactions, we // Using a mock but we don't assert mock interactions. If needed to assert interactions, we
// should a fake instead. // should a fake instead.
private final PreviewingVideoGraph previewingVideoGraph = private final VideoGraph videoGraph = Mockito.mock(VideoGraph.class);
Mockito.mock(PreviewingVideoGraph.class);
private final VideoFrameProcessor videoFrameProcessor;
public TestPreviewingVideoGraphFactory(VideoFrameProcessor videoFrameProcessor) { @SuppressWarnings("unchecked")
this.videoFrameProcessor = videoFrameProcessor; private final ArgumentCaptor<List<Effect>> effectsCaptor = ArgumentCaptor.forClass(List.class);
}
@Override @Override
public PreviewingVideoGraph create( public VideoGraph create(
Context context, Context context,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@ -206,14 +141,30 @@ public final class PlaybackVideoGraphWrapperTest {
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs,
when(previewingVideoGraph.getProcessor(anyInt())).thenReturn(videoFrameProcessor); boolean renderFramesAutomatically) {
return previewingVideoGraph;
when(videoGraph.registerInputFrame(anyInt())).thenReturn(true);
return videoGraph;
} }
@Override @Override
public boolean supportsMultipleInputs() { public boolean supportsMultipleInputs() {
return false; return false;
} }
public void verifyRegisteredEffectsMatches(int invocationTimes) {
verify(videoGraph, times(invocationTimes))
.registerInputStream(
/* inputIndex= */ anyInt(),
/* inputType= */ eq(VideoSink.INPUT_TYPE_SURFACE),
/* format= */ any(),
effectsCaptor.capture(),
/* offsetToAddUs= */ anyLong());
}
public List<List<Effect>> getCapturedEffects() {
return effectsCaptor.getAllValues();
}
} }
} }

View File

@ -26,7 +26,6 @@ import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException; import androidx.media3.common.PlaybackException;
import androidx.media3.effect.GlEffect; import androidx.media3.effect.GlEffect;
import androidx.media3.effect.MultipleInputVideoGraph; import androidx.media3.effect.MultipleInputVideoGraph;
import androidx.media3.effect.PreviewingMultipleInputVideoGraph;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
@ -205,8 +204,7 @@ public class CompositionMultipleSequencePlaybackTest {
() -> { () -> {
player = player =
new CompositionPlayer.Builder(context) new CompositionPlayer.Builder(context)
.setPreviewingVideoGraphFactory( .setVideoGraphFactory(new MultipleInputVideoGraph.Factory())
new PreviewingMultipleInputVideoGraph.Factory())
.build(); .build();
player.addListener(playerTestListener); player.addListener(playerTestListener);
player.setComposition(composition); player.setComposition(composition);

View File

@ -39,14 +39,13 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException; import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player; import androidx.media3.common.Player;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.NullableType; import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffect; import androidx.media3.effect.GlEffect;
import androidx.media3.effect.PreviewingSingleInputVideoGraph; import androidx.media3.effect.SingleInputVideoGraph;
import androidx.test.ext.junit.rules.ActivityScenarioRule; import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -868,8 +867,7 @@ public class CompositionPlayerSeekTest {
() -> { () -> {
compositionPlayer = compositionPlayer =
new CompositionPlayer.Builder(applicationContext) new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory( .setVideoGraphFactory(new ListenerCapturingVideoGraphFactory(videoGraphEnded))
new ListenerCapturingVideoGraphFactory(videoGraphEnded))
.setVideoPrewarmingEnabled(videoPrewarmingEnabled) .setVideoPrewarmingEnabled(videoPrewarmingEnabled)
.build(); .build();
// Set a surface on the player even though there is no UI on this test. We need a // Set a surface on the player even though there is no UI on this test. We need a
@ -945,8 +943,7 @@ public class CompositionPlayerSeekTest {
() -> { () -> {
compositionPlayer = compositionPlayer =
new CompositionPlayer.Builder(applicationContext) new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory( .setVideoGraphFactory(new ListenerCapturingVideoGraphFactory(videoGraphEnded))
new ListenerCapturingVideoGraphFactory(videoGraphEnded))
.build(); .build();
// Set a surface on the player even though there is no UI on this test. We need a // Set a surface on the player even though there is no UI on this test. We need a
// surface otherwise the player will skip/drop video frames. // surface otherwise the player will skip/drop video frames.
@ -1037,19 +1034,18 @@ public class CompositionPlayerSeekTest {
.build(); .build();
} }
private static final class ListenerCapturingVideoGraphFactory private static final class ListenerCapturingVideoGraphFactory implements VideoGraph.Factory {
implements PreviewingVideoGraph.Factory {
private final PreviewingSingleInputVideoGraph.Factory singleInputVideoGraphFactory; private final VideoGraph.Factory singleInputVideoGraphFactory;
private final CountDownLatch videoGraphEnded; private final CountDownLatch videoGraphEnded;
public ListenerCapturingVideoGraphFactory(CountDownLatch videoGraphEnded) { public ListenerCapturingVideoGraphFactory(CountDownLatch videoGraphEnded) {
singleInputVideoGraphFactory = new PreviewingSingleInputVideoGraph.Factory(); singleInputVideoGraphFactory = new SingleInputVideoGraph.Factory();
this.videoGraphEnded = videoGraphEnded; this.videoGraphEnded = videoGraphEnded;
} }
@Override @Override
public PreviewingVideoGraph create( public VideoGraph create(
Context context, Context context,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@ -1057,7 +1053,8 @@ public class CompositionPlayerSeekTest {
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
return singleInputVideoGraphFactory.create( return singleInputVideoGraphFactory.create(
context, context,
outputColorInfo, outputColorInfo,
@ -1093,7 +1090,8 @@ public class CompositionPlayerSeekTest {
listenerExecutor, listenerExecutor,
videoCompositorSettings, videoCompositorSettings,
compositionEffects, compositionEffects,
initialTimestampOffsetUs); initialTimestampOffsetUs,
renderFramesAutomatically);
} }
@Override @Override

View File

@ -15,13 +15,11 @@
*/ */
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED;
import static androidx.media3.common.util.Util.isRunningOnEmulator; import static androidx.media3.common.util.Util.isRunningOnEmulator;
import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_ASSET; import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET; import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import static com.google.common.util.concurrent.Futures.immediateFuture; import static com.google.common.util.concurrent.Futures.immediateFuture;
import static org.junit.Assert.assertThrows;
import android.app.Instrumentation; import android.app.Instrumentation;
import android.content.Context; import android.content.Context;
@ -30,7 +28,6 @@ import android.util.Pair;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import android.view.TextureView; import android.view.TextureView;
import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
@ -38,12 +35,7 @@ import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.audio.AudioProcessor; import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.util.SystemClock; import androidx.media3.common.util.SystemClock;
@ -51,7 +43,8 @@ import androidx.media3.common.util.Util;
import androidx.media3.datasource.AssetDataSource; import androidx.media3.datasource.AssetDataSource;
import androidx.media3.datasource.DataSourceUtil; import androidx.media3.datasource.DataSourceUtil;
import androidx.media3.datasource.DataSpec; import androidx.media3.datasource.DataSpec;
import androidx.media3.effect.PreviewingSingleInputVideoGraph; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.SingleInputVideoGraph;
import androidx.media3.exoplayer.RendererCapabilities; import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.image.BitmapFactoryImageDecoder; import androidx.media3.exoplayer.image.BitmapFactoryImageDecoder;
import androidx.media3.exoplayer.image.ImageDecoder; import androidx.media3.exoplayer.image.ImageDecoder;
@ -456,53 +449,6 @@ public class CompositionPlayerTest {
listener.waitUntilPlayerEnded(); listener.waitUntilPlayerEnded();
} }
@Test
public void playback_videoGraphWrapperFails_playerRaisesError() {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem video =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
.setDurationUs(MP4_ASSET.videoDurationUs)
.build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(
new PreviewingVideoGraph.Factory() {
@Override
public PreviewingVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException {
throw new VideoFrameProcessingException(
"Test video graph failed to initialize");
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
})
.build();
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence.Builder(video).build()).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
PlaybackException thrownException =
assertThrows(PlaybackException.class, listener::waitUntilPlayerEnded);
assertThat(thrownException.errorCode).isEqualTo(ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
@Test @Test
public void release_videoGraphWrapperFailsDuringRelease_playerDoesNotRaiseError() public void release_videoGraphWrapperFailsDuringRelease_playerDoesNotRaiseError()
throws Exception { throws Exception {
@ -515,7 +461,34 @@ public class CompositionPlayerTest {
() -> { () -> {
compositionPlayer = compositionPlayer =
new CompositionPlayer.Builder(applicationContext) new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(new FailingReleaseVideoGraph.Factory()) .setVideoGraphFactory(
new VideoGraph.Factory() {
@Override
public VideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
return new FailingReleaseVideoGraph(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
renderFramesAutomatically);
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
})
.build(); .build();
compositionPlayer.addListener(playerTestListener); compositionPlayer.addListener(playerTestListener);
compositionPlayer.setComposition( compositionPlayer.setComposition(
@ -559,106 +532,31 @@ public class CompositionPlayerTest {
} }
} }
private static final class FailingReleaseVideoGraph extends ForwardingVideoGraph { private static final class FailingReleaseVideoGraph extends SingleInputVideoGraph {
public static final class Factory implements PreviewingVideoGraph.Factory { public FailingReleaseVideoGraph(
@Override
public PreviewingVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs)
throws VideoFrameProcessingException {
return new FailingReleaseVideoGraph(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs);
}
@Override
public boolean supportsMultipleInputs() {
return false;
}
}
private FailingReleaseVideoGraph(
Context context, Context context,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
VideoGraph.Listener listener, Listener listener,
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, boolean renderFramesAutomatically) {
long initialTimestampOffsetUs) {
super( super(
new PreviewingSingleInputVideoGraph.Factory() context,
.create( new DefaultVideoFrameProcessor.Factory.Builder().build(),
context, outputColorInfo,
outputColorInfo, listener,
debugViewProvider, /* compositionEffects= */ ImmutableList.of(),
listener, debugViewProvider,
listenerExecutor, listenerExecutor,
videoCompositorSettings, videoCompositorSettings,
compositionEffects, renderFramesAutomatically);
initialTimestampOffsetUs));
} }
@Override @Override
public void release() { public void release() {
super.release();
throw new RuntimeException("VideoGraph release error"); throw new RuntimeException("VideoGraph release error");
} }
} }
private static class ForwardingVideoGraph implements PreviewingVideoGraph {
private final PreviewingVideoGraph videoGraph;
public ForwardingVideoGraph(PreviewingVideoGraph videoGraph) {
this.videoGraph = videoGraph;
}
@Override
public void initialize() throws VideoFrameProcessingException {
videoGraph.initialize();
}
@Override
public void registerInput(int inputIndex) throws VideoFrameProcessingException {
videoGraph.registerInput(inputIndex);
}
@Override
public VideoFrameProcessor getProcessor(int inputId) {
return videoGraph.getProcessor(inputId);
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
videoGraph.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public boolean hasProducedFrameWithTimestampZero() {
return videoGraph.hasProducedFrameWithTimestampZero();
}
@Override
public void release() {
videoGraph.release();
}
@Override
public void renderOutputFrame(long renderTimeNs) {
videoGraph.renderOutputFrame(renderTimeNs);
}
}
} }

View File

@ -43,7 +43,7 @@ import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.MatrixTransformation; import androidx.media3.effect.MatrixTransformation;
import androidx.media3.effect.PreviewingMultipleInputVideoGraph; import androidx.media3.effect.MultipleInputVideoGraph;
import androidx.media3.effect.StaticOverlaySettings; import androidx.media3.effect.StaticOverlaySettings;
import androidx.media3.transformer.Composition; import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer; import androidx.media3.transformer.CompositionPlayer;
@ -216,8 +216,7 @@ public class CompositionPlayerPixelTest {
() -> { () -> {
player = player =
new CompositionPlayer.Builder(context) new CompositionPlayer.Builder(context)
.setPreviewingVideoGraphFactory( .setVideoGraphFactory(new MultipleInputVideoGraph.Factory())
new PreviewingMultipleInputVideoGraph.Factory())
.build(); .build();
outputImageReader.setOnImageAvailableListener( outputImageReader.setOnImageAvailableListener(
imageReader -> { imageReader -> {
@ -316,8 +315,7 @@ public class CompositionPlayerPixelTest {
() -> { () -> {
player = player =
new CompositionPlayer.Builder(context) new CompositionPlayer.Builder(context)
.setPreviewingVideoGraphFactory( .setVideoGraphFactory(new MultipleInputVideoGraph.Factory())
new PreviewingMultipleInputVideoGraph.Factory())
.build(); .build();
outputImageReader.setOnImageAvailableListener( outputImageReader.setOnImageAvailableListener(
imageReader -> { imageReader -> {

View File

@ -41,10 +41,10 @@ import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException; import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player; import androidx.media3.common.Player;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SimpleBasePlayer; import androidx.media3.common.SimpleBasePlayer;
import androidx.media3.common.Timeline; import androidx.media3.common.Timeline;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.VideoSize; import androidx.media3.common.VideoSize;
import androidx.media3.common.audio.SpeedProvider; import androidx.media3.common.audio.SpeedProvider;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
@ -53,7 +53,7 @@ import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.PreviewingSingleInputVideoGraph; import androidx.media3.effect.SingleInputVideoGraph;
import androidx.media3.effect.TimestampAdjustment; import androidx.media3.effect.TimestampAdjustment;
import androidx.media3.exoplayer.ExoPlaybackException; import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.ExoPlayer; import androidx.media3.exoplayer.ExoPlayer;
@ -123,7 +123,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
private ImageDecoder.Factory imageDecoderFactory; private ImageDecoder.Factory imageDecoderFactory;
private boolean videoPrewarmingEnabled; private boolean videoPrewarmingEnabled;
private Clock clock; private Clock clock;
private PreviewingVideoGraph.@MonotonicNonNull Factory previewingVideoGraphFactory; private VideoGraph.@MonotonicNonNull Factory videoGraphFactory;
private boolean built; private boolean built;
/** /**
@ -232,18 +232,17 @@ public final class CompositionPlayer extends SimpleBasePlayer
} }
/** /**
* Sets the {@link PreviewingVideoGraph.Factory} that will be used by the player. * Sets the {@link VideoGraph.Factory} that will be used by the player.
* *
* <p>By default, a {@link PreviewingSingleInputVideoGraph.Factory} is used. * <p>By default, a {@link SingleInputVideoGraph.Factory} is used.
* *
* @param previewingVideoGraphFactory The {@link PreviewingVideoGraph.Factory}. * @param videoGraphFactory The {@link VideoGraph.Factory}.
* @return This builder, for convenience. * @return This builder, for convenience.
*/ */
@VisibleForTesting @VisibleForTesting
@CanIgnoreReturnValue @CanIgnoreReturnValue
public Builder setPreviewingVideoGraphFactory( public Builder setVideoGraphFactory(VideoGraph.Factory videoGraphFactory) {
PreviewingVideoGraph.Factory previewingVideoGraphFactory) { this.videoGraphFactory = videoGraphFactory;
this.previewingVideoGraphFactory = previewingVideoGraphFactory;
return this; return this;
} }
@ -262,8 +261,8 @@ public final class CompositionPlayer extends SimpleBasePlayer
if (audioSink == null) { if (audioSink == null) {
audioSink = new DefaultAudioSink.Builder(context).build(); audioSink = new DefaultAudioSink.Builder(context).build();
} }
if (previewingVideoGraphFactory == null) { if (videoGraphFactory == null) {
previewingVideoGraphFactory = new PreviewingSingleInputVideoGraph.Factory(); videoGraphFactory = new SingleInputVideoGraph.Factory();
} }
CompositionPlayer compositionPlayer = new CompositionPlayer(this); CompositionPlayer compositionPlayer = new CompositionPlayer(this);
built = true; built = true;
@ -309,7 +308,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
private final AudioSink finalAudioSink; private final AudioSink finalAudioSink;
private final MediaSource.Factory mediaSourceFactory; private final MediaSource.Factory mediaSourceFactory;
private final ImageDecoder.Factory imageDecoderFactory; private final ImageDecoder.Factory imageDecoderFactory;
private final PreviewingVideoGraph.Factory previewingVideoGraphFactory; private final VideoGraph.Factory videoGraphFactory;
private final boolean videoPrewarmingEnabled; private final boolean videoPrewarmingEnabled;
private final HandlerWrapper compositionInternalListenerHandler; private final HandlerWrapper compositionInternalListenerHandler;
@ -350,7 +349,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
finalAudioSink = checkNotNull(builder.audioSink); finalAudioSink = checkNotNull(builder.audioSink);
mediaSourceFactory = builder.mediaSourceFactory; mediaSourceFactory = builder.mediaSourceFactory;
imageDecoderFactory = builder.imageDecoderFactory; imageDecoderFactory = builder.imageDecoderFactory;
previewingVideoGraphFactory = checkNotNull(builder.previewingVideoGraphFactory); videoGraphFactory = checkNotNull(builder.videoGraphFactory);
videoPrewarmingEnabled = builder.videoPrewarmingEnabled; videoPrewarmingEnabled = builder.videoPrewarmingEnabled;
compositionInternalListenerHandler = clock.createHandler(builder.looper, /* callback= */ null); compositionInternalListenerHandler = clock.createHandler(builder.looper, /* callback= */ null);
videoTracksSelected = new SparseBooleanArray(); videoTracksSelected = new SparseBooleanArray();
@ -378,7 +377,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
checkState(this.composition == null); checkState(this.composition == null);
composition = deactivateSpeedAdjustingVideoEffects(composition); composition = deactivateSpeedAdjustingVideoEffects(composition);
if (composition.sequences.size() > 1 && !previewingVideoGraphFactory.supportsMultipleInputs()) { if (composition.sequences.size() > 1 && !videoGraphFactory.supportsMultipleInputs()) {
Log.w(TAG, "Setting multi-sequence Composition with single input video graph."); Log.w(TAG, "Setting multi-sequence Composition with single input video graph.");
} }
@ -727,7 +726,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
context, new CompositionFrameTimingEvaluator(), /* allowedJoiningTimeMs= */ 0); context, new CompositionFrameTimingEvaluator(), /* allowedJoiningTimeMs= */ 0);
playbackVideoGraphWrapper = playbackVideoGraphWrapper =
new PlaybackVideoGraphWrapper.Builder(context, videoFrameReleaseControl) new PlaybackVideoGraphWrapper.Builder(context, videoFrameReleaseControl)
.setPreviewingVideoGraphFactory(checkNotNull(previewingVideoGraphFactory)) .setVideoGraphFactory(checkNotNull(videoGraphFactory))
.setCompositorSettings(composition.videoCompositorSettings) .setCompositorSettings(composition.videoCompositorSettings)
.setCompositionEffects(composition.effects.videoEffects) .setCompositionEffects(composition.effects.videoEffects)
.setClock(clock) .setClock(clock)

View File

@ -1,113 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME;
import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.effect.MultipleInputVideoGraph;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.Executor;
/**
* A {@link TransformerVideoGraph Transformer}-specific implementation of {@link
* MultipleInputVideoGraph}.
*/
/* package */ final class TransformerMultipleInputVideoGraph extends MultipleInputVideoGraph
implements TransformerVideoGraph {
/** A factory for creating {@link TransformerMultipleInputVideoGraph} instances. */
public static final class Factory implements TransformerVideoGraph.Factory {
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
}
@Override
public TransformerMultipleInputVideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
return new TransformerMultipleInputVideoGraph(
context,
videoFrameProcessorFactory,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs,
renderFramesAutomatically);
}
}
private TransformerMultipleInputVideoGraph(
Context context,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
super(
context,
videoFrameProcessorFactory,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs,
renderFramesAutomatically);
}
@Override
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
registerInput(inputIndex);
return new VideoFrameProcessingWrapper(
getProcessor(inputIndex),
/* postProcessingEffects= */ ImmutableList.of(),
getInitialTimestampOffsetUs());
}
@Override
public void renderOutputFrameWithMediaPresentationTime() {
getCompositionVideoFrameProcessor()
.renderOutputFrame(RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME);
}
}

View File

@ -1,116 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME;
import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.effect.SingleInputVideoGraph;
import java.util.List;
import java.util.concurrent.Executor;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
 * A {@link TransformerVideoGraph Transformer}-specific implementation of {@link
 * SingleInputVideoGraph}.
 */
/* package */ final class TransformerSingleInputVideoGraph extends SingleInputVideoGraph
    implements TransformerVideoGraph {

  /** A factory for creating {@link TransformerSingleInputVideoGraph} instances. */
  public static final class Factory implements TransformerVideoGraph.Factory {

    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;

    /**
     * Creates a factory.
     *
     * @param videoFrameProcessorFactory The factory for the underlying {@link
     *     VideoFrameProcessor} instances.
     */
    public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
      this.videoFrameProcessorFactory = videoFrameProcessorFactory;
    }

    @Override
    public TransformerSingleInputVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs,
        boolean renderFramesAutomatically) {
      // NOTE: the arguments below are deliberately NOT in create()'s parameter order — the
      // private constructor takes (listener, debugViewProvider) and places
      // renderFramesAutomatically before compositionEffects. Keep the two signatures in sync
      // when editing.
      return new TransformerSingleInputVideoGraph(
          context,
          videoFrameProcessorFactory,
          outputColorInfo,
          listener,
          debugViewProvider,
          listenerExecutor,
          videoCompositorSettings,
          renderFramesAutomatically,
          compositionEffects,
          initialTimestampOffsetUs);
    }
  }

  // Effects applied to the whole composition; handed to the input wrapper as post-processing
  // effects in createInput().
  private final List<Effect> compositionEffects;

  // Set exactly once by createInput(); remains non-null afterwards (single-input graph).
  private @MonotonicNonNull VideoFrameProcessingWrapper videoFrameProcessingWrapper;

  private TransformerSingleInputVideoGraph(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo outputColorInfo,
      Listener listener,
      DebugViewProvider debugViewProvider,
      Executor listenerExecutor,
      VideoCompositorSettings videoCompositorSettings,
      boolean renderFramesAutomatically,
      List<Effect> compositionEffects,
      long initialTimestampOffsetUs) {
    super(
        context,
        videoFrameProcessorFactory,
        outputColorInfo,
        listener,
        debugViewProvider,
        listenerExecutor,
        videoCompositorSettings,
        renderFramesAutomatically,
        initialTimestampOffsetUs);
    this.compositionEffects = compositionEffects;
  }

  /**
   * {@inheritDoc}
   *
   * <p>Must be called at most once: this graph supports exactly one input stream.
   */
  @Override
  public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
    checkState(videoFrameProcessingWrapper == null);
    registerInput(inputIndex);
    // The composition effects are applied by the wrapper (as post-processing effects) since a
    // single-input graph has no separate compositing stage.
    videoFrameProcessingWrapper =
        new VideoFrameProcessingWrapper(
            getProcessor(inputIndex), compositionEffects, getInitialTimestampOffsetUs());
    return videoFrameProcessingWrapper;
  }

  @Override
  public void renderOutputFrameWithMediaPresentationTime() {
    getProcessor(getInputIndex()).renderOutputFrame(RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME);
  }
}

View File

@ -1,96 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import android.content.Context;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import java.util.List;
import java.util.concurrent.Executor;
/** The {@link VideoGraph} to support {@link Transformer} specific use cases. */
/* package */ interface TransformerVideoGraph extends VideoGraph {

  /** A factory for creating a {@link TransformerVideoGraph}. */
  interface Factory {
    /**
     * Creates a new {@link TransformerVideoGraph} instance.
     *
     * @param context A {@link Context}.
     * @param outputColorInfo The {@link ColorInfo} for the output frames.
     * @param debugViewProvider A {@link DebugViewProvider}.
     * @param listener A {@link Listener}.
     * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
     * @param videoCompositorSettings The {@link VideoCompositorSettings} to apply to the
     *     composition.
     * @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
     * @param initialTimestampOffsetUs The timestamp offset for the first frame, in microseconds.
     * @param renderFramesAutomatically If {@code true}, the instance will render output frames to
     *     the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as the
     *     instance is done processing them. If {@code false}, the instance will block until {@link
     *     #renderOutputFrameWithMediaPresentationTime()} is called, to render the frame.
     * @return A new instance.
     * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
     *     VideoFrameProcessor}.
     */
    TransformerVideoGraph create(
        Context context,
        ColorInfo outputColorInfo,
        DebugViewProvider debugViewProvider,
        Listener listener,
        Executor listenerExecutor,
        VideoCompositorSettings videoCompositorSettings,
        List<Effect> compositionEffects,
        long initialTimestampOffsetUs,
        boolean renderFramesAutomatically)
        throws VideoFrameProcessingException;
  }

  /**
   * Returns a {@link GraphInput} object to which the {@code VideoGraph} inputs are queued.
   *
   * <p>This method must be called after successfully {@linkplain #initialize() initializing} the
   * {@code VideoGraph}.
   *
   * <p>This method must be called exactly once for every input stream.
   *
   * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
   *
   * @param inputIndex The index of the input, which could be used to order the inputs.
   */
  GraphInput createInput(int inputIndex) throws VideoFrameProcessingException;

  /**
   * Renders the oldest unrendered output frame that has become {@linkplain
   * Listener#onOutputFrameAvailableForRendering(long) available for rendering} to the output
   * surface.
   *
   * <p>This method must only be called if {@code renderFramesAutomatically} was set to {@code
   * false} using the {@link Factory} and should be called exactly once for each frame that becomes
   * {@linkplain Listener#onOutputFrameAvailableForRendering(long) available for rendering}.
   *
   * <p>This will render the output frame to the {@linkplain #setOutputSurfaceInfo output surface}
   * with the presentation seen in {@link Listener#onOutputFrameAvailableForRendering(long)}.
   */
  void renderOutputFrameWithMediaPresentationTime();
}

View File

@ -1,168 +0,0 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.TimestampIterator;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/** A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events. */
/* package */ final class VideoFrameProcessingWrapper implements GraphInput {
private final VideoFrameProcessor videoFrameProcessor;
private final List<Effect> postProcessingEffects;
private final long initialTimestampOffsetUs;
private final AtomicLong mediaItemOffsetUs;
public VideoFrameProcessingWrapper(
VideoFrameProcessor videoFrameProcessor,
List<Effect> postProcessingEffects,
long initialTimestampOffsetUs) {
this.videoFrameProcessor = videoFrameProcessor;
this.postProcessingEffects = postProcessingEffects;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
mediaItemOffsetUs = new AtomicLong();
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format decodedFormat,
boolean isLast) {
boolean isSurfaceAssetLoaderMediaItem = isMediaItemForSurfaceAssetLoader(editedMediaItem);
durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs);
if (decodedFormat != null) {
decodedFormat = applyDecoderRotation(decodedFormat);
ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(editedMediaItem.effects.videoEffects)
.addAll(postProcessingEffects)
.build();
videoFrameProcessor.registerInputStream(
isSurfaceAssetLoaderMediaItem
? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION
: getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)),
decodedFormat,
combinedEffects,
/* offsetToAddUs= */ initialTimestampOffsetUs + mediaItemOffsetUs.get());
}
mediaItemOffsetUs.addAndGet(durationUs);
}
@Override
public @InputResult int queueInputBitmap(
Bitmap inputBitmap, TimestampIterator timestampIterator) {
return videoFrameProcessor.queueInputBitmap(inputBitmap, timestampIterator)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
videoFrameProcessor.setOnInputFrameProcessedListener(listener);
}
@Override
public void setOnInputSurfaceReadyListener(Runnable runnable) {
videoFrameProcessor.setOnInputSurfaceReadyListener(runnable);
}
@Override
public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
return videoFrameProcessor.queueInputTexture(texId, presentationTimeUs)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public Surface getInputSurface() {
return videoFrameProcessor.getInputSurface();
}
@Override
public int getPendingVideoFrameCount() {
return videoFrameProcessor.getPendingInputFrameCount();
}
@Override
public boolean registerVideoFrame(long presentationTimeUs) {
return videoFrameProcessor.registerInputFrame();
}
@Override
public void signalEndOfVideoInput() {
videoFrameProcessor.signalEndOfInput();
}
public void release() {
videoFrameProcessor.release();
}
private static Format applyDecoderRotation(Format format) {
// The decoder rotates encoded frames for display by format.rotationDegrees.
if (format.rotationDegrees % 180 == 0) {
return format;
}
return format
.buildUpon()
.setWidth(format.height)
.setHeight(format.width)
.setRotationDegrees(0)
.build();
}
private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP;
}
if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
return INPUT_TYPE_TEXTURE_ID;
}
if (MimeTypes.isVideo(sampleMimeType)) {
return INPUT_TYPE_SURFACE;
}
throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
}
private static boolean isMediaItemForSurfaceAssetLoader(EditedMediaItem editedMediaItem) {
@Nullable
MediaItem.LocalConfiguration localConfiguration = editedMediaItem.mediaItem.localConfiguration;
if (localConfiguration == null) {
return false;
}
@Nullable String scheme = localConfiguration.uri.getScheme();
if (scheme == null) {
return false;
}
return scheme.equals(SurfaceAssetLoader.MEDIA_ITEM_URI_SCHEME);
}
}

View File

@ -22,18 +22,23 @@ import static androidx.media3.common.C.COLOR_TRANSFER_HLG;
import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED; import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL; import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
import static androidx.media3.common.ColorInfo.isTransferHdr; import static androidx.media3.common.ColorInfo.isTransferHdr;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.VideoFrameProcessor.RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR; import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL; import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
import static androidx.media3.transformer.TransformerUtil.getOutputMimeTypeAndHdrModeAfterFallback; import static androidx.media3.transformer.TransformerUtil.getOutputMimeTypeAndHdrModeAfterFallback;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.util.Pair; import android.util.Pair;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting; import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C; import androidx.media3.common.C;
@ -41,18 +46,23 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoCompositorSettings; import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.Consumer; import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.decoder.DecoderInputBuffer; import androidx.media3.decoder.DecoderInputBuffer;
import com.google.common.util.concurrent.MoreExecutors; import androidx.media3.effect.MultipleInputVideoGraph;
import androidx.media3.effect.SingleInputVideoGraph;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.atomic.AtomicLong;
import org.checkerframework.checker.initialization.qual.Initialized; import org.checkerframework.checker.initialization.qual.Initialized;
import org.checkerframework.checker.lock.qual.GuardedBy; import org.checkerframework.checker.lock.qual.GuardedBy;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -64,7 +74,6 @@ import org.checkerframework.dataflow.qual.Pure;
private final VideoGraphWrapper videoGraph; private final VideoGraphWrapper videoGraph;
private final EncoderWrapper encoderWrapper; private final EncoderWrapper encoderWrapper;
private final DecoderInputBuffer encoderOutputBuffer; private final DecoderInputBuffer encoderOutputBuffer;
private final long initialTimestampOffsetUs;
/** /**
* The timestamp of the last buffer processed before {@linkplain * The timestamp of the last buffer processed before {@linkplain
@ -95,7 +104,9 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO: b/278259383 - Consider delaying configuration of VideoSampleExporter to use the decoder // TODO: b/278259383 - Consider delaying configuration of VideoSampleExporter to use the decoder
// output format instead of the extractor output format, to match AudioSampleExporter behavior. // output format instead of the extractor output format, to match AudioSampleExporter behavior.
super(firstInputFormat, muxerWrapper); super(firstInputFormat, muxerWrapper);
this.initialTimestampOffsetUs = initialTimestampOffsetUs; // Automatically render frames if the sample exporter does not limit the number of frames in
// the encoder.
boolean renderFramesAutomatically = maxFramesInEncoder < 1;
finalFramePresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
lastMuxerInputBufferTimestampUs = C.TIME_UNSET; lastMuxerInputBufferTimestampUs = C.TIME_UNSET;
@ -132,7 +143,7 @@ import org.checkerframework.dataflow.qual.Pure;
boolean isGlToneMapping = boolean isGlToneMapping =
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL
&& ColorInfo.isTransferHdr(videoGraphInputColor); && isTransferHdr(videoGraphInputColor);
if (isGlToneMapping) { if (isGlToneMapping) {
videoGraphOutputColor = SDR_BT709_LIMITED; videoGraphOutputColor = SDR_BT709_LIMITED;
} }
@ -142,14 +153,16 @@ import org.checkerframework.dataflow.qual.Pure;
new VideoGraphWrapper( new VideoGraphWrapper(
context, context,
hasMultipleInputs hasMultipleInputs
? new TransformerMultipleInputVideoGraph.Factory(videoFrameProcessorFactory) ? new MultipleInputVideoGraph.Factory(videoFrameProcessorFactory)
: new TransformerSingleInputVideoGraph.Factory(videoFrameProcessorFactory), : new SingleInputVideoGraph.Factory(videoFrameProcessorFactory),
videoGraphOutputColor, videoGraphOutputColor,
errorConsumer,
debugViewProvider, debugViewProvider,
videoCompositorSettings, videoCompositorSettings,
compositionEffects, compositionEffects,
maxFramesInEncoder); errorConsumer,
initialTimestampOffsetUs,
maxFramesInEncoder,
renderFramesAutomatically);
videoGraph.initialize(); videoGraph.initialize();
} catch (VideoFrameProcessingException e) { } catch (VideoFrameProcessingException e) {
throw ExportException.createForVideoFrameProcessingException(e); throw ExportException.createForVideoFrameProcessingException(e);
@ -367,10 +380,10 @@ import org.checkerframework.dataflow.qual.Pure;
if (isInputToneMapped) { if (isInputToneMapped) {
// When tone-mapping HDR to SDR is enabled, assume we get BT.709 to avoid having the encoder // When tone-mapping HDR to SDR is enabled, assume we get BT.709 to avoid having the encoder
// populate default color info, which depends on the resolution. // populate default color info, which depends on the resolution.
return ColorInfo.SDR_BT709_LIMITED; return SDR_BT709_LIMITED;
} }
if (SRGB_BT709_FULL.equals(inputFormat.colorInfo)) { if (SRGB_BT709_FULL.equals(inputFormat.colorInfo)) {
return ColorInfo.SDR_BT709_LIMITED; return SDR_BT709_LIMITED;
} }
return checkNotNull(inputFormat.colorInfo); return checkNotNull(inputFormat.colorInfo);
} }
@ -462,51 +475,82 @@ import org.checkerframework.dataflow.qual.Pure;
} }
} }
private final class VideoGraphWrapper implements TransformerVideoGraph, VideoGraph.Listener { private final class VideoGraphWrapper implements VideoGraph.Listener {
private final TransformerVideoGraph videoGraph; private final VideoGraph videoGraph;
private final Consumer<ExportException> errorConsumer;
private final int maxFramesInEncoder;
private final boolean renderFramesAutomatically;
private final Object lock; private final Object lock;
private final Consumer<ExportException> errorConsumer;
private final boolean renderFramesAutomatically;
private final long initialTimestampOffsetUs;
private final int maxFramesInEncoder;
private @GuardedBy("lock") int framesInEncoder; private @GuardedBy("lock") int framesInEncoder;
private @GuardedBy("lock") int framesAvailableToRender; private @GuardedBy("lock") int framesAvailableToRender;
public VideoGraphWrapper( public VideoGraphWrapper(
Context context, Context context,
TransformerVideoGraph.Factory videoGraphFactory, VideoGraph.Factory videoGraphFactory,
ColorInfo videoFrameProcessorOutputColor, ColorInfo videoFrameProcessorOutputColor,
Consumer<ExportException> errorConsumer,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects, List<Effect> compositionEffects,
int maxFramesInEncoder) Consumer<ExportException> errorConsumer,
long initialTimestampOffsetUs,
int maxFramesInEncoder,
boolean renderFramesAutomatically)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
this.errorConsumer = errorConsumer; this.errorConsumer = errorConsumer;
// To satisfy the nullness checker by declaring an initialized this reference used in the this.lock = new Object();
// videoGraphFactory.create method this.renderFramesAutomatically = renderFramesAutomatically;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.maxFramesInEncoder = maxFramesInEncoder;
@SuppressWarnings("nullness:assignment") @SuppressWarnings("nullness:assignment")
@Initialized @Initialized
VideoGraphWrapper thisRef = this; VideoGraphWrapper thisRef = this;
this.maxFramesInEncoder = maxFramesInEncoder;
// Automatically render frames if the sample exporter does not limit the number of frames in
// the encoder.
renderFramesAutomatically = maxFramesInEncoder < 1;
lock = new Object();
videoGraph = videoGraph =
videoGraphFactory.create( videoGraphFactory.create(
context, context,
videoFrameProcessorOutputColor, videoFrameProcessorOutputColor,
debugViewProvider, debugViewProvider,
/* listener= */ thisRef, /* listener= */ thisRef,
/* listenerExecutor= */ MoreExecutors.directExecutor(), /* listenerExecutor= */ directExecutor(),
videoCompositorSettings, videoCompositorSettings,
compositionEffects, compositionEffects,
initialTimestampOffsetUs, initialTimestampOffsetUs,
renderFramesAutomatically); renderFramesAutomatically);
} }
public void initialize() throws VideoFrameProcessingException {
videoGraph.initialize();
}
public boolean hasProducedFrameWithTimestampZero() {
return videoGraph.hasProducedFrameWithTimestampZero();
}
/**
* Returns a {@link GraphInput} object to which the {@code VideoGraph} inputs are queued.
*
* <p>This method must be called after successfully {@linkplain #initialize() initializing} the
* {@code VideoGraph}.
*
* <p>This method must called exactly once for every input stream.
*
* <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
*
* @param inputIndex The index of the input, which could be used to order the inputs.
*/
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
videoGraph.registerInput(inputIndex);
// Applies the composition effects here if there's only one input. In multiple-input case, the
// effects are applied as a part of the video graph.
return new VideoGraphInput(videoGraph, inputIndex, initialTimestampOffsetUs);
}
public void release() {
videoGraph.release();
}
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
@Nullable SurfaceInfo surfaceInfo = null; @Nullable SurfaceInfo surfaceInfo = null;
@ -515,7 +559,7 @@ import org.checkerframework.dataflow.qual.Pure;
} catch (ExportException e) { } catch (ExportException e) {
errorConsumer.accept(e); errorConsumer.accept(e);
} }
setOutputSurfaceInfo(surfaceInfo); videoGraph.setOutputSurfaceInfo(surfaceInfo);
} }
@Override @Override
@ -543,47 +587,6 @@ import org.checkerframework.dataflow.qual.Pure;
errorConsumer.accept(ExportException.createForVideoFrameProcessingException(e)); errorConsumer.accept(ExportException.createForVideoFrameProcessingException(e));
} }
@Override
public void initialize() throws VideoFrameProcessingException {
videoGraph.initialize();
}
@Override
public void registerInput(@IntRange(from = 0) int inputIndex)
throws VideoFrameProcessingException {
videoGraph.registerInput(inputIndex);
}
@Override
public VideoFrameProcessor getProcessor(int inputIndex) {
return videoGraph.getProcessor(inputIndex);
}
@Override
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
return videoGraph.createInput(inputIndex);
}
@Override
public void renderOutputFrameWithMediaPresentationTime() {
videoGraph.renderOutputFrameWithMediaPresentationTime();
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
videoGraph.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public boolean hasProducedFrameWithTimestampZero() {
return videoGraph.hasProducedFrameWithTimestampZero();
}
@Override
public void release() {
videoGraph.release();
}
public boolean hasEncoderReleasedAllBuffersAfterEndOfStream() { public boolean hasEncoderReleasedAllBuffersAfterEndOfStream() {
if (renderFramesAutomatically) { if (renderFramesAutomatically) {
// Video graph wrapper does not track encoder buffers. // Video graph wrapper does not track encoder buffers.
@ -616,8 +619,131 @@ import org.checkerframework.dataflow.qual.Pure;
} }
} }
if (shouldRender) { if (shouldRender) {
renderOutputFrameWithMediaPresentationTime(); videoGraph.renderOutputFrame(RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME);
} }
} }
} }
/** A wrapper for {@link VideoGraph} input that handles {@link GraphInput} events. */
private static final class VideoGraphInput implements GraphInput {
private final VideoGraph videoGraph;
private final int inputIndex;
private final long initialTimestampOffsetUs;
private final AtomicLong mediaItemOffsetUs;
public VideoGraphInput(VideoGraph videoGraph, int inputIndex, long initialTimestampOffsetUs) {
this.videoGraph = videoGraph;
this.inputIndex = inputIndex;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
mediaItemOffsetUs = new AtomicLong();
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem,
long durationUs,
@Nullable Format decodedFormat,
boolean isLast) {
boolean isSurfaceAssetLoaderMediaItem = isMediaItemForSurfaceAssetLoader(editedMediaItem);
durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs);
if (decodedFormat != null) {
decodedFormat = applyDecoderRotation(decodedFormat);
videoGraph.registerInputStream(
inputIndex,
isSurfaceAssetLoaderMediaItem
? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION
: getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)),
decodedFormat,
editedMediaItem.effects.videoEffects,
/* offsetToAddUs= */ initialTimestampOffsetUs + mediaItemOffsetUs.get());
}
mediaItemOffsetUs.addAndGet(durationUs);
}
@Override
public @InputResult int queueInputBitmap(
Bitmap inputBitmap, TimestampIterator timestampIterator) {
return videoGraph.queueInputBitmap(inputIndex, inputBitmap, timestampIterator)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
videoGraph.setOnInputFrameProcessedListener(inputIndex, listener);
}
@Override
public void setOnInputSurfaceReadyListener(Runnable runnable) {
videoGraph.setOnInputSurfaceReadyListener(inputIndex, runnable);
}
@Override
public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
return videoGraph.queueInputTexture(inputIndex, texId, presentationTimeUs)
? INPUT_RESULT_SUCCESS
: INPUT_RESULT_TRY_AGAIN_LATER;
}
@Override
public Surface getInputSurface() {
return videoGraph.getInputSurface(inputIndex);
}
@Override
public int getPendingVideoFrameCount() {
return videoGraph.getPendingInputFrameCount(inputIndex);
}
@Override
public boolean registerVideoFrame(long presentationTimeUs) {
return videoGraph.registerInputFrame(inputIndex);
}
@Override
public void signalEndOfVideoInput() {
videoGraph.signalEndOfInput(inputIndex);
}
private static Format applyDecoderRotation(Format format) {
// The decoder rotates encoded frames for display by format.rotationDegrees.
if (format.rotationDegrees % 180 == 0) {
return format;
}
return format
.buildUpon()
.setWidth(format.height)
.setHeight(format.width)
.setRotationDegrees(0)
.build();
}
private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(
String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP;
}
if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
return INPUT_TYPE_TEXTURE_ID;
}
if (MimeTypes.isVideo(sampleMimeType)) {
return INPUT_TYPE_SURFACE;
}
throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
}
private static boolean isMediaItemForSurfaceAssetLoader(EditedMediaItem editedMediaItem) {
@Nullable
MediaItem.LocalConfiguration localConfiguration =
editedMediaItem.mediaItem.localConfiguration;
if (localConfiguration == null) {
return false;
}
@Nullable String scheme = localConfiguration.uri.getScheme();
if (scheme == null) {
return false;
}
return scheme.equals(SurfaceAssetLoader.MEDIA_ITEM_URI_SCHEME);
}
}
} }