PiperOrigin-RevId: 565069442
Authored by claincly on 2023-09-13 09:11:59 -07:00; committed by Copybara-Service
parent 56fbf7377e
commit 5106f2f45a
8 changed files with 585 additions and 42 deletions

File: DebugTraceUtil.java (package androidx.media3.effect)

@@ -55,7 +55,9 @@ public final class DebugTraceUtil {
     EVENT_VFP_QUEUE_BITMAP,
     EVENT_VFP_QUEUE_TEXTURE,
     EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
+    EVENT_VFP_OUTPUT_TEXTURE_RENDERED,
     EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
+    EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED,
     EVENT_ENCODER_ENCODED_FRAME,
     EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
     EVENT_MUXER_WRITE_SAMPLE_VIDEO,
@@ -84,7 +86,10 @@ public final class DebugTraceUtil {
   public static final String EVENT_VFP_QUEUE_BITMAP = "VFP-QueueBitmap";
   public static final String EVENT_VFP_QUEUE_TEXTURE = "VFP-QueueTexture";
   public static final String EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE = "VFP-RenderedToOutputSurface";
+  public static final String EVENT_VFP_OUTPUT_TEXTURE_RENDERED = "VFP-OutputTextureRendered";
   public static final String EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM = "VFP-FinishOneInputStream";
+  public static final String EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED =
+      "COMP-OutputTextureRendered";
   public static final String EVENT_ENCODER_ENCODED_FRAME = "Encoder-EncodedFrame";
   public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO = "Muxer-CanWriteSample_Video";
   public static final String EVENT_MUXER_WRITE_SAMPLE_VIDEO = "Muxer-WriteSample_Video";
@@ -115,7 +120,9 @@ public final class DebugTraceUtil {
     EVENT_VFP_QUEUE_BITMAP,
     EVENT_VFP_QUEUE_TEXTURE,
     EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
+    EVENT_VFP_OUTPUT_TEXTURE_RENDERED,
     EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
+    EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED,
     EVENT_ENCODER_ENCODED_FRAME,
     EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
     EVENT_MUXER_WRITE_SAMPLE_VIDEO,
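
Note: the two new trace events follow the existing naming scheme ("VFP-" for the video frame processor, "COMP-" for the compositor) and are logged the same way as the existing events, via DebugTraceUtil.logEvent. A minimal sketch mirroring the calls added in MultipleInputVideoGraph below, assuming a presentationTimeUs variable in scope:

    import static androidx.media3.effect.DebugTraceUtil.EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED;
    import static androidx.media3.effect.DebugTraceUtil.logEvent;

    // Record that the compositor rendered an output texture at this timestamp.
    logEvent(EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);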

File: DefaultVideoFrameProcessor.java (package androidx.media3.effect)

@@ -15,7 +15,6 @@
  */
 package androidx.media3.effect;

-import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE;
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
@@ -87,8 +86,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
   /** A builder for {@link DefaultVideoFrameProcessor.Factory} instances. */
   public static final class Builder {
     private boolean enableColorTransfers;
+    @Nullable private ExecutorService executorService;
     private @MonotonicNonNull GlObjectsProvider glObjectsProvider;
-    private @MonotonicNonNull ExecutorService executorService;
     private GlTextureProducer.@MonotonicNonNull Listener textureOutputListener;
     private int textureOutputCapacity;

@@ -97,6 +96,14 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
       enableColorTransfers = true;
     }

+    private Builder(Factory factory) {
+      enableColorTransfers = factory.enableColorTransfers;
+      executorService = factory.executorService;
+      glObjectsProvider = factory.glObjectsProvider;
+      textureOutputListener = factory.textureOutputListener;
+      textureOutputCapacity = factory.textureOutputCapacity;
+    }
+
     /**
      * Sets whether to transfer colors to an intermediate color space when applying effects.
      *
@@ -122,18 +129,18 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
     /**
      * Sets the {@link Util#newSingleThreadScheduledExecutor} to execute GL commands from.
      *
-     * <p>If set, the {@link ExecutorService} must be {@linkplain ExecutorService#shutdown shut
-     * down} by the caller after all {@linkplain VideoFrameProcessor VideoFrameProcessors} using
-     * it have been {@linkplain #release released}.
+     * <p>If set to a non-null value, the {@link ExecutorService} must be {@linkplain
+     * ExecutorService#shutdown shut down} by the caller after all {@linkplain VideoFrameProcessor
+     * VideoFrameProcessors} using it have been {@linkplain #release released}.
      *
      * <p>The default value is a new {@link Util#newSingleThreadScheduledExecutor}, owned and
-     * {@link ExecutorService#shutdown} by the created {@link DefaultVideoFrameProcessor}.
+     * {@link ExecutorService#shutdown} by the created {@link DefaultVideoFrameProcessor}. Setting
+     * a {@code null} {@link ExecutorService} is equivalent to using the default value.
      *
      * @param executorService The {@link ExecutorService}.
      */
     @CanIgnoreReturnValue
-    @VisibleForTesting(otherwise = PACKAGE_PRIVATE)
-    public Builder setExecutorService(ExecutorService executorService) {
+    public Builder setExecutorService(@Nullable ExecutorService executorService) {
       this.executorService = executorService;
       return this;
     }
@@ -156,7 +163,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
      * @param textureOutputCapacity The amount of output textures that may be allocated at a time
      *     before texture output blocks. Must be greater than or equal to 1.
      */
-    @VisibleForTesting
     @CanIgnoreReturnValue
     public Builder setTextureOutput(
         GlTextureProducer.Listener textureOutputListener,
@@ -197,6 +203,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
       this.textureOutputCapacity = textureOutputCapacity;
     }

+    public Builder buildUpon() {
+      return new Builder(this);
+    }
+
     /**
      * {@inheritDoc}
      *
@@ -266,6 +276,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
       boolean shouldShutdownExecutorService = executorService == null;
       ExecutorService instanceExecutorService =
           executorService == null ? Util.newSingleThreadExecutor(THREAD_NAME) : executorService;
+
       VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
           new VideoFrameProcessingTaskExecutor(
               instanceExecutorService, shouldShutdownExecutorService, listener::onError);
@@ -427,6 +438,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
     if (!inputStreamRegisteredCondition.isOpen()) {
       return false;
     }
+
     inputSwitcher.activeTextureManager().queueInputTexture(textureId, presentationTimeUs);
     return true;
   }
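
Taken together, these Builder changes let callers derive a modified Factory from an existing one. A sketch of the intended buildUpon() usage, mirroring how MultipleInputVideoGraph below creates per-input factories from a shared base (glObjectsProvider, sharedExecutorService and textureOutputListener are assumed to be in scope):

    DefaultVideoFrameProcessor.Factory baseFactory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setGlObjectsProvider(glObjectsProvider)
            .setExecutorService(sharedExecutorService)
            .build();

    // The derived factory keeps the shared GL objects and executor service, and
    // additionally outputs textures instead of rendering to an output surface.
    DefaultVideoFrameProcessor.Factory preProcessingFactory =
        baseFactory
            .buildUpon()
            .setTextureOutput(textureOutputListener, /* textureOutputCapacity= */ 2)
            .build();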

File: MultipleInputVideoGraph.java (package androidx.media3.transformer, new file)

@@ -0,0 +1,482 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static androidx.media3.common.util.Util.newSingleThreadScheduledExecutor;
import static androidx.media3.effect.DebugTraceUtil.EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_OUTPUT_TEXTURE_RENDERED;
import static androidx.media3.effect.DebugTraceUtil.logEvent;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.content.Context;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.SparseArray;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.GlUtil;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoCompositor;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.GlTextureProducer;
import androidx.media3.effect.VideoCompositor;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** A {@link VideoGraph} that handles multiple input streams. */
/* package */ final class MultipleInputVideoGraph implements VideoGraph {
public static final class Factory implements VideoGraph.Factory {
@Override
public MultipleInputVideoGraph create(
Context context,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
Consumer<ExportException> errorConsumer,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
return new MultipleInputVideoGraph(
context,
inputColorInfo,
outputColorInfo,
errorConsumer,
debugViewProvider,
listener,
listenerExecutor,
compositionEffects,
initialTimestampOffsetUs);
}
}
private static final String SHARED_EXECUTOR_NAME = "Transformer:MultipleInputVideoGraph:Thread";
private static final long RELEASE_WAIT_TIME_MS = 1_000;
private static final int PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY = 2;
private static final int COMPOSITOR_TEXTURE_OUTPUT_CAPACITY = 1;
private final Context context;
private final ColorInfo inputColorInfo;
private final ColorInfo outputColorInfo;
private final Consumer<ExportException> errorConsumer;
private final GlObjectsProvider glObjectsProvider;
private final DebugViewProvider debugViewProvider;
private final Listener listener;
private final Executor listenerExecutor;
private final List<Effect> compositionEffects;
private final List<VideoFrameProcessingWrapper> preProcessingWrappers;
private final ExecutorService sharedExecutorService;
private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Queue<CompositorOutputTextureInfo> compositorOutputTextures;
private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases;
private final long initialTimestampOffsetUs;
@Nullable private VideoFrameProcessor compositionVideoFrameProcessor;
@Nullable private VideoCompositor videoCompositor;
private boolean compositionVideoFrameProcessorInputStreamRegistered;
private boolean compositionVideoFrameProcessorInputStreamRegistrationCompleted;
private boolean compositorEnded;
private boolean released;
private long lastRenderedPresentationTimeUs;
private volatile boolean hasProducedFrameWithTimestampZero;
// TODO - b/289986435: Remove errorConsumer and use Listener.onError().
private MultipleInputVideoGraph(
Context context,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
Consumer<ExportException> errorConsumer,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
this.context = context;
this.inputColorInfo = inputColorInfo;
this.outputColorInfo = outputColorInfo;
this.errorConsumer = errorConsumer;
this.debugViewProvider = debugViewProvider;
this.listener = listener;
this.listenerExecutor = listenerExecutor;
this.compositionEffects = new ArrayList<>(compositionEffects);
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
lastRenderedPresentationTimeUs = C.TIME_UNSET;
preProcessingWrappers = new ArrayList<>();
sharedExecutorService = newSingleThreadScheduledExecutor(SHARED_EXECUTOR_NAME);
glObjectsProvider = new SingleContextGlObjectsProvider();
// TODO - b/289986435: Support injecting VideoFrameProcessor.Factory.
videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(glObjectsProvider)
.setExecutorService(sharedExecutorService)
.build();
compositorOutputTextures = new ArrayDeque<>();
compositorOutputTextureReleases = new SparseArray<>();
}
/**
* {@inheritDoc}
*
* <p>This method must be called at most once.
*/
@Override
public void initialize() throws VideoFrameProcessingException {
checkState(
preProcessingWrappers.isEmpty()
&& videoCompositor == null
&& compositionVideoFrameProcessor == null
&& !released);
// Setting up the compositionVideoFrameProcessor
compositionVideoFrameProcessor =
videoFrameProcessorFactory.create(
context,
debugViewProvider,
// Pre-processing VideoFrameProcessors have converted the inputColor to outputColor
// already.
/* inputColorInfo= */ outputColorInfo,
outputColorInfo,
/* renderFramesAutomatically= */ true,
/* listenerExecutor= */ MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
List<Effect> effects,
FrameInfo frameInfo) {
compositionVideoFrameProcessorInputStreamRegistrationCompleted = true;
queueCompositionOutputInternal();
}
@Override
public void onOutputSizeChanged(int width, int height) {
checkNotNull(compositionVideoFrameProcessor)
.setOutputSurfaceInfo(listener.onOutputSizeChanged(width, height));
}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
if (presentationTimeUs == 0) {
hasProducedFrameWithTimestampZero = true;
}
lastRenderedPresentationTimeUs = presentationTimeUs;
}
@Override
public void onError(VideoFrameProcessingException exception) {
handleException(exception);
}
@Override
public void onEnded() {
listenerExecutor.execute(() -> listener.onEnded(lastRenderedPresentationTimeUs));
}
});
// Release the compositor's output texture.
compositionVideoFrameProcessor.setOnInputFrameProcessedListener(
(textureId, syncObject) -> {
checkState(contains(compositorOutputTextureReleases, textureId));
compositorOutputTextureReleases.get(textureId).release();
compositorOutputTextureReleases.remove(textureId);
queueCompositionOutputInternal();
});
// Setting up the compositor.
videoCompositor =
new DefaultVideoCompositor(
context,
glObjectsProvider,
new DefaultVideoCompositor.Settings(),
sharedExecutorService,
new VideoCompositor.Listener() {
@Override
public void onError(VideoFrameProcessingException exception) {
handleException(exception);
}
@Override
public void onEnded() {
compositorEnded = true;
if (compositorOutputTextures.isEmpty()) {
compositionVideoFrameProcessor.signalEndOfInput();
} else {
queueCompositionOutputInternal();
}
}
},
/* textureOutputListener= */ this::processCompositorOutputTexture,
COMPOSITOR_TEXTURE_OUTPUT_CAPACITY);
}
@Override
public GraphInput createInput() throws VideoFrameProcessingException {
checkStateNotNull(videoCompositor);
int videoCompositorInputId = videoCompositor.registerInputSource();
// Creating a new VideoFrameProcessor for the input.
VideoFrameProcessingWrapper preProcessingVideoFrameProcessorWrapper =
new VideoFrameProcessingWrapper(
context,
videoFrameProcessorFactory
.buildUpon()
.setTextureOutput(
// Texture output to compositor.
(textureProducer, texture, presentationTimeUs, syncObject) -> {
logEvent(EVENT_VFP_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);
checkNotNull(videoCompositor)
.queueInputTexture(
videoCompositorInputId,
textureProducer,
texture,
// Color is converted to outputColor in pre-processing.
/* colorInfo= */ outputColorInfo,
presentationTimeUs);
},
PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
.build(),
inputColorInfo,
outputColorInfo,
DebugViewProvider.NONE,
listenerExecutor,
new VideoFrameProcessor.Listener() {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
List<Effect> effects,
FrameInfo frameInfo) {
// Do nothing.
}
@Override
public void onOutputSizeChanged(int width, int height) {}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {}
@Override
public void onError(VideoFrameProcessingException ex) {
errorConsumer.accept(ExportException.createForVideoFrameProcessingException(ex));
}
@Override
public void onEnded() {
checkNotNull(videoCompositor).signalEndOfInputSource(videoCompositorInputId);
}
},
/* renderFramesAutomatically= */ true,
/* presentation= */ null,
initialTimestampOffsetUs);
preProcessingWrappers.add(preProcessingVideoFrameProcessorWrapper);
return preProcessingVideoFrameProcessorWrapper;
}
@Override
public boolean hasProducedFrameWithTimestampZero() {
return hasProducedFrameWithTimestampZero;
}
@Override
public void release() {
if (released) {
return;
}
// The frame processors must be released before their internal executor services are released.
for (int i = 0; i < preProcessingWrappers.size(); i++) {
preProcessingWrappers.get(i).release();
}
preProcessingWrappers.clear();
if (videoCompositor != null) {
videoCompositor.release();
videoCompositor = null;
}
if (compositionVideoFrameProcessor != null) {
compositionVideoFrameProcessor.release();
compositionVideoFrameProcessor = null;
}
sharedExecutorService.shutdown();
try {
sharedExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
listenerExecutor.execute(() -> listener.onError(VideoFrameProcessingException.from(e)));
}
released = true;
}
private void handleException(Exception e) {
errorConsumer.accept(
ExportException.createForVideoFrameProcessingException(
e instanceof VideoFrameProcessingException
? (VideoFrameProcessingException) e
: VideoFrameProcessingException.from(e)));
}
private void processCompositorOutputTexture(
GlTextureProducer textureProducer,
GlTextureInfo outputTexture,
long presentationTimeUs,
long syncObject) {
checkStateNotNull(compositionVideoFrameProcessor);
checkState(!compositorEnded);
logEvent(EVENT_COMPOSITOR_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);
compositorOutputTextures.add(
new CompositorOutputTextureInfo(outputTexture, presentationTimeUs));
compositorOutputTextureReleases.put(
outputTexture.texId,
new CompositorOutputTextureRelease(textureProducer, presentationTimeUs));
if (!compositionVideoFrameProcessorInputStreamRegistered) {
checkNotNull(compositionVideoFrameProcessor)
.registerInputStream(
INPUT_TYPE_TEXTURE_ID,
compositionEffects,
new FrameInfo.Builder(outputTexture.width, outputTexture.height).build());
compositionVideoFrameProcessorInputStreamRegistered = true;
// Return as the VideoFrameProcessor rejects input textures until the input is registered.
return;
}
queueCompositionOutputInternal();
}
private void queueCompositionOutputInternal() {
checkStateNotNull(compositionVideoFrameProcessor);
if (!compositionVideoFrameProcessorInputStreamRegistrationCompleted) {
return;
}
@Nullable CompositorOutputTextureInfo outputTexture = compositorOutputTextures.peek();
if (outputTexture == null) {
return;
}
checkState(
checkNotNull(compositionVideoFrameProcessor)
.queueInputTexture(
outputTexture.glTextureInfo.texId, outputTexture.presentationTimeUs));
compositorOutputTextures.remove();
if (compositorEnded && compositorOutputTextures.isEmpty()) {
checkNotNull(compositionVideoFrameProcessor).signalEndOfInput();
}
}
private static final class CompositorOutputTextureInfo {
public final GlTextureInfo glTextureInfo;
public final long presentationTimeUs;
private CompositorOutputTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
this.glTextureInfo = glTextureInfo;
this.presentationTimeUs = presentationTimeUs;
}
}
private static final class CompositorOutputTextureRelease {
private final GlTextureProducer textureProducer;
private final long presentationTimeUs;
public CompositorOutputTextureRelease(
GlTextureProducer textureProducer, long presentationTimeUs) {
this.textureProducer = textureProducer;
this.presentationTimeUs = presentationTimeUs;
}
public void release() {
textureProducer.releaseOutputTexture(presentationTimeUs);
}
}
/**
* A {@link GlObjectsProvider} that creates a single {@link EGLContext} on the first call to
* {@link #createEglContext} and returns the same shared context on all subsequent calls.
*/
private static final class SingleContextGlObjectsProvider implements GlObjectsProvider {
private final GlObjectsProvider glObjectsProvider;
private @MonotonicNonNull EGLContext singleEglContext;
public SingleContextGlObjectsProvider() {
this.glObjectsProvider = new DefaultGlObjectsProvider();
}
@Override
public EGLContext createEglContext(
EGLDisplay eglDisplay, int openGlVersion, int[] configAttributes)
throws GlUtil.GlException {
if (singleEglContext == null) {
singleEglContext =
glObjectsProvider.createEglContext(eglDisplay, openGlVersion, configAttributes);
}
return singleEglContext;
}
@Override
public EGLSurface createEglSurface(
EGLDisplay eglDisplay,
Object surface,
@C.ColorTransfer int colorTransfer,
boolean isEncoderInputSurface)
throws GlUtil.GlException {
return glObjectsProvider.createEglSurface(
eglDisplay, surface, colorTransfer, isEncoderInputSurface);
}
@Override
public EGLSurface createFocusedPlaceholderEglSurface(
EGLContext eglContext, EGLDisplay eglDisplay) throws GlUtil.GlException {
return glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
}
@Override
public GlTextureInfo createBuffersForTexture(int texId, int width, int height)
throws GlUtil.GlException {
return glObjectsProvider.createBuffersForTexture(texId, width, height);
}
}
}
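
For orientation, the call sequence a caller is expected to follow, per the VideoGraph contract updated later in this commit (a sketch; error handling and frame feeding are elided, and all constructor arguments are assumed to be in scope):

    VideoGraph videoGraph =
        new MultipleInputVideoGraph.Factory()
            .create(
                context,
                inputColorInfo,
                outputColorInfo,
                errorConsumer,
                debugViewProvider,
                listener,
                listenerExecutor,
                compositionEffects,
                /* initialTimestampOffsetUs= */ 0);
    videoGraph.initialize(); // Must be called at most once.
    GraphInput firstInput = videoGraph.createInput(); // Exactly once per input stream.
    GraphInput secondInput = videoGraph.createInput();
    // ... queue frames through the inputs; each input pre-processes into the
    // DefaultVideoCompositor, whose output textures feed the composition processor ...
    videoGraph.release(); // The graph must not be used after this call.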

File: SingleInputVideoGraph.java (package androidx.media3.transformer)

@@ -125,7 +125,17 @@ import java.util.concurrent.Executor;
    * <p>This method must be called at most once.
    */
   @Override
-  public void initialize() throws VideoFrameProcessingException {
+  public void initialize() {
+    // Initialization is deferred to createInput().
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * <p>This method must only be called once.
+   */
+  @Override
+  public GraphInput createInput() throws VideoFrameProcessingException {
     checkStateNotNull(videoFrameProcessingWrapper == null && !released);
     videoFrameProcessingWrapper =
@@ -175,12 +185,7 @@ import java.util.concurrent.Executor;
             renderFramesAutomatically,
             presentation,
             initialTimestampOffsetUs);
-  }
-
-  /** Returns the {@link GraphInput}. */
-  @Override
-  public GraphInput getInput() {
-    return checkNotNull(videoFrameProcessingWrapper);
+    return videoFrameProcessingWrapper;
   }

   @Override

File: Transformer.java (package androidx.media3.transformer)

@@ -782,12 +782,10 @@ public final class Transformer {
    * <p>This method is under development. A {@link Composition} must meet the following conditions:
    *
    * <ul>
-   *   <li>The composition must have at most one {@linkplain EditedMediaItemSequence sequence} with
-   *       video/image data. There are no restrictions on the number of audio sequences.
    *   <li>The {@linkplain Composition#effects composition effects} must contain no {@linkplain
    *       Effects#audioProcessors audio effects}.
-   *   <li>The composition effects must either contain no {@linkplain Effects#videoEffects video
-   *       effects}, or exactly one {@link Presentation}.
+   *   <li>The video composition {@link Presentation} effect is applied after input streams are
+   *       composited. Other composition effects are ignored.
    * </ul>
    *
    * <p>{@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the
@@ -972,11 +970,6 @@ public final class Transformer {
   private void startInternal(
       Composition composition, MuxerWrapper muxerWrapper, ComponentListener componentListener) {
     checkArgument(composition.effects.audioProcessors.isEmpty());
-    // Only supports Presentation in video effects.
-    ImmutableList<Effect> videoEffects = composition.effects.videoEffects;
-    checkArgument(
-        videoEffects.isEmpty()
-            || (videoEffects.size() == 1 && videoEffects.get(0) instanceof Presentation));
     verifyApplicationThread();
     checkState(transformerInternal == null, "There is already an export in progress.");
     HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null);
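
With the single-video-sequence restriction removed, a Composition carrying two concurrent video sequences can now be started. A hedged sketch against the public media3 API, assuming a configured Transformer instance (the URIs and output path are placeholders):

    EditedMediaItem mainItem =
        new EditedMediaItem.Builder(MediaItem.fromUri("file:///path/to/main.mp4")).build();
    EditedMediaItem overlayItem =
        new EditedMediaItem.Builder(MediaItem.fromUri("file:///path/to/overlay.mp4")).build();

    Composition composition =
        new Composition.Builder(
                ImmutableList.of(
                    new EditedMediaItemSequence(ImmutableList.of(mainItem)),
                    new EditedMediaItemSequence(ImmutableList.of(overlayItem))))
            // Per the updated javadoc, only a Presentation composition effect is
            // honored; it is applied after the input streams are composited.
            .setEffects(
                new Effects(
                    /* audioProcessors= */ ImmutableList.of(),
                    /* videoEffects= */ ImmutableList.of(Presentation.createForHeight(720))))
            .build();

    transformer.start(composition, "/path/to/output.mp4");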

File: TransformerInternal.java (package androidx.media3.transformer)

@@ -16,6 +16,8 @@
 package androidx.media3.transformer;

+import static androidx.media3.common.C.TRACK_TYPE_AUDIO;
+import static androidx.media3.common.C.TRACK_TYPE_VIDEO;
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Util.contains;
@@ -558,7 +560,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
           assetLoaderInputTracker.getAssetLoaderInputFormat(sequenceIndex, trackType);
       if (MimeTypes.isAudio(assetLoaderOutputFormat.sampleMimeType)) {
         assetLoaderInputTracker.registerSampleExporter(
-            C.TRACK_TYPE_AUDIO,
+            TRACK_TYPE_AUDIO,
             new AudioSampleExporter(
                 firstAssetLoaderInputFormat,
                 /* firstInputFormat= */ assetLoaderOutputFormat,
@@ -568,9 +570,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
                 encoderFactory,
                 muxerWrapper,
                 fallbackListener));
       } else {
-        ImmutableList<Effect> compositionVideoEffects = composition.effects.videoEffects;
-
         // TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
         assetLoaderInputTracker.registerSampleExporter(
             C.TRACK_TYPE_VIDEO,
@@ -578,14 +578,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
                 context,
                 firstAssetLoaderInputFormat,
                 transformationRequest,
-                compositionVideoEffects,
+                composition.effects.videoEffects,
                 videoFrameProcessorFactory,
                 encoderFactory,
                 muxerWrapper,
                 /* errorConsumer= */ this::onError,
                 fallbackListener,
                 debugViewProvider,
-                videoSampleTimestampOffsetUs));
+                videoSampleTimestampOffsetUs,
+                /* hasMultipleInputs= */ assetLoaderInputTracker
+                    .hasMultipleConcurrentVideoTracks()));
       }
     }
@@ -659,7 +661,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     boolean shouldTranscode = false;
     if (!assetLoaderCanOutputEncoded) {
       shouldTranscode = true;
-    } else if (trackType == C.TRACK_TYPE_AUDIO) {
+    } else if (trackType == TRACK_TYPE_AUDIO) {
       shouldTranscode = shouldTranscodeAudio(inputFormat);
     } else if (trackType == C.TRACK_TYPE_VIDEO) {
       shouldTranscode = shouldTranscodeVideo(inputFormat);
@@ -891,7 +893,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       for (int i = 0; i < sequencesMetadata.size(); i++) {
         SparseArray<Format> trackTypeToFirstAssetLoaderInputFormat =
             sequencesMetadata.get(i).trackTypeToFirstAssetLoaderInputFormat;
-        if (contains(trackTypeToFirstAssetLoaderInputFormat, C.TRACK_TYPE_AUDIO)) {
+        if (contains(trackTypeToFirstAssetLoaderInputFormat, TRACK_TYPE_AUDIO)) {
           outputHasAudio = true;
         }
         if (contains(trackTypeToFirstAssetLoaderInputFormat, C.TRACK_TYPE_VIDEO)) {
@@ -901,6 +903,25 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return (outputHasAudio ? 1 : 0) + (outputHasVideo ? 1 : 0);
     }

+    /**
+     * Returns whether more than one {@link EditedMediaItemSequence EditedMediaItemSequences} have
+     * video tracks.
+     */
+    public boolean hasMultipleConcurrentVideoTracks() {
+      if (sequencesMetadata.size() < 2) {
+        return false;
+      }
+
+      int numberOfVideoTracks = 0;
+      for (int i = 0; i < sequencesMetadata.size(); i++) {
+        if (contains(
+            sequencesMetadata.get(i).trackTypeToFirstAssetLoaderInputFormat, TRACK_TYPE_VIDEO)) {
+          numberOfVideoTracks++;
+        }
+      }
+      return numberOfVideoTracks > 1;
+    }
+
     /** Registers a {@link SampleExporter} for the given {@link C.TrackType trackType}. */
     public void registerSampleExporter(int trackType, SampleExporter sampleExporter) {
       checkState(

File: VideoGraph.java (package androidx.media3.transformer)

@@ -68,14 +68,23 @@ import java.util.concurrent.Executor;
     *
     * @param width The new output width in pixels.
     * @param height The new output height in pixels.
-    * @return A {@link SurfaceInfo} to which {@link SingleInputVideoGraph} renders to, or {@code
-    *     null} if the output is not needed.
+    * @return A {@link SurfaceInfo} to which the {@link VideoGraph} renders, or {@code null} if
+    *     the output is not needed.
     */
+   // TODO - b/289985577: Consider returning void from this method.
    @Nullable
    SurfaceInfo onOutputSizeChanged(int width, int height);

-   /** Called after the {@link SingleInputVideoGraph} has rendered its final output frame. */
+   /** Called after the {@link VideoGraph} has rendered its final output frame. */
    void onEnded(long finalFramePresentationTimeUs);
+
+   /**
+    * Called when an exception occurs during video frame processing.
+    *
+    * <p>If this is called, the calling {@link VideoGraph} must immediately be {@linkplain
+    * #release() released}.
+    */
+   void onError(VideoFrameProcessingException exception);
  }

 /**
@@ -93,15 +102,21 @@ import java.util.concurrent.Executor;
    * <p>This method must be called after successfully {@linkplain #initialize() initializing} the
    * {@code VideoGraph}.
    *
+   * <p>This method must be called exactly once for every input stream.
+   *
    * <p>If the method throws any {@link Exception}, the caller must call {@link #release}.
    */
-  GraphInput getInput() throws VideoFrameProcessingException;
+  GraphInput createInput() throws VideoFrameProcessingException;

   /**
    * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp.
    */
   boolean hasProducedFrameWithTimestampZero();

-  /** Releases the associated resources. */
+  /**
+   * Releases the associated resources.
+   *
+   * <p>This {@code VideoGraph} instance must not be used after this method is called.
+   */
   void release();
 }
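
The new onError callback makes the release contract explicit. A minimal Listener sketch honoring it, assuming a videoGraph reference is available and no output surface is needed:

    VideoGraph.Listener graphListener =
        new VideoGraph.Listener() {
          @Nullable
          @Override
          public SurfaceInfo onOutputSizeChanged(int width, int height) {
            return null; // No output surface needed in this sketch.
          }

          @Override
          public void onEnded(long finalFramePresentationTimeUs) {
            // The final frame has been rendered.
          }

          @Override
          public void onError(VideoFrameProcessingException exception) {
            // Per the contract above, release the graph immediately.
            videoGraph.release();
          }
        };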

File: VideoSampleExporter.java (package androidx.media3.transformer)

@@ -84,7 +84,8 @@ import org.checkerframework.dataflow.qual.Pure;
       Consumer<ExportException> errorConsumer,
       FallbackListener fallbackListener,
       DebugViewProvider debugViewProvider,
-      long initialTimestampOffsetUs)
+      long initialTimestampOffsetUs,
+      boolean hasMultipleInputs)
       throws ExportException {
     // TODO(b/278259383) Consider delaying configuration of VideoSampleExporter to use the decoder
     // output format instead of the extractor output format, to match AudioSampleExporter behavior.
@@ -143,7 +144,9 @@ import org.checkerframework.dataflow.qual.Pure;
     videoGraph =
         new VideoGraphWrapper(
             context,
-            new SingleInputVideoGraph.Factory(videoFrameProcessorFactory),
+            hasMultipleInputs
+                ? new MultipleInputVideoGraph.Factory()
+                : new SingleInputVideoGraph.Factory(videoFrameProcessorFactory),
             videoGraphInputColor,
             videoGraphOutputColor,
             errorConsumer,
@@ -159,7 +162,7 @@ import org.checkerframework.dataflow.qual.Pure;
   public GraphInput getInput(EditedMediaItem editedMediaItem, Format format)
       throws ExportException {
     try {
-      return videoGraph.getInput();
+      return videoGraph.createInput();
     } catch (VideoFrameProcessingException e) {
       throw ExportException.createForVideoFrameProcessingException(e);
     }
@@ -517,14 +520,19 @@ import org.checkerframework.dataflow.qual.Pure;
       }
     }

+    @Override
+    public void onError(VideoFrameProcessingException e) {
+      errorConsumer.accept(ExportException.createForVideoFrameProcessingException(e));
+    }
+
     @Override
     public void initialize() throws VideoFrameProcessingException {
       videoGraph.initialize();
     }

     @Override
-    public GraphInput getInput() throws VideoFrameProcessingException {
-      return videoGraph.getInput();
+    public GraphInput createInput() throws VideoFrameProcessingException {
+      return videoGraph.createInput();
     }

     @Override