Avoid spinning between intermediate texture processors.

This change adds a new method, onReadyToAcceptInputFrame, to
GlTextureProcessor.InputListener and changes maybeQueueInputFrame
to queueInputFrame, removing the boolean return value.
This avoids retrying in ChainingGlTextureProcessorListener by only
feeding frames from the producing to the consuming GlTextureProcessor
when the consumer has capacity.

MediaPipeProcessor still needs to retry queueing input frames when its processing isn't 1:1 (the new handshake is sketched below, before the per-file diffs).

PiperOrigin-RevId: 466626369
Googler 2022-08-10 09:56:24 +00:00 committed by Marc Baechinger
parent 05fcda510a
commit 97e6a86d2b
10 changed files with 271 additions and 117 deletions
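
To make the new contract concrete, here is a minimal sketch of the listener-driven handshake, assuming only the GlTextureProcessor, InputListener, and TextureInfo APIs shown in this commit; the BackpressureFeeder class, its PendingFrame holder, and the onFrameAvailable entry point are hypothetical names used for illustration, not part of this change:

import androidx.media3.effect.GlTextureProcessor;
import androidx.media3.effect.TextureInfo;
import java.util.ArrayDeque;
import java.util.Queue;

/** Hypothetical feeder that queues frames to a consumer only when it has signaled capacity. */
final class BackpressureFeeder implements GlTextureProcessor.InputListener {

  /** Hypothetical holder for a frame that is waiting for consumer capacity. */
  private static final class PendingFrame {
    final TextureInfo texture;
    final long presentationTimeUs;

    PendingFrame(TextureInfo texture, long presentationTimeUs) {
      this.texture = texture;
      this.presentationTimeUs = presentationTimeUs;
    }
  }

  private final GlTextureProcessor consumer;
  private final Queue<PendingFrame> pendingFrames = new ArrayDeque<>();
  private int capacity; // Incremented once per onReadyToAcceptInputFrame call.

  BackpressureFeeder(GlTextureProcessor consumer) {
    this.consumer = consumer;
    // Callers register the feeder via consumer.setInputListener(feeder) after construction.
  }

  /** Hypothetical producer-side entry point, called whenever a new frame becomes available. */
  synchronized void onFrameAvailable(TextureInfo texture, long presentationTimeUs) {
    if (capacity > 0) {
      capacity--;
      // The real ChainingGlTextureProcessorListener submits this call to a task executor
      // rather than invoking it inline.
      consumer.queueInputFrame(texture, presentationTimeUs);
    } else {
      // No capacity yet: hold the frame until the consumer signals readiness.
      pendingFrames.add(new PendingFrame(texture, presentationTimeUs));
    }
  }

  @Override
  public synchronized void onReadyToAcceptInputFrame() {
    PendingFrame pendingFrame = pendingFrames.poll();
    if (pendingFrame == null) {
      capacity++; // Remember the capacity for the next incoming frame.
    } else {
      consumer.queueInputFrame(pendingFrame.texture, pendingFrame.presentationTimeUs);
    }
  }
}

ChainingGlTextureProcessorListener below implements the same idea between two real texture processors, additionally routing end-of-stream signals through the pending-frame queue so they stay ordered behind buffered frames.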


@@ -279,7 +279,12 @@ public final class TransformerActivity extends AppCompatActivity {
         Class<?> clazz = Class.forName("androidx.media3.demo.transformer.MediaPipeProcessor");
         Constructor<?> constructor =
             clazz.getConstructor(
-                Context.class, boolean.class, String.class, String.class, String.class);
+                Context.class,
+                boolean.class,
+                String.class,
+                boolean.class,
+                String.class,
+                String.class);
         effects.add(
             (GlEffect)
                 (Context context, boolean useHdr) -> {
@@ -289,6 +294,7 @@ public final class TransformerActivity extends AppCompatActivity {
                         context,
                         useHdr,
                         /* graphName= */ "edge_detector_mediapipe_graph.binarypb",
+                        /* isSingleFrameGraph= */ true,
                         /* inputStreamName= */ "input_video",
                         /* outputStreamName= */ "output_video");
                   } catch (Exception e) {


@@ -18,23 +18,35 @@ package androidx.media3.demo.transformer;
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;

 import android.content.Context;
 import android.opengl.EGL14;
+import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.FrameProcessingException;
 import androidx.media3.common.util.LibraryLoader;
+import androidx.media3.common.util.Util;
 import androidx.media3.effect.GlTextureProcessor;
 import androidx.media3.effect.TextureInfo;
 import com.google.mediapipe.components.FrameProcessor;
 import com.google.mediapipe.framework.AppTextureFrame;
 import com.google.mediapipe.framework.TextureFrame;
 import com.google.mediapipe.glutil.EglManager;
+import java.util.ArrayDeque;
+import java.util.Queue;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;

 /** Runs a MediaPipe graph on input frames. */
 /* package */ final class MediaPipeProcessor implements GlTextureProcessor {

+  private static final String THREAD_NAME = "Demo:MediaPipeProcessor";
+  private static final long RELEASE_WAIT_TIME_MS = 100;
+  private static final long RETRY_WAIT_TIME_MS = 1;
+
   private static final LibraryLoader LOADER =
       new LibraryLoader("mediapipe_jni") {
         @Override
@@ -55,6 +67,9 @@ import java.util.concurrent.ConcurrentHashMap;
   private final FrameProcessor frameProcessor;
   private final ConcurrentHashMap<TextureInfo, TextureFrame> outputFrames;
+  private final boolean isSingleFrameGraph;
+  @Nullable private final ExecutorService singleThreadExecutorService;
+  private final Queue<Future<?>> futures;

   private InputListener inputListener;
   private OutputListener outputListener;
@@ -64,10 +79,16 @@ import java.util.concurrent.ConcurrentHashMap;
   /**
    * Creates a new texture processor that wraps a MediaPipe graph.
    *
+   * <p>If {@code isSingleFrameGraph} is {@code false}, the {@code MediaPipeProcessor} may waste CPU
+   * time by continuously attempting to queue input frames to MediaPipe until they are accepted or
+   * waste memory if MediaPipe accepts and stores many frames internally.
+   *
    * @param context The {@link Context}.
    * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
    *     in linear RGB BT.2020. If {@code false}, colors will be in gamma RGB BT.709.
    * @param graphName Name of a MediaPipe graph asset to load.
+   * @param isSingleFrameGraph Whether the MediaPipe graph will eventually produce one output frame
+   *     each time an input frame (and no other input) has been queued.
    * @param inputStreamName Name of the input video stream in the graph.
   * @param outputStreamName Name of the output video stream in the graph.
   */
@@ -75,11 +96,17 @@ import java.util.concurrent.ConcurrentHashMap;
       Context context,
       boolean useHdr,
       String graphName,
+      boolean isSingleFrameGraph,
       String inputStreamName,
       String outputStreamName) {
     checkState(LOADER.isAvailable());
     // TODO(b/227624622): Confirm whether MediaPipeProcessor could support HDR colors.
     checkArgument(!useHdr, "MediaPipeProcessor does not support HDR colors.");
+    this.isSingleFrameGraph = isSingleFrameGraph;
+    singleThreadExecutorService =
+        isSingleFrameGraph ? null : Util.newSingleThreadExecutor(THREAD_NAME);
+    futures = new ArrayDeque<>();
     inputListener = new InputListener() {};
     outputListener = new OutputListener() {};
     errorListener = (frameProcessingException) -> {};
@@ -96,6 +123,9 @@ import java.util.concurrent.ConcurrentHashMap;
   @Override
   public void setInputListener(InputListener inputListener) {
     this.inputListener = inputListener;
+    if (!isSingleFrameGraph || outputFrames.isEmpty()) {
+      inputListener.onReadyToAcceptInputFrame();
+    }
   }

   @Override
@@ -122,13 +152,32 @@ import java.util.concurrent.ConcurrentHashMap;
   }

   @Override
-  public boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
-    acceptedFrame = false;
+  public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
     AppTextureFrame appTextureFrame =
         new AppTextureFrame(inputTexture.texId, inputTexture.width, inputTexture.height);
     // TODO(b/238302213): Handle timestamps restarting from 0 when applying effects to a playlist.
     // MediaPipe will fail if the timestamps are not monotonically increasing.
+    // Also make sure that a MediaPipe graph producing additional frames only starts producing
+    // frames for the next MediaItem after receiving the first frame of that MediaItem as input
+    // to avoid MediaPipe producing extra frames after the last MediaItem has ended.
     appTextureFrame.setTimestamp(presentationTimeUs);
+    if (isSingleFrameGraph) {
+      boolean acceptedFrame = maybeQueueInputFrameSynchronous(appTextureFrame, inputTexture);
+      checkState(
+          acceptedFrame,
+          "queueInputFrame must only be called when a new input frame can be accepted");
+      return;
+    }
+    // TODO(b/241782273): Avoid retrying continuously until the frame is accepted by using a
+    // currently non-existent MediaPipe API to be notified when MediaPipe has capacity to accept a
+    // new frame.
+    queueInputFrameAsynchronous(appTextureFrame, inputTexture);
+  }
+
+  private boolean maybeQueueInputFrameSynchronous(
+      AppTextureFrame appTextureFrame, TextureInfo inputTexture) {
+    acceptedFrame = false;
     frameProcessor.onNewFrame(appTextureFrame);
     try {
       appTextureFrame.waitUntilReleasedWithGpuSync();
@@ -136,23 +185,98 @@ import java.util.concurrent.ConcurrentHashMap;
       Thread.currentThread().interrupt();
       errorListener.onFrameProcessingError(new FrameProcessingException(e));
     }
+    if (acceptedFrame) {
       inputListener.onInputFrameProcessed(inputTexture);
+    }
     return acceptedFrame;
   }

+  private void queueInputFrameAsynchronous(
+      AppTextureFrame appTextureFrame, TextureInfo inputTexture) {
+    removeFinishedFutures();
+    futures.add(
+        checkStateNotNull(singleThreadExecutorService)
+            .submit(
+                () -> {
+                  while (!maybeQueueInputFrameSynchronous(appTextureFrame, inputTexture)) {
+                    try {
+                      Thread.sleep(RETRY_WAIT_TIME_MS);
+                    } catch (InterruptedException e) {
+                      Thread.currentThread().interrupt();
+                      if (errorListener != null) {
+                        errorListener.onFrameProcessingError(new FrameProcessingException(e));
+                      }
+                    }
+                  }
+                  inputListener.onReadyToAcceptInputFrame();
+                }));
+  }
+
   @Override
   public void releaseOutputFrame(TextureInfo outputTexture) {
     checkStateNotNull(outputFrames.get(outputTexture)).release();
+    if (isSingleFrameGraph) {
+      inputListener.onReadyToAcceptInputFrame();
+    }
   }

   @Override
   public void release() {
+    if (isSingleFrameGraph) {
+      frameProcessor.close();
+      return;
+    }
+    Queue<Future<?>> futures = checkStateNotNull(this.futures);
+    while (!futures.isEmpty()) {
+      futures.remove().cancel(/* mayInterruptIfRunning= */ false);
+    }
+    ExecutorService singleThreadExecutorService =
+        checkStateNotNull(this.singleThreadExecutorService);
+    singleThreadExecutorService.shutdown();
+    try {
+      if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
+        errorListener.onFrameProcessingError(new FrameProcessingException("Release timed out"));
+      }
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      errorListener.onFrameProcessingError(new FrameProcessingException(e));
+    }
     frameProcessor.close();
   }

   @Override
   public final void signalEndOfCurrentInputStream() {
+    if (isSingleFrameGraph) {
       frameProcessor.waitUntilIdle();
       outputListener.onCurrentOutputStreamEnded();
+      return;
+    }
+    removeFinishedFutures();
+    futures.add(
+        checkStateNotNull(singleThreadExecutorService)
+            .submit(
+                () -> {
+                  frameProcessor.waitUntilIdle();
+                  outputListener.onCurrentOutputStreamEnded();
+                }));
   }

+  private void removeFinishedFutures() {
+    while (!futures.isEmpty()) {
+      if (!futures.element().isDone()) {
+        return;
+      }
+      try {
+        futures.remove().get();
+      } catch (ExecutionException e) {
+        errorListener.onFrameProcessingError(new FrameProcessingException(e));
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        errorListener.onFrameProcessingError(new FrameProcessingException(e));
+      }
+    }
+  }
 }


@@ -16,6 +16,9 @@
 package androidx.media3.effect;

 import android.util.Pair;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.effect.GlTextureProcessor.InputListener;
 import androidx.media3.effect.GlTextureProcessor.OutputListener;
 import java.util.ArrayDeque;
@@ -33,8 +36,13 @@ import java.util.Queue;
   private final GlTextureProcessor producingGlTextureProcessor;
   private final GlTextureProcessor consumingGlTextureProcessor;
   private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
+
+  @GuardedBy("this")
   private final Queue<Pair<TextureInfo, Long>> availableFrames;

+  @GuardedBy("this")
+  private int nextGlTextureProcessorInputCapacity;
+
   /**
    * Creates a new instance.
    *
@@ -57,6 +65,26 @@ import java.util.Queue;
     availableFrames = new ArrayDeque<>();
   }

+  @Override
+  public synchronized void onReadyToAcceptInputFrame() {
+    @Nullable Pair<TextureInfo, Long> pendingFrame = availableFrames.poll();
+    if (pendingFrame == null) {
+      nextGlTextureProcessorInputCapacity++;
+      return;
+    }
+
+    long presentationTimeUs = pendingFrame.second;
+    if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
+      frameProcessingTaskExecutor.submit(
+          consumingGlTextureProcessor::signalEndOfCurrentInputStream);
+    } else {
+      frameProcessingTaskExecutor.submit(
+          () ->
+              consumingGlTextureProcessor.queueInputFrame(
+                  /* inputTexture= */ pendingFrame.first, presentationTimeUs));
+    }
+  }
+
   @Override
   public void onInputFrameProcessed(TextureInfo inputTexture) {
     frameProcessingTaskExecutor.submit(
@@ -64,27 +92,26 @@ import java.util.Queue;
   }

   @Override
-  public void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs) {
-    frameProcessingTaskExecutor.submit(
-        () -> {
-          availableFrames.add(new Pair<>(outputTexture, presentationTimeUs));
-          processFrameNowOrLater();
-        });
-  }
-
-  private void processFrameNowOrLater() {
-    Pair<TextureInfo, Long> pendingFrame = availableFrames.element();
-    TextureInfo outputTexture = pendingFrame.first;
-    long presentationTimeUs = pendingFrame.second;
-    if (consumingGlTextureProcessor.maybeQueueInputFrame(outputTexture, presentationTimeUs)) {
-      availableFrames.remove();
+  public synchronized void onOutputFrameAvailable(
+      TextureInfo outputTexture, long presentationTimeUs) {
+    if (nextGlTextureProcessorInputCapacity > 0) {
+      frameProcessingTaskExecutor.submit(
+          () ->
+              consumingGlTextureProcessor.queueInputFrame(
+                  /* inputTexture= */ outputTexture, presentationTimeUs));
+      nextGlTextureProcessorInputCapacity--;
     } else {
-      frameProcessingTaskExecutor.submit(this::processFrameNowOrLater);
+      availableFrames.add(new Pair<>(outputTexture, presentationTimeUs));
     }
   }

   @Override
-  public void onCurrentOutputStreamEnded() {
-    frameProcessingTaskExecutor.submit(consumingGlTextureProcessor::signalEndOfCurrentInputStream);
+  public synchronized void onCurrentOutputStreamEnded() {
+    if (!availableFrames.isEmpty()) {
+      availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE));
+    } else {
+      frameProcessingTaskExecutor.submit(
+          consumingGlTextureProcessor::signalEndOfCurrentInputStream);
+    }
   }
 }


@@ -31,4 +31,11 @@ package androidx.media3.effect;
    *     android.graphics.SurfaceTexture#getTransformMatrix(float[]) transform matrix}.
    */
   void setTextureTransformMatrix(float[] textureTransformMatrix);
+
+  /**
+   * Returns whether another input frame can be {@linkplain #queueInputFrame(TextureInfo, long)
+   * queued}.
+   */
+  // TODO(b/227625423): Remove this method and use the input listener instead.
+  boolean acceptsInputFrame();
 }


@@ -119,6 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public void setInputListener(InputListener inputListener) {
     this.inputListener = inputListener;
+    inputListener.onReadyToAcceptInputFrame();
   }

   @Override
@@ -134,13 +135,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
+  public boolean acceptsInputFrame() {
+    return true;
+  }
+
+  @Override
+  public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
     checkState(!streamOffsetUsQueue.isEmpty(), "No input stream specified.");
     try {
       synchronized (this) {
         if (!ensureConfigured(inputTexture.width, inputTexture.height)) {
-          return false;
+          inputListener.onInputFrameProcessed(inputTexture);
+          return; // Drop frames when there is no output surface.
         }

         EGLSurface outputEglSurface = this.outputEglSurface;
@@ -181,7 +188,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       }
     }
     inputListener.onInputFrameProcessed(inputTexture);
-    return true;
+    inputListener.onReadyToAcceptInputFrame();
   }

   @EnsuresNonNullIf(


@@ -386,13 +386,14 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
     checkState(inputTextureInUse);
     FrameInfo inputFrameInfo = checkStateNotNull(pendingInputFrames.peek());
-    if (inputExternalTextureProcessor.maybeQueueInputFrame(
+    if (inputExternalTextureProcessor.acceptsInputFrame()) {
+      inputExternalTextureProcessor.queueInputFrame(
           new TextureInfo(
               inputExternalTextureId,
               /* fboId= */ C.INDEX_UNSET,
               inputFrameInfo.width,
               inputFrameInfo.height),
-        presentationTimeUs)) {
+          presentationTimeUs);
       inputTextureInUse = false;
       pendingInputFrames.remove();
       // After the externalTextureProcessor has produced an output frame, it is processed


@@ -22,7 +22,7 @@ import androidx.media3.common.util.UnstableApi;
  * Processes frames from one OpenGL 2D texture to another.
  *
  * <p>The {@code GlTextureProcessor} consumes input frames it accepts via {@link
- * #maybeQueueInputFrame(TextureInfo, long)} and surrenders each texture back to the caller via its
+ * #queueInputFrame(TextureInfo, long)} and surrenders each texture back to the caller via its
  * {@linkplain InputListener#onInputFrameProcessed(TextureInfo) listener} once the texture's
  * contents have been processed.
  *
@@ -51,11 +51,19 @@ public interface GlTextureProcessor {
    * <p>This listener can be called from any thread.
    */
   interface InputListener {
+    /**
+     * Called when the {@link GlTextureProcessor} is ready to accept another input frame.
+     *
+     * <p>For each time this method is called, {@link #queueInputFrame(TextureInfo, long)} can be
+     * called once.
+     */
+    default void onReadyToAcceptInputFrame() {}
+
     /**
      * Called when the {@link GlTextureProcessor} has processed an input frame.
      *
      * @param inputTexture The {@link TextureInfo} that was used to {@linkplain
-     *     #maybeQueueInputFrame(TextureInfo, long) queue} the input frame.
+     *     #queueInputFrame(TextureInfo, long) queue} the input frame.
      */
     default void onInputFrameProcessed(TextureInfo inputTexture) {}
   }
@@ -114,19 +122,17 @@ public interface GlTextureProcessor {
   /**
    * Processes an input frame if possible.
    *
-   * <p>If this method returns {@code true} the input frame has been accepted. The {@code
-   * GlTextureProcessor} owns the accepted frame until it calls {@link
+   * <p>The {@code GlTextureProcessor} owns the accepted frame until it calls {@link
    * InputListener#onInputFrameProcessed(TextureInfo)}. The caller should not overwrite or release
    * the texture before the {@code GlTextureProcessor} has finished processing it.
    *
-   * <p>If this method returns {@code false}, the input frame could not be accepted and the caller
-   * should decide whether to drop the frame or try again later.
+   * <p>This method must only be called when the {@code GlTextureProcessor} can {@linkplain
+   * InputListener#onReadyToAcceptInputFrame() accept an input frame}.
    *
    * @param inputTexture A {@link TextureInfo} describing the texture containing the input frame.
    * @param presentationTimeUs The presentation timestamp of the input frame, in microseconds.
-   * @return Whether the frame was accepted.
    */
-  boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs);
+  void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs);

   /**
    * Notifies the texture processor that the frame on the given output texture is no longer used and


@@ -15,6 +15,8 @@
  */
 package androidx.media3.effect;

+import static androidx.media3.common.util.Assertions.checkState;
+
 import android.util.Pair;
 import androidx.annotation.CallSuper;
 import androidx.media3.common.FrameProcessingException;
@@ -36,6 +38,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 @UnstableApi
 public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {

+  private final boolean useHdr;
+
   private InputListener inputListener;
   private OutputListener outputListener;
   private ErrorListener errorListener;
@@ -43,7 +47,6 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
   private int inputHeight;
   private @MonotonicNonNull TextureInfo outputTexture;
   private boolean outputTextureInUse;
-  private final boolean useHdr;

   /**
    * Creates a {@code SingleFrameGlTextureProcessor} instance.
@@ -90,6 +93,9 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
   @Override
   public final void setInputListener(InputListener inputListener) {
     this.inputListener = inputListener;
+    if (!outputTextureInUse) {
+      inputListener.onReadyToAcceptInputFrame();
+    }
   }

   @Override
@@ -102,12 +108,17 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
     this.errorListener = errorListener;
   }

   @Override
-  public final boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
-    if (outputTextureInUse) {
-      return false;
-    }
+  public final boolean acceptsInputFrame() {
+    return !outputTextureInUse;
+  }
+
+  @Override
+  public final void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
+    checkState(
+        !outputTextureInUse,
+        "The texture processor does not currently accept input frames. Release prior output frames"
+            + " first.");
     try {
       if (outputTexture == null
           || inputTexture.width != inputWidth
@@ -127,7 +138,6 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
               ? (FrameProcessingException) e
               : new FrameProcessingException(e));
     }
-    return true;
   }

   @EnsuresNonNull("outputTexture")
@@ -151,6 +161,7 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
   @Override
   public final void releaseOutputFrame(TextureInfo outputTexture) {
     outputTextureInUse = false;
+    inputListener.onReadyToAcceptInputFrame();
   }

   @Override


@@ -15,11 +15,17 @@
  */
 package androidx.media3.effect;

+import androidx.media3.common.C;
 import androidx.media3.common.util.UnstableApi;

 /** Contains information describing an OpenGL texture. */
 @UnstableApi
 public final class TextureInfo {

+  /** A {@link TextureInfo} instance with all fields unset. */
+  public static final TextureInfo UNSET =
+      new TextureInfo(C.INDEX_UNSET, C.INDEX_UNSET, C.LENGTH_UNSET, C.LENGTH_UNSET);
+
   /** The OpenGL texture identifier. */
   public final int texId;
   /** Identifier of a framebuffer object associated with the texture. */


@@ -16,8 +16,6 @@
 package androidx.media3.effect;

 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;

 import androidx.media3.common.FrameProcessor;
@@ -32,16 +30,17 @@ import org.junit.runner.RunWith;
 public final class ChainingGlTextureProcessorListenerTest {
   private static final long EXECUTOR_WAIT_TIME_MS = 100;

+  private final FrameProcessor.Listener mockFrameProcessorListener =
+      mock(FrameProcessor.Listener.class);
   private final FrameProcessingTaskExecutor frameProcessingTaskExecutor =
       new FrameProcessingTaskExecutor(
-          Util.newSingleThreadExecutor("Test"), mock(FrameProcessor.Listener.class));
+          Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener);
   private final GlTextureProcessor mockProducingGlTextureProcessor = mock(GlTextureProcessor.class);
-  private final FakeGlTextureProcessor fakeConsumingGlTextureProcessor =
-      spy(new FakeGlTextureProcessor());
+  private final GlTextureProcessor mockConsumingGlTextureProcessor = mock(GlTextureProcessor.class);
   private final ChainingGlTextureProcessorListener chainingGlTextureProcessorListener =
       new ChainingGlTextureProcessorListener(
           mockProducingGlTextureProcessor,
-          fakeConsumingGlTextureProcessor,
+          mockConsumingGlTextureProcessor,
           frameProcessingTaskExecutor);

   @After
@@ -62,35 +61,35 @@ public final class ChainingGlTextureProcessorListenerTest {
   }

   @Test
-  public void onOutputFrameAvailable_passesFrameToNextGlTextureProcessor()
+  public void onOutputFrameAvailable_afterAcceptsInputFrame_passesFrameToNextGlTextureProcessor()
+      throws InterruptedException {
+    TextureInfo texture =
+        new TextureInfo(/* texId= */ 1, /* fboId= */ 1, /* width= */ 100, /* height= */ 100);
+    long presentationTimeUs = 123;
+
+    chainingGlTextureProcessorListener.onReadyToAcceptInputFrame();
+    chainingGlTextureProcessorListener.onOutputFrameAvailable(texture, presentationTimeUs);
+    Thread.sleep(EXECUTOR_WAIT_TIME_MS);
+
+    verify(mockConsumingGlTextureProcessor).queueInputFrame(texture, presentationTimeUs);
+  }
+
+  @Test
+  public void onOutputFrameAvailable_beforeAcceptsInputFrame_passesFrameToNextGlTextureProcessor()
       throws InterruptedException {
     TextureInfo texture =
         new TextureInfo(/* texId= */ 1, /* fboId= */ 1, /* width= */ 100, /* height= */ 100);
     long presentationTimeUs = 123;

     chainingGlTextureProcessorListener.onOutputFrameAvailable(texture, presentationTimeUs);
+    chainingGlTextureProcessorListener.onReadyToAcceptInputFrame();
     Thread.sleep(EXECUTOR_WAIT_TIME_MS);

-    verify(fakeConsumingGlTextureProcessor).maybeQueueInputFrame(texture, presentationTimeUs);
+    verify(mockConsumingGlTextureProcessor).queueInputFrame(texture, presentationTimeUs);
   }

   @Test
-  public void onOutputFrameAvailable_nextGlTextureProcessorRejectsFrame_triesAgain()
-      throws InterruptedException {
-    TextureInfo texture =
-        new TextureInfo(/* texId= */ 1, /* fboId= */ 1, /* width= */ 100, /* height= */ 100);
-    long presentationTimeUs = 123;
-
-    fakeConsumingGlTextureProcessor.rejectNextFrame();
-    chainingGlTextureProcessorListener.onOutputFrameAvailable(texture, presentationTimeUs);
-    Thread.sleep(EXECUTOR_WAIT_TIME_MS);
-
-    verify(fakeConsumingGlTextureProcessor, times(2))
-        .maybeQueueInputFrame(texture, presentationTimeUs);
-  }
-
-  @Test
-  public void onOutputFrameAvailable_twoFramesWithFirstRejected_retriesFirstBeforeSecond()
+  public void onOutputFrameAvailable_twoFrames_passesFirstBeforeSecondToNextGlTextureProcessor()
       throws InterruptedException {
     TextureInfo firstTexture =
         new TextureInfo(/* texId= */ 1, /* fboId= */ 1, /* width= */ 100, /* height= */ 100);
@@ -98,18 +97,18 @@ public final class ChainingGlTextureProcessorListenerTest {
     TextureInfo secondTexture =
         new TextureInfo(/* texId= */ 2, /* fboId= */ 2, /* width= */ 100, /* height= */ 100);
     long secondPresentationTimeUs = 567;

-    fakeConsumingGlTextureProcessor.rejectNextFrame();
     chainingGlTextureProcessorListener.onOutputFrameAvailable(
         firstTexture, firstPresentationTimeUs);
     chainingGlTextureProcessorListener.onOutputFrameAvailable(
         secondTexture, secondPresentationTimeUs);
+    chainingGlTextureProcessorListener.onReadyToAcceptInputFrame();
+    chainingGlTextureProcessorListener.onReadyToAcceptInputFrame();
     Thread.sleep(EXECUTOR_WAIT_TIME_MS);

-    verify(fakeConsumingGlTextureProcessor, times(2))
-        .maybeQueueInputFrame(firstTexture, firstPresentationTimeUs);
-    verify(fakeConsumingGlTextureProcessor)
-        .maybeQueueInputFrame(secondTexture, secondPresentationTimeUs);
+    verify(mockConsumingGlTextureProcessor).queueInputFrame(firstTexture, firstPresentationTimeUs);
+    verify(mockConsumingGlTextureProcessor)
+        .queueInputFrame(secondTexture, secondPresentationTimeUs);
   }

   @Test
@@ -118,46 +117,6 @@ public final class ChainingGlTextureProcessorListenerTest {
     chainingGlTextureProcessorListener.onCurrentOutputStreamEnded();
     Thread.sleep(EXECUTOR_WAIT_TIME_MS);

-    verify(fakeConsumingGlTextureProcessor).signalEndOfCurrentInputStream();
+    verify(mockConsumingGlTextureProcessor).signalEndOfCurrentInputStream();
   }
-
-  private static class FakeGlTextureProcessor implements GlTextureProcessor {
-
-    private volatile boolean rejectNextFrame;
-
-    public void rejectNextFrame() {
-      rejectNextFrame = true;
-    }
-
-    @Override
-    public void setInputListener(InputListener inputListener) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void setOutputListener(OutputListener outputListener) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void setErrorListener(ErrorListener errorListener) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
-      boolean acceptFrame = !rejectNextFrame;
-      rejectNextFrame = false;
-      return acceptFrame;
-    }
-
-    @Override
-    public void releaseOutputFrame(TextureInfo outputTexture) {}
-
-    @Override
-    public void signalEndOfCurrentInputStream() {}
-
-    @Override
-    public void release() {}
-  }
 }