Split GlTextureProcessor.Listener into input/output/error listeners.

This simplifies ChainingGlTextureProcessorListener, as it now only connects a
consuming and a producing GlTextureProcessor rather than a previous,
current, and next GlTextureProcessor.

Also use default no-op implementations of the listeners in
SingleFrameGlTextureProcessor and MediaPipeProcessor to avoid
null-checks.

PiperOrigin-RevId: 466301642
(cherry picked from commit 7dc05edbab233dcd631a4a78ad65f1c30a1e9734)
Googler 2022-08-09 10:05:14 +00:00 committed by microkatz
parent 37274c91de
commit cdd47ff703
7 changed files with 183 additions and 168 deletions
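Before the per-file diffs, here is a condensed sketch of how adjacent processors are now wired together, based on the new GlEffectsFrameProcessor code shown below (local variable names are shortened here for readability; everything else follows the diff):

private static void chainTextureProcessorsWithListeners(
    ImmutableList<GlTextureProcessor> textureProcessors,
    FrameProcessingTaskExecutor frameProcessingTaskExecutor,
    FrameProcessor.Listener frameProcessorListener) {
  for (int i = 0; i < textureProcessors.size() - 1; i++) {
    GlTextureProcessor producer = textureProcessors.get(i);
    GlTextureProcessor consumer = textureProcessors.get(i + 1);
    // One chaining listener per adjacent pair: it is the producer's OutputListener and the
    // consumer's InputListener, so output frames flow forward and processed input textures
    // are released back to the producer.
    ChainingGlTextureProcessorListener chainingListener =
        new ChainingGlTextureProcessorListener(producer, consumer, frameProcessingTaskExecutor);
    producer.setOutputListener(chainingListener);
    consumer.setInputListener(chainingListener);
    // Errors no longer pass through the chaining listener; they are reported directly to the
    // FrameProcessor.Listener.
    producer.setErrorListener(frameProcessorListener::onFrameProcessingError);
  }
}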

View File: MediaPipeProcessor.java

@@ -31,7 +31,6 @@ import com.google.mediapipe.framework.AppTextureFrame;
import com.google.mediapipe.framework.TextureFrame;
import com.google.mediapipe.glutil.EglManager;
import java.util.concurrent.ConcurrentHashMap;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Runs a MediaPipe graph on input frames. */
/* package */ final class MediaPipeProcessor implements GlTextureProcessor {
@@ -55,10 +54,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private final FrameProcessor frameProcessor;
private volatile GlTextureProcessor.@MonotonicNonNull Listener listener;
private volatile boolean acceptedFrame;
private final ConcurrentHashMap<TextureInfo, TextureFrame> outputFrames;
private InputListener inputListener;
private OutputListener outputListener;
private ErrorListener errorListener;
private boolean acceptedFrame;
/**
* Creates a new texture processor that wraps a MediaPipe graph.
*
@@ -78,11 +80,27 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState(LOADER.isAvailable());
// TODO(b/227624622): Confirm whether MediaPipeProcessor could support HDR colors.
checkArgument(!useHdr, "MediaPipeProcessor does not support HDR colors.");
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext());
frameProcessor =
new FrameProcessor(
context, eglManager.getNativeContext(), graphName, inputStreamName, outputStreamName);
outputFrames = new ConcurrentHashMap<>();
// OnWillAddFrameListener is called on the same thread as frameProcessor.onNewFrame(...), so no
// synchronization is needed for acceptedFrame.
frameProcessor.setOnWillAddFrameListener((long timestamp) -> acceptedFrame = true);
}
@Override
public void setInputListener(InputListener inputListener) {
this.inputListener = inputListener;
}
@Override
public void setOutputListener(OutputListener outputListener) {
this.outputListener = outputListener;
frameProcessor.setConsumer(
frame -> {
TextureInfo texture =
@@ -92,22 +110,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
frame.getWidth(),
frame.getHeight());
outputFrames.put(texture, frame);
if (listener != null) {
listener.onOutputFrameAvailable(texture, frame.getTimestamp());
}
outputListener.onOutputFrameAvailable(texture, frame.getTimestamp());
});
frameProcessor.setAsynchronousErrorListener(
error -> {
if (listener != null) {
listener.onFrameProcessingError(new FrameProcessingException(error));
}
});
frameProcessor.setOnWillAddFrameListener((long timestamp) -> acceptedFrame = true);
}
@Override
public void setListener(GlTextureProcessor.Listener listener) {
this.listener = listener;
public void setErrorListener(ErrorListener errorListener) {
this.errorListener = errorListener;
frameProcessor.setAsynchronousErrorListener(
error -> errorListener.onFrameProcessingError(new FrameProcessingException(error)));
}
@Override
@@ -123,13 +134,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
appTextureFrame.waitUntilReleasedWithGpuSync();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
if (listener != null) {
listener.onFrameProcessingError(new FrameProcessingException(e));
}
}
if (listener != null) {
listener.onInputFrameProcessed(inputTexture);
errorListener.onFrameProcessingError(new FrameProcessingException(e));
}
inputListener.onInputFrameProcessed(inputTexture);
return acceptedFrame;
}
@@ -146,8 +153,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public final void signalEndOfCurrentInputStream() {
frameProcessor.waitUntilIdle();
if (listener != null) {
listener.onCurrentOutputStreamEnded();
}
outputListener.onCurrentOutputStreamEnded();
}
}

View File: ChainingGlTextureProcessorListener.java

@@ -16,92 +16,75 @@
package androidx.media3.effect;
import android.util.Pair;
import androidx.annotation.Nullable;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.effect.GlTextureProcessor.InputListener;
import androidx.media3.effect.GlTextureProcessor.OutputListener;
import java.util.ArrayDeque;
import java.util.Queue;
/**
* A {@link GlTextureProcessor.Listener} that connects the {@link GlTextureProcessor} it is
* {@linkplain GlTextureProcessor#setListener(GlTextureProcessor.Listener) set} on to a previous and
* next {@link GlTextureProcessor}.
* Connects a producing and a consuming {@link GlTextureProcessor} instance.
*
* <p>This listener should be set as {@link InputListener} on the consuming {@link
* GlTextureProcessor} and as {@link OutputListener} on the producing {@link GlTextureProcessor}.
*/
/* package */ final class ChainingGlTextureProcessorListener
implements GlTextureProcessor.Listener {
implements GlTextureProcessor.InputListener, GlTextureProcessor.OutputListener {
@Nullable private final GlTextureProcessor previousGlTextureProcessor;
@Nullable private final GlTextureProcessor nextGlTextureProcessor;
private final GlTextureProcessor producingGlTextureProcessor;
private final GlTextureProcessor consumingGlTextureProcessor;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final FrameProcessor.Listener frameProcessorListener;
private final Queue<Pair<TextureInfo, Long>> pendingFrames;
private final Queue<Pair<TextureInfo, Long>> availableFrames;
/**
* Creates a new instance.
*
* @param previousGlTextureProcessor The {@link GlTextureProcessor} that comes before the {@link
* GlTextureProcessor} this listener is set on or {@code null} if not applicable.
* @param nextGlTextureProcessor The {@link GlTextureProcessor} that comes after the {@link
* GlTextureProcessor} this listener is set on or {@code null} if not applicable.
* @param producingGlTextureProcessor The {@link GlTextureProcessor} for which this listener will
* be set as {@link OutputListener}.
* @param consumingGlTextureProcessor The {@link GlTextureProcessor} for which this listener will
* be set as {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for
* OpenGL calls. All calls to the previous/next {@link GlTextureProcessor} will be executed by
* the {@link FrameProcessingTaskExecutor}. The caller is responsible for releasing the {@link
* FrameProcessingTaskExecutor}.
* @param frameProcessorListener The {@link FrameProcessor.Listener} to forward exceptions to.
* OpenGL calls. All calls to the producing/consuming {@link GlTextureProcessor} will be
* executed by the {@link FrameProcessingTaskExecutor}. The caller is responsible for
* releasing the {@link FrameProcessingTaskExecutor}.
*/
public ChainingGlTextureProcessorListener(
@Nullable GlTextureProcessor previousGlTextureProcessor,
@Nullable GlTextureProcessor nextGlTextureProcessor,
FrameProcessingTaskExecutor frameProcessingTaskExecutor,
FrameProcessor.Listener frameProcessorListener) {
this.previousGlTextureProcessor = previousGlTextureProcessor;
this.nextGlTextureProcessor = nextGlTextureProcessor;
GlTextureProcessor producingGlTextureProcessor,
GlTextureProcessor consumingGlTextureProcessor,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
this.producingGlTextureProcessor = producingGlTextureProcessor;
this.consumingGlTextureProcessor = consumingGlTextureProcessor;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.frameProcessorListener = frameProcessorListener;
pendingFrames = new ArrayDeque<>();
availableFrames = new ArrayDeque<>();
}
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
if (previousGlTextureProcessor != null) {
GlTextureProcessor nonNullPreviousGlTextureProcessor = previousGlTextureProcessor;
frameProcessingTaskExecutor.submit(
() -> nonNullPreviousGlTextureProcessor.releaseOutputFrame(inputTexture));
}
() -> producingGlTextureProcessor.releaseOutputFrame(inputTexture));
}
@Override
public void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs) {
if (nextGlTextureProcessor != null) {
GlTextureProcessor nonNullNextGlTextureProcessor = nextGlTextureProcessor;
frameProcessingTaskExecutor.submit(
() -> {
pendingFrames.add(new Pair<>(outputTexture, presentationTimeUs));
processFrameNowOrLater(nonNullNextGlTextureProcessor);
availableFrames.add(new Pair<>(outputTexture, presentationTimeUs));
processFrameNowOrLater();
});
}
}
private void processFrameNowOrLater(GlTextureProcessor nextGlTextureProcessor) {
Pair<TextureInfo, Long> pendingFrame = pendingFrames.element();
private void processFrameNowOrLater() {
Pair<TextureInfo, Long> pendingFrame = availableFrames.element();
TextureInfo outputTexture = pendingFrame.first;
long presentationTimeUs = pendingFrame.second;
if (nextGlTextureProcessor.maybeQueueInputFrame(outputTexture, presentationTimeUs)) {
pendingFrames.remove();
if (consumingGlTextureProcessor.maybeQueueInputFrame(outputTexture, presentationTimeUs)) {
availableFrames.remove();
} else {
frameProcessingTaskExecutor.submit(() -> processFrameNowOrLater(nextGlTextureProcessor));
frameProcessingTaskExecutor.submit(this::processFrameNowOrLater);
}
}
@Override
public void onCurrentOutputStreamEnded() {
if (nextGlTextureProcessor != null) {
frameProcessingTaskExecutor.submit(nextGlTextureProcessor::signalEndOfCurrentInputStream);
}
}
@Override
public void onFrameProcessingError(FrameProcessingException e) {
frameProcessorListener.onFrameProcessingError(e);
frameProcessingTaskExecutor.submit(consumingGlTextureProcessor::signalEndOfCurrentInputStream);
}
}

View File: FinalMatrixTransformationProcessorWrapper.java

@@ -78,7 +78,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private int inputHeight;
@Nullable private MatrixTransformationProcessor matrixTransformationProcessor;
@Nullable private SurfaceViewWrapper debugSurfaceViewWrapper;
private @MonotonicNonNull Listener listener;
private InputListener inputListener;
private @MonotonicNonNull Pair<Integer, Integer> outputSizeBeforeSurfaceTransformation;
@Nullable private SurfaceView debugSurfaceView;
@@ -113,18 +113,24 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
textureTransformMatrix = new float[16];
Matrix.setIdentityM(textureTransformMatrix, /* smOffset= */ 0);
streamOffsetUsQueue = new ArrayDeque<>();
inputListener = new InputListener() {};
}
/**
* {@inheritDoc}
*
* <p>The {@code FinalMatrixTransformationProcessorWrapper} will only call {@link
* Listener#onInputFrameProcessed(TextureInfo)}. Other events are handled via the {@link
* FrameProcessor.Listener} passed to the constructor.
*/
@Override
public void setListener(Listener listener) {
this.listener = listener;
public void setInputListener(InputListener inputListener) {
this.inputListener = inputListener;
}
@Override
public void setOutputListener(OutputListener outputListener) {
// The FrameProcessor.Listener passed to the constructor is used for output-related events.
throw new UnsupportedOperationException();
}
@Override
public void setErrorListener(ErrorListener errorListener) {
// The FrameProcessor.Listener passed to the constructor is used for errors.
throw new UnsupportedOperationException();
}
@Override
@@ -174,9 +180,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Log.d(TAG, "Error rendering to debug preview", e);
}
}
if (listener != null) {
listener.onInputFrameProcessed(inputTexture);
}
inputListener.onInputFrameProcessed(inputTexture);
return true;
}
@@ -278,8 +282,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void releaseOutputFrame(TextureInfo outputTexture) {
throw new UnsupportedOperationException(
"The final texture processor writes to a surface so there is no texture to release");
// The final texture processor writes to a surface so there is no texture to release.
throw new UnsupportedOperationException();
}
@Override

View File: GlEffectsFrameProcessor.java

@@ -219,26 +219,21 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private static void chainTextureProcessorsWithListeners(
ImmutableList<GlTextureProcessor> textureProcessors,
FrameProcessingTaskExecutor frameProcessingTaskExecutor,
FrameProcessor.Listener listener) {
for (int i = 0; i < textureProcessors.size(); i++) {
@Nullable
GlTextureProcessor previousGlTextureProcessor =
i - 1 >= 0 ? textureProcessors.get(i - 1) : null;
@Nullable
GlTextureProcessor nextGlTextureProcessor =
i + 1 < textureProcessors.size() ? textureProcessors.get(i + 1) : null;
textureProcessors
.get(i)
.setListener(
FrameProcessor.Listener frameProcessorListener) {
for (int i = 0; i < textureProcessors.size() - 1; i++) {
GlTextureProcessor producingGlTextureProcessor = textureProcessors.get(i);
GlTextureProcessor consumingGlTextureProcessor = textureProcessors.get(i + 1);
ChainingGlTextureProcessorListener chainingGlTextureProcessorListener =
new ChainingGlTextureProcessorListener(
previousGlTextureProcessor,
nextGlTextureProcessor,
frameProcessingTaskExecutor,
listener));
producingGlTextureProcessor,
consumingGlTextureProcessor,
frameProcessingTaskExecutor);
producingGlTextureProcessor.setOutputListener(chainingGlTextureProcessorListener);
producingGlTextureProcessor.setErrorListener(frameProcessorListener::onFrameProcessingError);
consumingGlTextureProcessor.setInputListener(chainingGlTextureProcessorListener);
}
}
private static final String TAG = "GlEffectsFrameProcessor";
private static final String THREAD_NAME = "Transformer:GlEffectsFrameProcessor";
private static final long RELEASE_WAIT_TIME_MS = 100;

View File: GlTextureProcessor.java

@@ -23,14 +23,14 @@ import androidx.media3.common.util.UnstableApi;
*
* <p>The {@code GlTextureProcessor} consumes input frames it accepts via {@link
* #maybeQueueInputFrame(TextureInfo, long)} and surrenders each texture back to the caller via its
* {@linkplain Listener#onInputFrameProcessed(TextureInfo) listener} once the texture's contents
* have been processed.
* {@linkplain InputListener#onInputFrameProcessed(TextureInfo) listener} once the texture's
* contents have been processed.
*
* <p>The {@code GlTextureProcessor} produces output frames asynchronously and notifies its owner
* when they are available via its {@linkplain Listener#onOutputFrameAvailable(TextureInfo, long)
* listener}. The {@code GlTextureProcessor} instance's owner must surrender the texture back to the
* {@code GlTextureProcessor} via {@link #releaseOutputFrame(TextureInfo)} when it has finished
* processing it.
* when they are available via its {@linkplain OutputListener#onOutputFrameAvailable(TextureInfo,
* long) listener}. The {@code GlTextureProcessor} instance's owner must surrender the texture back
* to the {@code GlTextureProcessor} via {@link #releaseOutputFrame(TextureInfo)} when it has
* finished processing it.
*
* <p>{@code GlTextureProcessor} implementations can choose to produce output frames before
* receiving input frames or process several input frames before producing an output frame. However,
@@ -46,19 +46,26 @@ import androidx.media3.common.util.UnstableApi;
public interface GlTextureProcessor {
/**
* Listener for frame processing events.
* Listener for input-related frame processing events.
*
* <p>This listener can be called from any thread.
*/
interface Listener {
interface InputListener {
/**
* Called when the {@link GlTextureProcessor} has processed an input frame.
*
* @param inputTexture The {@link TextureInfo} that was used to {@linkplain
* #maybeQueueInputFrame(TextureInfo, long) queue} the input frame.
*/
void onInputFrameProcessed(TextureInfo inputTexture);
default void onInputFrameProcessed(TextureInfo inputTexture) {}
}
/**
* Listener for output-related frame processing events.
*
* <p>This listener can be called from any thread.
*/
interface OutputListener {
/**
* Called when the {@link GlTextureProcessor} has produced an output frame.
*
@@ -71,14 +78,21 @@ public interface GlTextureProcessor {
* frame.
* @param presentationTimeUs The presentation timestamp of the output frame, in microseconds.
*/
void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs);
default void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs) {}
/**
* Called when the {@link GlTextureProcessor} will not produce further output frames belonging
* to the current output stream.
*/
void onCurrentOutputStreamEnded();
default void onCurrentOutputStreamEnded() {}
}
/**
* Listener for frame processing errors.
*
* <p>This listener can be called from any thread.
*/
interface ErrorListener {
/**
* Called when an exception occurs during asynchronous frame processing.
*
@@ -88,16 +102,22 @@ public interface GlTextureProcessor {
void onFrameProcessingError(FrameProcessingException e);
}
/** Sets the {@link Listener} for frame processing events. */
void setListener(Listener listener);
/** Sets the {@link InputListener}. */
void setInputListener(InputListener inputListener);
/** Sets the {@link OutputListener}. */
void setOutputListener(OutputListener outputListener);
/** Sets the {@link ErrorListener}. */
void setErrorListener(ErrorListener errorListener);
/**
* Processes an input frame if possible.
*
* <p>If this method returns {@code true} the input frame has been accepted. The {@code
* GlTextureProcessor} owns the accepted frame until it calls {@link
* Listener#onInputFrameProcessed(TextureInfo)}. The caller should not overwrite or release the
* texture before the {@code GlTextureProcessor} has finished processing it.
* InputListener#onInputFrameProcessed(TextureInfo)}. The caller should not overwrite or release
* the texture before the {@code GlTextureProcessor} has finished processing it.
*
* <p>If this method returns {@code false}, the input frame could not be accepted and the caller
* should decide whether to drop the frame or try again later.

View File: SingleFrameGlTextureProcessor.java

@@ -36,7 +36,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@UnstableApi
public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
private @MonotonicNonNull Listener listener;
private InputListener inputListener;
private OutputListener outputListener;
private ErrorListener errorListener;
private int inputWidth;
private int inputHeight;
private @MonotonicNonNull TextureInfo outputTexture;
@@ -51,6 +53,9 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
*/
public SingleFrameGlTextureProcessor(boolean useHdr) {
this.useHdr = useHdr;
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
}
/**
@@ -83,8 +88,18 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
throws FrameProcessingException;
@Override
public final void setListener(Listener listener) {
this.listener = listener;
public final void setInputListener(InputListener inputListener) {
this.inputListener = inputListener;
}
@Override
public final void setOutputListener(OutputListener outputListener) {
this.outputListener = outputListener;
}
@Override
public final void setErrorListener(ErrorListener errorListener) {
this.errorListener = errorListener;
}
@Override
@@ -104,18 +119,14 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
outputTexture.fboId, outputTexture.width, outputTexture.height);
GlUtil.clearOutputFrame();
drawFrame(inputTexture.texId, presentationTimeUs);
if (listener != null) {
listener.onInputFrameProcessed(inputTexture);
listener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
}
inputListener.onInputFrameProcessed(inputTexture);
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) {
if (listener != null) {
listener.onFrameProcessingError(
errorListener.onFrameProcessingError(
e instanceof FrameProcessingException
? (FrameProcessingException) e
: new FrameProcessingException(e));
}
}
return true;
}
@@ -144,9 +155,7 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor {
@Override
public final void signalEndOfCurrentInputStream() {
if (listener != null) {
listener.onCurrentOutputStreamEnded();
}
outputListener.onCurrentOutputStreamEnded();
}
@Override

View File: ChainingGlTextureProcessorListenerTest.java

@@ -20,7 +20,6 @@ import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@@ -33,35 +32,23 @@ import org.junit.runner.RunWith;
public final class ChainingGlTextureProcessorListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final FrameProcessor.Listener mockframeProcessorListener =
mock(FrameProcessor.Listener.class);
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"), mockframeProcessorListener);
private final GlTextureProcessor mockPreviousGlTextureProcessor = mock(GlTextureProcessor.class);
private final FakeGlTextureProcessor fakeNextGlTextureProcessor =
Util.newSingleThreadExecutor("Test"), mock(FrameProcessor.Listener.class));
private final GlTextureProcessor mockProducingGlTextureProcessor = mock(GlTextureProcessor.class);
private final FakeGlTextureProcessor fakeConsumingGlTextureProcessor =
spy(new FakeGlTextureProcessor());
private final ChainingGlTextureProcessorListener chainingGlTextureProcessorListener =
new ChainingGlTextureProcessorListener(
mockPreviousGlTextureProcessor,
fakeNextGlTextureProcessor,
frameProcessingTaskExecutor,
mockframeProcessorListener);
mockProducingGlTextureProcessor,
fakeConsumingGlTextureProcessor,
frameProcessingTaskExecutor);
@After
public void release() throws InterruptedException {
frameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
}
@Test
public void onFrameProcessingError_callsListener() {
FrameProcessingException exception = new FrameProcessingException("message");
chainingGlTextureProcessorListener.onFrameProcessingError(exception);
verify(mockframeProcessorListener, times(1)).onFrameProcessingError(exception);
}
@Test
public void onInputFrameProcessed_surrendersFrameToPreviousGlTextureProcessor()
throws InterruptedException {
@@ -71,7 +58,7 @@ public final class ChainingGlTextureProcessorListenerTest {
chainingGlTextureProcessorListener.onInputFrameProcessed(texture);
Thread.sleep(EXECUTOR_WAIT_TIME_MS);
verify(mockPreviousGlTextureProcessor, times(1)).releaseOutputFrame(texture);
verify(mockProducingGlTextureProcessor, times(1)).releaseOutputFrame(texture);
}
@Test
@@ -84,7 +71,8 @@ public final class ChainingGlTextureProcessorListenerTest {
chainingGlTextureProcessorListener.onOutputFrameAvailable(texture, presentationTimeUs);
Thread.sleep(EXECUTOR_WAIT_TIME_MS);
verify(fakeNextGlTextureProcessor, times(1)).maybeQueueInputFrame(texture, presentationTimeUs);
verify(fakeConsumingGlTextureProcessor, times(1))
.maybeQueueInputFrame(texture, presentationTimeUs);
}
@Test
@@ -93,12 +81,13 @@ public final class ChainingGlTextureProcessorListenerTest {
TextureInfo texture =
new TextureInfo(/* texId= */ 1, /* fboId= */ 1, /* width= */ 100, /* height= */ 100);
long presentationTimeUs = 123;
fakeNextGlTextureProcessor.rejectNextFrame();
fakeConsumingGlTextureProcessor.rejectNextFrame();
chainingGlTextureProcessorListener.onOutputFrameAvailable(texture, presentationTimeUs);
Thread.sleep(EXECUTOR_WAIT_TIME_MS);
verify(fakeNextGlTextureProcessor, times(2)).maybeQueueInputFrame(texture, presentationTimeUs);
verify(fakeConsumingGlTextureProcessor, times(2))
.maybeQueueInputFrame(texture, presentationTimeUs);
}
@Test
@@ -110,7 +99,7 @@ public final class ChainingGlTextureProcessorListenerTest {
TextureInfo secondTexture =
new TextureInfo(/* texId= */ 2, /* fboId= */ 2, /* width= */ 100, /* height= */ 100);
long secondPresentationTimeUs = 567;
fakeNextGlTextureProcessor.rejectNextFrame();
fakeConsumingGlTextureProcessor.rejectNextFrame();
chainingGlTextureProcessorListener.onOutputFrameAvailable(
firstTexture, firstPresentationTimeUs);
@@ -118,9 +107,9 @@ public final class ChainingGlTextureProcessorListenerTest {
secondTexture, secondPresentationTimeUs);
Thread.sleep(EXECUTOR_WAIT_TIME_MS);
verify(fakeNextGlTextureProcessor, times(2))
verify(fakeConsumingGlTextureProcessor, times(2))
.maybeQueueInputFrame(firstTexture, firstPresentationTimeUs);
verify(fakeNextGlTextureProcessor, times(1))
verify(fakeConsumingGlTextureProcessor, times(1))
.maybeQueueInputFrame(secondTexture, secondPresentationTimeUs);
}
@@ -130,7 +119,7 @@ public final class ChainingGlTextureProcessorListenerTest {
chainingGlTextureProcessorListener.onCurrentOutputStreamEnded();
Thread.sleep(EXECUTOR_WAIT_TIME_MS);
verify(fakeNextGlTextureProcessor, times(1)).signalEndOfCurrentInputStream();
verify(fakeConsumingGlTextureProcessor, times(1)).signalEndOfCurrentInputStream();
}
private static class FakeGlTextureProcessor implements GlTextureProcessor {
@@ -142,7 +131,17 @@ public final class ChainingGlTextureProcessorListenerTest {
}
@Override
public void setListener(Listener listener) {
public void setInputListener(InputListener inputListener) {
throw new UnsupportedOperationException();
}
@Override
public void setOutputListener(OutputListener outputListener) {
throw new UnsupportedOperationException();
}
@Override
public void setErrorListener(ErrorListener errorListener) {
throw new UnsupportedOperationException();
}