Add a listener that is invoked once one MediaItem is fully processed

Add `VideoFrameProcessor.registerInputStream()` to signal a new type of input.

Add `InputHandler.signalEndOfCurrentInputStream()` to signal partial input
stream completion to the `InputHandler`.

"Fully processed" means that `FinalShaderProgramWrapper` has released the last frame of the stream.

PiperOrigin-RevId: 527356646
This commit is contained in:
claincly 2023-04-26 21:20:48 +01:00 committed by Ian Baker
parent fdeeaba9d8
commit 12cac0d69f
9 changed files with 119 additions and 25 deletions

View File

@ -179,6 +179,15 @@ public interface VideoFrameProcessor {
*/
Surface getInputSurface();
/**
 * Informs the {@code VideoFrameProcessor} that a new input stream will be queued.
 *
 * <p>Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input
 * stream differs from that of the current input stream.
 *
 * @param inputType The {@link InputType} of the new input stream.
 */
// TODO(b/274109008) Merge this and setInputFrameInfo.
void registerInputStream(@InputType int inputType);
/**
* Sets information about the input frames.
*

View File

@ -350,10 +350,11 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
checkNotNull(defaultVideoFrameProcessor)
.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build());
// A frame needs to be registered despite not queuing any external input to ensure
// that
// the video frame processor knows about the stream offset.
// that the video frame processor knows about the stream offset.
defaultVideoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
defaultVideoFrameProcessor.registerInputFrame();
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput();
});
videoFrameProcessingEndedCountDownLatch.await();
@Nullable

View File

@ -48,9 +48,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs;
private boolean inputEnded;
private boolean useHdr;
private boolean outputEnded;
private volatile boolean inputEnded;
/**
* Creates a new instance.
@ -90,12 +89,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return 0;
}
@Override
public void signalEndOfCurrentInputStream() {
// Do nothing here. End of current input signaling is handled in maybeQueueToShaderProgram().
// NOTE(review): bitmaps are expanded into multiple frames, so stream completion is detected
// when the last frame of the last pending bitmap is queued, not at this call site.
}
@Override
public void signalEndOfInput() {
  // Runs on the GL task executor so that it is ordered with respect to queued bitmap work.
  videoFrameProcessingTaskExecutor.submit(
      () -> {
        if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
          // Nothing is pending, so the current (and last) input stream is already complete.
          shaderProgram.signalEndOfCurrentInputStream();
        } else {
          // Defer the end-of-stream signal until all pending bitmap frames have been queued.
          inputEnded = true;
        }
      });
}
@ -120,9 +127,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException {
this.useHdr = useHdr;
if (inputEnded) {
return;
}
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd));
@ -174,17 +178,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) {
pendingBitmaps.remove();
maybeSignalEndOfOutput();
}
}
private void maybeSignalEndOfOutput() {
if (framesToQueueForCurrentBitmap == 0
&& pendingBitmaps.isEmpty()
&& inputEnded
&& !outputEnded) {
shaderProgram.signalEndOfCurrentInputStream();
outputEnded = true;
if (pendingBitmaps.isEmpty() && inputEnded) {
// Only signal end of stream after all pending bitmaps are processed.
// TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream();
}
}
}

View File

@ -50,6 +50,8 @@ import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
@ -254,6 +256,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final boolean releaseFramesAutomatically;
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms;
// A queue of input streams that have not been fully processed identified by their input types.
private final Queue<@InputType Integer> unprocessedInputStreams;
@Nullable private volatile CountDownLatch latch;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
@ -272,6 +278,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
this.eglContext = eglContext;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
this.unprocessedInputStreams = new ConcurrentLinkedQueue<>();
checkState(!shaderPrograms.isEmpty());
checkState(getLast(shaderPrograms) instanceof FinalShaderProgramWrapper);
@ -296,6 +303,22 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
inputShaderProgram.setInputListener(inputHandler);
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
finalShaderProgramWrapper.setOnInputStreamProcessedListener(
() -> {
@InputType int currentInputType = unprocessedInputStreams.remove();
if (latch != null) {
latch.countDown();
}
if (currentInputType == INPUT_TYPE_BITMAP) {
// Remove all pending bitmap input, because BitmapTextureManager signals end of input
// after all queued bitmaps are processed.
while (!unprocessedInputStreams.isEmpty()
&& checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_BITMAP) {
unprocessedInputStreams.remove();
}
}
return inputStreamEnded && unprocessedInputStreams.isEmpty();
});
allShaderPrograms = shaderPrograms;
}
@ -343,6 +366,24 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
return inputHandler.getInputSurface();
}
@Override
// Registers a new input stream. If a previous stream is still unprocessed, signals its end and,
// for surface input, blocks until FinalShaderProgramWrapper reports it fully processed (the
// OnInputStreamProcessedListener counts down `latch`).
public void registerInputStream(@InputType int inputType) {
if (!unprocessedInputStreams.isEmpty()) {
inputHandler.signalEndOfCurrentInputStream();
// Wait until the current video is processed before continuing to the next input.
if (checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_SURFACE) {
latch = new CountDownLatch(1);
try {
latch.await();
} catch (InterruptedException e) {
// Preserve the interrupt status so callers can observe the interruption.
Thread.currentThread().interrupt();
Log.e(TAG, "Error waiting for end of stream " + e);
}
}
}
// Record the new stream; it is removed once FinalShaderProgramWrapper finishes processing it.
unprocessedInputStreams.add(inputType);
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
@ -382,7 +423,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void signalEndOfInput() {
  // May only be called once; inputStreamEnded is read by the stream-processed listener.
  checkState(!inputStreamEnded);
  inputStreamEnded = true;
  // Signal the end of the current (final) input stream before signaling overall end of input.
  inputHandler.signalEndOfCurrentInputStream();
  inputHandler.signalEndOfInput();
}
@Override

View File

@ -57,6 +57,9 @@ import java.util.concurrent.atomic.AtomicInteger;
// Read and written on the GL thread only.
private boolean inputStreamEnded;
// Read and written on the GL thread only.
private boolean currentInputStreamEnded;
// The frame that is sent downstream and is not done processing yet.
// Set to null on any thread. Read and set to non-null on the GL thread only.
@Nullable private volatile FrameInfo currentFrame;
@ -135,7 +138,9 @@ import java.util.concurrent.atomic.AtomicInteger;
videoFrameProcessingTaskExecutor.submit(
() -> {
currentFrame = null;
if (inputStreamEnded && pendingFrames.isEmpty()) {
if (currentInputStreamEnded && pendingFrames.isEmpty()) {
// Reset because there could be further input streams after the current one ends.
currentInputStreamEnded = false;
externalShaderProgram.signalEndOfCurrentInputStream();
} else {
maybeQueueFrameToExternalShaderProgram();
@ -177,16 +182,23 @@ import java.util.concurrent.atomic.AtomicInteger;
}
@Override
public void signalEndOfInput() {
public void signalEndOfCurrentInputStream() {
videoFrameProcessingTaskExecutor.submit(
() -> {
inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) {
externalShaderProgram.signalEndOfCurrentInputStream();
} else {
currentInputStreamEnded = true;
}
});
}
@Override
public void signalEndOfInput() {
  // TODO(b/274109008) Consider remove inputStreamEnded boolean.
  // Record, on the GL task executor, that no further input streams will arrive.
  videoFrameProcessingTaskExecutor.submit(
      () -> {
        inputStreamEnded = true;
      });
}
@Override
public void release() {
surfaceTexture.release();

View File

@ -65,6 +65,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/
/* package */ final class FinalShaderProgramWrapper implements ExternalShaderProgram {
/** Listener interface for the current input stream ending. */
interface OnInputStreamProcessedListener {
/**
 * Called when the current input stream is fully processed.
 *
 * <p>Returns whether {@link FinalShaderProgramWrapper} should notify {@link
 * VideoFrameProcessor.Listener#onEnded()}, i.e. whether the stream just processed was the last
 * one.
 */
boolean onInputStreamProcessed();
}
private static final String TAG = "FinalShaderWrapper";
private final Context context;
@ -94,6 +103,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation;
@Nullable private SurfaceView debugSurfaceView;
@Nullable private GlTextureInfo outputTexture;
@Nullable private OnInputStreamProcessedListener onInputStreamProcessedListener;
private boolean frameProcessingStarted;
private volatile boolean outputSurfaceInfoChanged;
@ -172,10 +182,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throw new UnsupportedOperationException();
}
/**
 * Sets the {@link OnInputStreamProcessedListener}, notified when the current input stream is
 * fully processed, or {@code null} to clear it.
 */
public void setOnInputStreamProcessedListener(
@Nullable OnInputStreamProcessedListener onInputStreamProcessedListener) {
this.onInputStreamProcessedListener = onInputStreamProcessedListener;
}
@Override
public void signalEndOfCurrentInputStream() {
  frameProcessingStarted = true;
  // Ask the listener whether the stream just processed was the last one; only then report that
  // all frame processing has ended.
  boolean frameProcessingEnded =
      checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
  if (frameProcessingEnded) {
    videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
  }
}
// Methods that must be called on the GL thread.

View File

@ -69,6 +69,14 @@ import androidx.media3.common.VideoFrameProcessor;
/** See {@link VideoFrameProcessor#getPendingInputFrameCount}. */
int getPendingFrameCount();
/**
 * Signals the end of the current input stream.
 *
 * <p>Called when switching from one input stream to the next, and also on the last input stream,
 * before calling {@link #signalEndOfInput}.
 */
void signalEndOfCurrentInputStream();
/**
* Signals the end of the input.
*

View File

@ -15,6 +15,7 @@
*/
package androidx.media3.test.utils;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
@ -324,6 +325,7 @@ public final class VideoFrameProcessorTestRunner {
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
videoFrameProcessor.registerInputFrame();
}
@ -343,6 +345,7 @@ public final class VideoFrameProcessorTestRunner {
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.setOffsetToAddUs(offsetToAddUs)
.build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}

View File

@ -218,6 +218,8 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get())
.build());
videoFrameProcessor.registerInputStream(
MimeTypes.isVideo(trackFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP);
}
mediaItemOffsetUs.addAndGet(durationUs);
}