Signal end of stream to VideoFrameProcessor in previewing

This will be used by VideoCompositor

PiperOrigin-RevId: 702031907
This commit is contained in:
claincly 2024-12-02 11:27:41 -08:00 committed by Copybara-Service
parent 25c927e9f3
commit 8908d82cac
8 changed files with 206 additions and 16 deletions

View File

@ -739,6 +739,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
if (!inputSwitcher.hasActiveInput()) {
return;
}
inputStreamEnded = false;
try {
TextureManager textureManager = inputSwitcher.activeTextureManager();
textureManager.dropIncomingRegisteredFrames();

View File

@ -25,12 +25,15 @@ import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.UnstableApi;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.List;
import java.util.concurrent.Executor;
/** A {@link VideoGraph} that handles one input stream. */
@ -48,7 +51,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
@Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private SurfaceInfo outputSurfaceInfo;
private boolean isEnded;
private boolean released;
private volatile boolean hasProducedFrameWithTimestampZero;
private int inputIndex;
@ -108,6 +110,17 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
/* listenerExecutor= */ MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs;
private boolean isEnded;
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
Format format,
List<Effect> effects) {
// An input stream could be registered after VideoFrameProcessor ends, following
// a flush() for example.
// Resets the listener-local isEnded flag so that a subsequent end-of-stream for the
// new input stream is propagated again rather than being treated as a duplicate.
isEnded = false;
}
@Override
public void onOutputSizeChanged(int width, int height) {

View File

@ -116,6 +116,11 @@ import java.util.concurrent.Executor;
videoFrameRenderControl.signalEndOfInput();
}
@Override
public void signalEndOfInput() {
// Intentionally a no-op: this sink appears to drive end-of-stream through
// signalEndOfCurrentInputStream() -> videoFrameRenderControl.signalEndOfInput() instead.
// NOTE(review): presumed from the surrounding overrides — confirm against the interface
// contract for signalEndOfInput().
}
@Override
public boolean isEnded() {
return videoFrameRenderControl.isEnded();

View File

@ -394,7 +394,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Override
public void onEnded(long finalFramePresentationTimeUs) {
throw new UnsupportedOperationException();
// Ignored.
}
@Override
@ -545,6 +545,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private VideoSink.Listener listener;
private Executor listenerExecutor;
private boolean signaledEndOfStream;
/** Creates a new instance. */
public InputVideoSink(Context context) {
@ -604,6 +605,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
lastBufferPresentationTimeUs = C.TIME_UNSET;
PlaybackVideoGraphWrapper.this.flush(resetPosition);
signaledEndOfStream = false;
// Don't change input stream start position or reset the pending input stream timestamp info
// change so that it's announced with the next input frame.
// Don't reset isInputStreamChangePending because it's not guaranteed to receive a new input
@ -621,6 +623,17 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
finalBufferPresentationTimeUs = lastBufferPresentationTimeUs;
}
@Override
public void signalEndOfInput() {
  // Forward end-of-input to the VideoFrameProcessor exactly once. If the sink is not yet
  // initialized, the signal is dropped (the flag stays false so a later call can succeed).
  if (!signaledEndOfStream && isInitialized()) {
    videoFrameProcessor.signalEndOfInput();
    signaledEndOfStream = true;
  }
}
@Override
public boolean isEnded() {
return isInitialized() && PlaybackVideoGraphWrapper.this.isEnded();
@ -767,12 +780,10 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@Override
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
checkState(isInitialized());
if (!checkStateNotNull(videoFrameProcessor)
.queueInputBitmap(inputBitmap, timestampIterator)) {
return false;
}
// TimestampIterator generates frame time.
long lastBufferPresentationTimeUs =
timestampIterator.getLastTimestampUs() - inputBufferTimestampAdjustmentUs;

View File

@ -180,6 +180,9 @@ public interface VideoSink {
/** Signals the end of the current input stream. */
void signalEndOfCurrentInputStream();
/**
 * Signals the end of the last input stream, that is, that no further input streams will be
 * provided to this sink. Distinct from {@link #signalEndOfCurrentInputStream()}, which only
 * ends the stream currently being consumed.
 */
void signalEndOfInput();
/**
* Returns whether all the data has been rendered to the output surface.
*

View File

@ -26,12 +26,18 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.content.Context;
import android.view.SurfaceView;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.MediaItem;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffect;
import androidx.media3.effect.PreviewingSingleInputVideoGraph;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
@ -39,6 +45,7 @@ import com.google.common.collect.Iterables;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.After;
@ -65,6 +72,7 @@ public class CompositionPlayerSeekTest {
// 200 ms at 30 fps (default frame rate)
private static final ImmutableList<Long> IMAGE_TIMESTAMPS_US =
ImmutableList.of(0L, 33_333L, 66_667L, 100_000L, 133_333L, 166_667L);
private static final long VIDEO_GRAPH_END_TIMEOUT_MS = 1_000;
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
@ -119,11 +127,15 @@ public class CompositionPlayerSeekTest {
.addAll(sequenceTimestampsUs)
.addAll(sequenceTimestampsUs)
.build();
CountDownLatch videoGraphEnded = new CountDownLatch(1);
getInstrumentation()
.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(
new ListenerCapturingVideoGraphFactory(videoGraphEnded))
.build();
// Set a surface on the player even though there is no UI on this test. We need a
// surface otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
@ -139,6 +151,61 @@ public class CompositionPlayerSeekTest {
getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(0));
playerTestListener.waitUntilPlayerEnded();
assertThat(videoGraphEnded.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
assertThat(inputTimestampRecordingShaderProgram.getInputTimestampsUs())
.isEqualTo(expectedTimestampsUs);
}
@Test
public void seekToZero_afterPlayingSingleSequenceOfTwoImages() throws Exception {
  InputTimestampRecordingShaderProgram timestampRecordingShaderProgram =
      new InputTimestampRecordingShaderProgram();
  EditedMediaItem image =
      createEditedMediaItem(
          IMAGE_MEDIA_ITEM,
          IMAGE_DURATION_US,
          /* videoEffect= */ (GlEffect) (context, useHdr) -> timestampRecordingShaderProgram);
  // Timestamps for one playthrough of the two-image sequence: the first image's timestamps,
  // then the second image's timestamps shifted by the first image's duration.
  ImmutableList<Long> singlePlaythroughTimestampsUs =
      new ImmutableList.Builder<Long>()
          // Plays the first video
          .addAll(IMAGE_TIMESTAMPS_US)
          // Plays the second video
          .addAll(
              Iterables.transform(
                  IMAGE_TIMESTAMPS_US, timestampUs -> IMAGE_DURATION_US + timestampUs))
          .build();
  // Seeked after the first playback ends, so the timestamps are repeated twice.
  ImmutableList<Long> expectedTimestampsUs =
      new ImmutableList.Builder<Long>()
          .addAll(singlePlaythroughTimestampsUs)
          .addAll(singlePlaythroughTimestampsUs)
          .build();
  CountDownLatch videoGraphEndedLatch = new CountDownLatch(1);
  getInstrumentation()
      .runOnMainSync(
          () -> {
            compositionPlayer =
                new CompositionPlayer.Builder(applicationContext)
                    .setPreviewingVideoGraphFactory(
                        new ListenerCapturingVideoGraphFactory(videoGraphEndedLatch))
                    .build();
            // Set a surface on the player even though there is no UI on this test. We need a
            // surface otherwise the player will skip/drop video frames.
            compositionPlayer.setVideoSurfaceView(surfaceView);
            compositionPlayer.addListener(playerTestListener);
            compositionPlayer.setComposition(
                new Composition.Builder(new EditedMediaItemSequence.Builder(image, image).build())
                    .build());
            compositionPlayer.prepare();
            compositionPlayer.play();
          });
  playerTestListener.waitUntilPlayerEnded();
  playerTestListener.resetStatus();
  getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(0));
  playerTestListener.waitUntilPlayerEnded();

  assertThat(videoGraphEndedLatch.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
  assertThat(timestampRecordingShaderProgram.getInputTimestampsUs())
      .isEqualTo(expectedTimestampsUs);
}
@ -563,10 +630,15 @@ public class CompositionPlayerSeekTest {
(context, useHdr) -> inputTimestampRecordingShaderProgram));
}
CountDownLatch videoGraphEnded = new CountDownLatch(1);
getInstrumentation()
.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(
new ListenerCapturingVideoGraphFactory(videoGraphEnded))
.build();
// Set a surface on the player even though there is no UI on this test. We need a
// surface otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
@ -583,6 +655,8 @@ public class CompositionPlayerSeekTest {
framesReceivedLatch.await();
getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(seekTimeMs));
playerTestListener.waitUntilPlayerEnded();
assertThat(videoGraphEnded.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
return inputTimestampRecordingShaderProgram.getInputTimestampsUs();
}
@ -595,6 +669,64 @@ public class CompositionPlayerSeekTest {
.build();
}
/**
 * A {@link PreviewingVideoGraph.Factory} that wraps a {@link
 * PreviewingSingleInputVideoGraph.Factory} and counts down the given latch when the created
 * graph's {@link VideoGraph.Listener#onEnded} is invoked, forwarding every callback unchanged.
 */
private static final class ListenerCapturingVideoGraphFactory
    implements PreviewingVideoGraph.Factory {

  private final PreviewingSingleInputVideoGraph.Factory delegateFactory;
  private final CountDownLatch endedLatch;

  public ListenerCapturingVideoGraphFactory(CountDownLatch videoGraphEnded) {
    this.delegateFactory = new PreviewingSingleInputVideoGraph.Factory();
    this.endedLatch = videoGraphEnded;
  }

  @Override
  public PreviewingVideoGraph create(
      Context context,
      ColorInfo outputColorInfo,
      DebugViewProvider debugViewProvider,
      VideoGraph.Listener listener,
      Executor listenerExecutor,
      List<Effect> compositionEffects,
      long initialTimestampOffsetUs) {
    // Interpose a listener that observes onEnded before delegating to the real listener.
    VideoGraph.Listener forwardingListener =
        new VideoGraph.Listener() {
          @Override
          public void onOutputSizeChanged(int width, int height) {
            listener.onOutputSizeChanged(width, height);
          }

          @Override
          public void onOutputFrameRateChanged(float frameRate) {
            listener.onOutputFrameRateChanged(frameRate);
          }

          @Override
          public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
            listener.onOutputFrameAvailableForRendering(framePresentationTimeUs);
          }

          @Override
          public void onEnded(long finalFramePresentationTimeUs) {
            endedLatch.countDown();
            listener.onEnded(finalFramePresentationTimeUs);
          }

          @Override
          public void onError(VideoFrameProcessingException exception) {
            listener.onError(exception);
          }
        };
    return delegateFactory.create(
        context,
        outputColorInfo,
        debugViewProvider,
        forwardingListener,
        listenerExecutor,
        compositionEffects,
        initialTimestampOffsetUs);
  }
}
private static final class ResettableCountDownLatch {
private CountDownLatch latch;

View File

@ -139,6 +139,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
executeOrDelay(VideoSink::signalEndOfCurrentInputStream);
}
@Override
public void signalEndOfInput() {
// Forwards to the wrapped VideoSink, or queues the call until one is set — presumed from
// executeOrDelay's name and its identical use for signalEndOfCurrentInputStream; confirm.
executeOrDelay(VideoSink::signalEndOfInput);
}
@Override
public boolean isEnded() {
return videoSink != null && videoSink.isEnded();

View File

@ -31,6 +31,7 @@ import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Timeline;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.Renderer;
@ -161,6 +162,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return offsetToCompositionTimeUs;
}
/**
 * Returns whether {@code mediaItem} is the last item of {@code sequence}, as laid out in {@code
 * timeline}. Uses reference equality because the sequence may repeat the same {@link
 * EditedMediaItem} instance.
 */
private static boolean isLastInSequence(
    Timeline timeline, EditedMediaItemSequence sequence, EditedMediaItem mediaItem) {
  EditedMediaItem lastItemInTimeline =
      getRepeatedEditedMediaItem(sequence, /* index= */ timeline.getPeriodCount() - 1);
  return mediaItem == lastItemInTimeline;
}
/**
* Gets the {@link EditedMediaItem} of a given {@code index}.
*
@ -251,12 +258,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private void onMediaItemChanged() {
EditedMediaItem currentEditedMediaItem = checkStateNotNull(pendingEditedMediaItem);
// Use reference equality intentionally.
int lastEditedMediaItemIndex = getTimeline().getPeriodCount() - 1;
boolean isLastInSequence =
currentEditedMediaItem == getRepeatedEditedMediaItem(sequence, lastEditedMediaItemIndex);
audioSink.onMediaItemChanged(
currentEditedMediaItem, pendingOffsetToCompositionTimeUs, isLastInSequence);
currentEditedMediaItem,
pendingOffsetToCompositionTimeUs,
isLastInSequence(getTimeline(), sequence, currentEditedMediaItem));
}
}
@ -265,6 +270,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final VideoSink videoSink;
@Nullable private ImmutableList<Effect> pendingEffect;
@Nullable private EditedMediaItem currentEditedMediaItem;
private long offsetToCompositionTimeUs;
public SequenceVideoRenderer(
@ -299,6 +305,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState(getTimeline().getWindowCount() == 1);
// The media item might have been repeated in the sequence.
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
// The renderer has started processing this item, VideoGraph might still be processing the
// previous one.
currentEditedMediaItem = getRepeatedEditedMediaItem(sequence, mediaItemIndex);
offsetToCompositionTimeUs = getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs);
pendingEffect = sequence.editedMediaItems.get(mediaItemIndex).effects.videoEffects;
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
@ -309,6 +318,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return offsetToCompositionTimeUs;
}
@Override
protected void renderToEndOfStream() {
  super.renderToEndOfStream();
  // Only the final item of the sequence signals that the whole input has ended.
  EditedMediaItem mediaItem = checkNotNull(currentEditedMediaItem);
  if (isLastInSequence(getTimeline(), sequence, mediaItem)) {
    videoSink.signalEndOfInput();
  }
}
@Override
protected void onReadyToChangeVideoSinkInputStream() {
@Nullable ImmutableList<Effect> pendingEffect = this.pendingEffect;
@ -326,7 +343,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private ImmutableList<Effect> videoEffects;
private @MonotonicNonNull ConstantRateTimestampIterator timestampIterator;
private @MonotonicNonNull EditedMediaItem editedMediaItem;
private @MonotonicNonNull EditedMediaItem currentEditedMediaItem;
@Nullable private ExoPlaybackException pendingExoPlaybackException;
private boolean inputStreamPending;
private long streamStartPositionUs;
@ -431,10 +448,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
streamStartPositionUs = startPositionUs;
// The media item might have been repeated in the sequence.
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
editedMediaItem = sequence.editedMediaItems.get(mediaItemIndex);
currentEditedMediaItem = sequence.editedMediaItems.get(mediaItemIndex);
offsetToCompositionTimeUs = getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs);
timestampIterator = createTimestampIterator(/* positionUs= */ startPositionUs);
videoEffects = editedMediaItem.effects.videoEffects;
videoEffects = currentEditedMediaItem.effects.videoEffects;
inputStreamPending = true;
}
@ -479,6 +496,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return false;
}
videoSink.signalEndOfCurrentInputStream();
if (isLastInSequence(getTimeline(), sequence, checkNotNull(currentEditedMediaItem))) {
videoSink.signalEndOfInput();
}
return true;
}
@ -500,7 +520,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long positionWithinImage = positionUs - streamOffsetUs;
long firstBitmapTimeUs = imageBaseTimestampUs + positionWithinImage;
long lastBitmapTimeUs =
imageBaseTimestampUs + checkNotNull(editedMediaItem).getPresentationDurationUs();
imageBaseTimestampUs + checkNotNull(currentEditedMediaItem).getPresentationDurationUs();
return new ConstantRateTimestampIterator(
/* startPositionUs= */ firstBitmapTimeUs,
/* endPositionUs= */ lastBitmapTimeUs,