Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)
Signal end of stream to VideoFrameProcessor in previewing

This will be used by VideoCompositor.

PiperOrigin-RevId: 702031907

This commit is contained in:
parent 25c927e9f3
commit 8908d82cac
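For orientation, the calling pattern this commit introduces is sketched below: a renderer keeps signalling the end of each item in a sequence through signalEndOfCurrentInputStream(), and additionally signals the end of the whole input once the last item has been rendered, so that the signal can be forwarded to the VideoFrameProcessor. This is a minimal illustrative sketch, not the actual media3 classes; only the two method names come from the diff, and the Sink interface and helper class are hypothetical stand-ins.

// Illustrative sketch only; everything except the two signal method names is hypothetical.
interface Sink {
  // Existing signal: the current input stream (one item in the sequence) has ended.
  void signalEndOfCurrentInputStream();

  // New signal added by this commit: no further input streams will follow.
  void signalEndOfInput();
}

final class SequenceEndSignaller {
  private final Sink sink;
  private boolean signaledEndOfInput;

  SequenceEndSignaller(Sink sink) {
    this.sink = sink;
  }

  // Called when one item in the sequence has finished rendering.
  void onItemEnded(boolean isLastItemInSequence) {
    sink.signalEndOfCurrentInputStream();
    if (isLastItemInSequence && !signaledEndOfInput) {
      // Mirrors InputVideoSink.signalEndOfInput() in the diff, which forwards the
      // signal to the VideoFrameProcessor exactly once.
      sink.signalEndOfInput();
      signaledEndOfInput = true;
    }
  }
}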
@@ -739,6 +739,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
     if (!inputSwitcher.hasActiveInput()) {
       return;
     }
+    inputStreamEnded = false;
     try {
       TextureManager textureManager = inputSwitcher.activeTextureManager();
       textureManager.dropIncomingRegisteredFrames();
@@ -25,12 +25,15 @@ import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
+import androidx.media3.common.Effect;
+import androidx.media3.common.Format;
 import androidx.media3.common.SurfaceInfo;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.common.VideoGraph;
 import androidx.media3.common.util.UnstableApi;
 import com.google.common.util.concurrent.MoreExecutors;
+import java.util.List;
 import java.util.concurrent.Executor;
 
 /** A {@link VideoGraph} that handles one input stream. */
@@ -48,7 +51,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
 
   @Nullable private VideoFrameProcessor videoFrameProcessor;
   @Nullable private SurfaceInfo outputSurfaceInfo;
-  private boolean isEnded;
   private boolean released;
   private volatile boolean hasProducedFrameWithTimestampZero;
   private int inputIndex;
@@ -108,6 +110,17 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
         /* listenerExecutor= */ MoreExecutors.directExecutor(),
         new VideoFrameProcessor.Listener() {
           private long lastProcessedFramePresentationTimeUs;
+          private boolean isEnded;
 
+          @Override
+          public void onInputStreamRegistered(
+              @VideoFrameProcessor.InputType int inputType,
+              Format format,
+              List<Effect> effects) {
+            // An input stream could be registered after VideoFrameProcessor ends, following
+            // a flush() for example.
+            isEnded = false;
+          }
+
           @Override
           public void onOutputSizeChanged(int width, int height) {
@@ -116,6 +116,11 @@ import java.util.concurrent.Executor;
     videoFrameRenderControl.signalEndOfInput();
   }
 
+  @Override
+  public void signalEndOfInput() {
+    // Ignored.
+  }
+
   @Override
   public boolean isEnded() {
     return videoFrameRenderControl.isEnded();
@@ -394,7 +394,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
 
   @Override
   public void onEnded(long finalFramePresentationTimeUs) {
-    throw new UnsupportedOperationException();
+    // Ignored.
   }
 
   @Override
@@ -545,6 +545,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
 
     private VideoSink.Listener listener;
     private Executor listenerExecutor;
+    private boolean signaledEndOfStream;
 
     /** Creates a new instance. */
     public InputVideoSink(Context context) {
@@ -604,6 +605,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
       }
       lastBufferPresentationTimeUs = C.TIME_UNSET;
       PlaybackVideoGraphWrapper.this.flush(resetPosition);
+      signaledEndOfStream = false;
       // Don't change input stream start position or reset the pending input stream timestamp info
       // change so that it's announced with the next input frame.
       // Don't reset isInputStreamChangePending because it's not guaranteed to receive a new input
@@ -621,6 +623,17 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
       finalBufferPresentationTimeUs = lastBufferPresentationTimeUs;
     }
 
+    @Override
+    public void signalEndOfInput() {
+      if (signaledEndOfStream) {
+        return;
+      }
+      if (isInitialized()) {
+        videoFrameProcessor.signalEndOfInput();
+        signaledEndOfStream = true;
+      }
+    }
+
     @Override
     public boolean isEnded() {
       return isInitialized() && PlaybackVideoGraphWrapper.this.isEnded();
@@ -767,12 +780,10 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
     @Override
     public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
       checkState(isInitialized());
-
       if (!checkStateNotNull(videoFrameProcessor)
           .queueInputBitmap(inputBitmap, timestampIterator)) {
         return false;
       }
-
       // TimestampIterator generates frame time.
       long lastBufferPresentationTimeUs =
           timestampIterator.getLastTimestampUs() - inputBufferTimestampAdjustmentUs;
@@ -180,6 +180,9 @@ public interface VideoSink {
   /** Signals the end of the current input stream. */
   void signalEndOfCurrentInputStream();
 
+  /** Signals the end of the last input stream. */
+  void signalEndOfInput();
+
   /**
    * Returns whether all the data has been rendered to the output surface.
    *
@@ -26,12 +26,18 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
 
 import android.content.Context;
 import android.view.SurfaceView;
+import androidx.media3.common.ColorInfo;
+import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
 import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.MediaItem;
+import androidx.media3.common.PreviewingVideoGraph;
+import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.VideoGraph;
 import androidx.media3.common.util.Util;
 import androidx.media3.effect.GlEffect;
+import androidx.media3.effect.PreviewingSingleInputVideoGraph;
 import androidx.test.ext.junit.rules.ActivityScenarioRule;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.common.collect.ImmutableList;
@@ -39,6 +45,7 @@ import com.google.common.collect.Iterables;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.junit.After;
@@ -65,6 +72,7 @@ public class CompositionPlayerSeekTest {
   // 200 ms at 30 fps (default frame rate)
   private static final ImmutableList<Long> IMAGE_TIMESTAMPS_US =
       ImmutableList.of(0L, 33_333L, 66_667L, 100_000L, 133_333L, 166_667L);
+  private static final long VIDEO_GRAPH_END_TIMEOUT_MS = 1_000;
 
   @Rule
   public ActivityScenarioRule<SurfaceTestActivity> rule =
@@ -119,11 +127,15 @@ public class CompositionPlayerSeekTest {
             .addAll(sequenceTimestampsUs)
             .addAll(sequenceTimestampsUs)
             .build();
-
+    CountDownLatch videoGraphEnded = new CountDownLatch(1);
     getInstrumentation()
         .runOnMainSync(
             () -> {
-              compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
+              compositionPlayer =
+                  new CompositionPlayer.Builder(applicationContext)
+                      .setPreviewingVideoGraphFactory(
+                          new ListenerCapturingVideoGraphFactory(videoGraphEnded))
+                      .build();
               // Set a surface on the player even though there is no UI on this test. We need a
               // surface otherwise the player will skip/drop video frames.
               compositionPlayer.setVideoSurfaceView(surfaceView);
@@ -139,6 +151,61 @@ public class CompositionPlayerSeekTest {
     getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(0));
     playerTestListener.waitUntilPlayerEnded();
 
+    assertThat(videoGraphEnded.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
+    assertThat(inputTimestampRecordingShaderProgram.getInputTimestampsUs())
+        .isEqualTo(expectedTimestampsUs);
+  }
+
+  @Test
+  public void seekToZero_afterPlayingSingleSequenceOfTwoImages() throws Exception {
+    InputTimestampRecordingShaderProgram inputTimestampRecordingShaderProgram =
+        new InputTimestampRecordingShaderProgram();
+    EditedMediaItem image =
+        createEditedMediaItem(
+            IMAGE_MEDIA_ITEM,
+            IMAGE_DURATION_US,
+            /* videoEffect= */ (GlEffect)
+                (context, useHdr) -> inputTimestampRecordingShaderProgram);
+    ImmutableList<Long> sequenceTimestampsUs =
+        new ImmutableList.Builder<Long>()
+            // Plays the first video
+            .addAll(IMAGE_TIMESTAMPS_US)
+            // Plays the second video
+            .addAll(
+                Iterables.transform(
+                    IMAGE_TIMESTAMPS_US, timestampUs -> IMAGE_DURATION_US + timestampUs))
+            .build();
+    // Seeked after the first playback ends, so the timestamps are repeated twice.
+    ImmutableList<Long> expectedTimestampsUs =
+        new ImmutableList.Builder<Long>()
+            .addAll(sequenceTimestampsUs)
+            .addAll(sequenceTimestampsUs)
+            .build();
+    CountDownLatch videoGraphEnded = new CountDownLatch(1);
+    getInstrumentation()
+        .runOnMainSync(
+            () -> {
+              compositionPlayer =
+                  new CompositionPlayer.Builder(applicationContext)
+                      .setPreviewingVideoGraphFactory(
+                          new ListenerCapturingVideoGraphFactory(videoGraphEnded))
+                      .build();
+              // Set a surface on the player even though there is no UI on this test. We need a
+              // surface otherwise the player will skip/drop video frames.
+              compositionPlayer.setVideoSurfaceView(surfaceView);
+              compositionPlayer.addListener(playerTestListener);
+              compositionPlayer.setComposition(
+                  new Composition.Builder(new EditedMediaItemSequence.Builder(image, image).build())
+                      .build());
+              compositionPlayer.prepare();
+              compositionPlayer.play();
+            });
+    playerTestListener.waitUntilPlayerEnded();
+    playerTestListener.resetStatus();
+    getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(0));
+    playerTestListener.waitUntilPlayerEnded();
+    assertThat(videoGraphEnded.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
+
     assertThat(inputTimestampRecordingShaderProgram.getInputTimestampsUs())
         .isEqualTo(expectedTimestampsUs);
   }
@@ -563,10 +630,15 @@ public class CompositionPlayerSeekTest {
                     (context, useHdr) -> inputTimestampRecordingShaderProgram));
     }
 
+    CountDownLatch videoGraphEnded = new CountDownLatch(1);
     getInstrumentation()
         .runOnMainSync(
            () -> {
-              compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
+              compositionPlayer =
+                  new CompositionPlayer.Builder(applicationContext)
+                      .setPreviewingVideoGraphFactory(
+                          new ListenerCapturingVideoGraphFactory(videoGraphEnded))
+                      .build();
               // Set a surface on the player even though there is no UI on this test. We need a
               // surface otherwise the player will skip/drop video frames.
               compositionPlayer.setVideoSurfaceView(surfaceView);
@@ -583,6 +655,8 @@ public class CompositionPlayerSeekTest {
     framesReceivedLatch.await();
     getInstrumentation().runOnMainSync(() -> compositionPlayer.seekTo(seekTimeMs));
     playerTestListener.waitUntilPlayerEnded();
+
+    assertThat(videoGraphEnded.await(VIDEO_GRAPH_END_TIMEOUT_MS, MILLISECONDS)).isTrue();
     return inputTimestampRecordingShaderProgram.getInputTimestampsUs();
   }
 
@@ -595,6 +669,64 @@ public class CompositionPlayerSeekTest {
         .build();
   }
 
+  private static final class ListenerCapturingVideoGraphFactory
+      implements PreviewingVideoGraph.Factory {
+
+    private final PreviewingSingleInputVideoGraph.Factory singleInputVideoGraphFactory;
+    private final CountDownLatch videoGraphEnded;
+
+    public ListenerCapturingVideoGraphFactory(CountDownLatch videoGraphEnded) {
+      singleInputVideoGraphFactory = new PreviewingSingleInputVideoGraph.Factory();
+      this.videoGraphEnded = videoGraphEnded;
+    }
+
+    @Override
+    public PreviewingVideoGraph create(
+        Context context,
+        ColorInfo outputColorInfo,
+        DebugViewProvider debugViewProvider,
+        VideoGraph.Listener listener,
+        Executor listenerExecutor,
+        List<Effect> compositionEffects,
+        long initialTimestampOffsetUs) {
+      return singleInputVideoGraphFactory.create(
+          context,
+          outputColorInfo,
+          debugViewProvider,
+          new VideoGraph.Listener() {
+
+            @Override
+            public void onOutputSizeChanged(int width, int height) {
+              listener.onOutputSizeChanged(width, height);
+            }
+
+            @Override
+            public void onOutputFrameRateChanged(float frameRate) {
+              listener.onOutputFrameRateChanged(frameRate);
+            }
+
+            @Override
+            public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
+              listener.onOutputFrameAvailableForRendering(framePresentationTimeUs);
+            }
+
+            @Override
+            public void onEnded(long finalFramePresentationTimeUs) {
+              videoGraphEnded.countDown();
+              listener.onEnded(finalFramePresentationTimeUs);
+            }
+
+            @Override
+            public void onError(VideoFrameProcessingException exception) {
+              listener.onError(exception);
+            }
+          },
+          listenerExecutor,
+          compositionEffects,
+          initialTimestampOffsetUs);
+    }
+  }
+
   private static final class ResettableCountDownLatch {
     private CountDownLatch latch;
 
@@ -139,6 +139,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     executeOrDelay(VideoSink::signalEndOfCurrentInputStream);
   }
 
+  @Override
+  public void signalEndOfInput() {
+    executeOrDelay(VideoSink::signalEndOfInput);
+  }
+
   @Override
   public boolean isEnded() {
     return videoSink != null && videoSink.isEnded();
@@ -31,6 +31,7 @@ import androidx.media3.common.Effect;
 import androidx.media3.common.Format;
 import androidx.media3.common.MimeTypes;
 import androidx.media3.common.PlaybackException;
+import androidx.media3.common.Timeline;
 import androidx.media3.common.util.ConstantRateTimestampIterator;
 import androidx.media3.exoplayer.ExoPlaybackException;
 import androidx.media3.exoplayer.Renderer;
@@ -161,6 +162,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     return offsetToCompositionTimeUs;
   }
 
+  private static boolean isLastInSequence(
+      Timeline timeline, EditedMediaItemSequence sequence, EditedMediaItem mediaItem) {
+    int lastEditedMediaItemIndex = timeline.getPeriodCount() - 1;
+    return mediaItem == getRepeatedEditedMediaItem(sequence, lastEditedMediaItemIndex);
+  }
+
   /**
    * Gets the {@link EditedMediaItem} of a given {@code index}.
    *
@@ -251,12 +258,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
     private void onMediaItemChanged() {
       EditedMediaItem currentEditedMediaItem = checkStateNotNull(pendingEditedMediaItem);
-      // Use reference equality intentionally.
-      int lastEditedMediaItemIndex = getTimeline().getPeriodCount() - 1;
-      boolean isLastInSequence =
-          currentEditedMediaItem == getRepeatedEditedMediaItem(sequence, lastEditedMediaItemIndex);
       audioSink.onMediaItemChanged(
-          currentEditedMediaItem, pendingOffsetToCompositionTimeUs, isLastInSequence);
+          currentEditedMediaItem,
+          pendingOffsetToCompositionTimeUs,
+          isLastInSequence(getTimeline(), sequence, currentEditedMediaItem));
     }
   }
 
@@ -265,6 +270,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     private final VideoSink videoSink;
 
     @Nullable private ImmutableList<Effect> pendingEffect;
+    @Nullable private EditedMediaItem currentEditedMediaItem;
    private long offsetToCompositionTimeUs;
 
    public SequenceVideoRenderer(
@@ -299,6 +305,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       checkState(getTimeline().getWindowCount() == 1);
       // The media item might have been repeated in the sequence.
       int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
+      // The renderer has started processing this item, VideoGraph might still be processing the
+      // previous one.
+      currentEditedMediaItem = getRepeatedEditedMediaItem(sequence, mediaItemIndex);
       offsetToCompositionTimeUs = getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs);
       pendingEffect = sequence.editedMediaItems.get(mediaItemIndex).effects.videoEffects;
       super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
@@ -309,6 +318,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return offsetToCompositionTimeUs;
     }
 
+    @Override
+    protected void renderToEndOfStream() {
+      super.renderToEndOfStream();
+      if (isLastInSequence(getTimeline(), sequence, checkNotNull(currentEditedMediaItem))) {
+        videoSink.signalEndOfInput();
+      }
+    }
+
     @Override
     protected void onReadyToChangeVideoSinkInputStream() {
       @Nullable ImmutableList<Effect> pendingEffect = this.pendingEffect;
@@ -326,7 +343,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
     private ImmutableList<Effect> videoEffects;
     private @MonotonicNonNull ConstantRateTimestampIterator timestampIterator;
-    private @MonotonicNonNull EditedMediaItem editedMediaItem;
+    private @MonotonicNonNull EditedMediaItem currentEditedMediaItem;
     @Nullable private ExoPlaybackException pendingExoPlaybackException;
     private boolean inputStreamPending;
     private long streamStartPositionUs;
@@ -431,10 +448,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       streamStartPositionUs = startPositionUs;
       // The media item might have been repeated in the sequence.
       int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
-      editedMediaItem = sequence.editedMediaItems.get(mediaItemIndex);
+      currentEditedMediaItem = sequence.editedMediaItems.get(mediaItemIndex);
       offsetToCompositionTimeUs = getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs);
       timestampIterator = createTimestampIterator(/* positionUs= */ startPositionUs);
-      videoEffects = editedMediaItem.effects.videoEffects;
+      videoEffects = currentEditedMediaItem.effects.videoEffects;
       inputStreamPending = true;
     }
 
@@ -479,6 +496,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
         return false;
       }
       videoSink.signalEndOfCurrentInputStream();
+      if (isLastInSequence(getTimeline(), sequence, checkNotNull(currentEditedMediaItem))) {
+        videoSink.signalEndOfInput();
+      }
       return true;
     }
 
@@ -500,7 +520,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       long positionWithinImage = positionUs - streamOffsetUs;
       long firstBitmapTimeUs = imageBaseTimestampUs + positionWithinImage;
       long lastBitmapTimeUs =
-          imageBaseTimestampUs + checkNotNull(editedMediaItem).getPresentationDurationUs();
+          imageBaseTimestampUs + checkNotNull(currentEditedMediaItem).getPresentationDurationUs();
       return new ConstantRateTimestampIterator(
           /* startPositionUs= */ firstBitmapTimeUs,
           /* endPositionUs= */ lastBitmapTimeUs,