Handle rendering in VideoGraph time

Before this CL, the buffer adjustment (which allows converting from
ExoPlayer time to VideoGraph time) was added to the frame timestamps
before feeding them to the VideoGraph, and then subtracted at the
VideoGraph output. The playback position and stream start position used
for rendering were in ExoPlayer time.

This doesn't work for multi-sequence playback, though, because the
adjustment might differ depending on the player (after a seek, for example).

To solve this problem, this CL handles rendering in VideoGraph time
instead of ExoPlayer time. More concretely, the VideoGraph output
timestamps are unchanged, and the playback position and stream start
position are converted to VideoGraph time.

PiperOrigin-RevId: 752260744
This commit is contained in:
kimvde 2025-04-28 04:57:35 -07:00 committed by Copybara-Service
parent 8968d9fa45
commit 0f08c97221
7 changed files with 129 additions and 91 deletions

View File

@ -45,6 +45,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* <ul> * <ul>
* <li>Applying video effects * <li>Applying video effects
* <li>Inputting bitmaps * <li>Inputting bitmaps
* <li>Redrawing
* <li>Setting a buffer timestamp adjustment
* </ul> * </ul>
* *
* <p>The {@linkplain #getInputSurface() input} and {@linkplain #setOutputSurfaceInfo(Surface, Size) * <p>The {@linkplain #getInputSurface() input} and {@linkplain #setOutputSurfaceInfo(Surface, Size)
@ -59,7 +61,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Nullable private Surface outputSurface; @Nullable private Surface outputSurface;
private Format inputFormat; private Format inputFormat;
private long streamStartPositionUs; private long streamStartPositionUs;
private long bufferTimestampAdjustmentUs;
private Listener listener; private Listener listener;
private Executor listenerExecutor; private Executor listenerExecutor;
private VideoFrameMetadataListener videoFrameMetadataListener; private VideoFrameMetadataListener videoFrameMetadataListener;
@ -104,6 +105,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return true; return true;
} }
/**
* {@inheritDoc}
*
* <p>This method will always throw an {@link UnsupportedOperationException}.
*/
@Override @Override
public void redraw() { public void redraw() {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
@ -163,9 +169,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
/**
* {@inheritDoc}
*
* <p>This method will always throw an {@link UnsupportedOperationException}.
*/
@Override @Override
public void setBufferTimestampAdjustmentUs(long bufferTimestampAdjustmentUs) { public void setBufferTimestampAdjustmentUs(long bufferTimestampAdjustmentUs) {
this.bufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs; throw new UnsupportedOperationException();
} }
@Override @Override
@ -220,8 +231,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public boolean handleInputFrame( public boolean handleInputFrame(
long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) { long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) {
videoFrameHandlers.add(videoFrameHandler); videoFrameHandlers.add(videoFrameHandler);
long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs; videoFrameRenderControl.onFrameAvailableForRendering(framePresentationTimeUs);
videoFrameRenderControl.onFrameAvailableForRendering(bufferPresentationTimeUs);
listenerExecutor.execute(() -> listener.onFrameAvailableForRendering()); listenerExecutor.execute(() -> listener.onFrameAvailableForRendering());
return true; return true;
} }
@ -232,7 +242,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* <p>This method will always throw an {@link UnsupportedOperationException}. * <p>This method will always throw an {@link UnsupportedOperationException}.
*/ */
@Override @Override
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) { public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator bufferTimestampIterator) {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@ -269,8 +279,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
public void renderFrame( public void renderFrame(long renderTimeNs, long framePresentationTimeUs, boolean isFirstFrame) {
long renderTimeNs, long bufferPresentationTimeUs, boolean isFirstFrame) {
if (isFirstFrame && outputSurface != null) { if (isFirstFrame && outputSurface != null) {
listenerExecutor.execute(() -> listener.onFirstFrameRendered()); listenerExecutor.execute(() -> listener.onFirstFrameRendered());
} }
@ -278,7 +287,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// onVideoSizeChanged is announced after the first frame is available for rendering. // onVideoSizeChanged is announced after the first frame is available for rendering.
Format format = outputFormat == null ? new Format.Builder().build() : outputFormat; Format format = outputFormat == null ? new Format.Builder().build() : outputFormat;
videoFrameMetadataListener.onVideoFrameAboutToBeRendered( videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
/* presentationTimeUs= */ bufferPresentationTimeUs, /* presentationTimeUs= */ framePresentationTimeUs,
/* releaseTimeNs= */ renderTimeNs, /* releaseTimeNs= */ renderTimeNs,
format, format,
/* mediaFormat= */ null); /* mediaFormat= */ null);

View File

@ -626,6 +626,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
boolean isLastFrame, boolean isLastFrame,
boolean treatDroppedBuffersAsSkipped) boolean treatDroppedBuffersAsSkipped)
throws ExoPlaybackException { throws ExoPlaybackException {
if (videoSink != null && ownsVideoSink) {
// When using PlaybackVideoGraphWrapper, positionUs is shifted by the buffer timestamp
// adjustment. Shift it back to the player position.
positionUs -= getBufferTimestampAdjustmentUs();
}
if (minEarlyUsToDropDecoderInput != C.TIME_UNSET) { if (minEarlyUsToDropDecoderInput != C.TIME_UNSET) {
// TODO: b/161996553 - Remove the isAwayFromLastResetPosition check when audio pre-rolling // TODO: b/161996553 - Remove the isAwayFromLastResetPosition check when audio pre-rolling
// is implemented correctly. Audio codecs such as Opus require pre-roll samples to be decoded // is implemented correctly. Audio codecs such as Opus require pre-roll samples to be decoded
@ -1734,9 +1739,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
skipOutputBuffer(codec, bufferIndex, presentationTimeUs); skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
return true; return true;
} }
long framePresentationTimeUs = bufferPresentationTimeUs + getBufferTimestampAdjustmentUs();
return videoSink.handleInputFrame( return videoSink.handleInputFrame(
framePresentationTimeUs, bufferPresentationTimeUs,
new VideoSink.VideoFrameHandler() { new VideoSink.VideoFrameHandler() {
@Override @Override
public void render(long renderTimestampNs) { public void render(long renderTimestampNs) {

View File

@ -326,23 +326,16 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
private @State int state; private @State int state;
/** /**
* The buffer presentation time of the frame most recently output by the video graph, in * The frame presentation time of the frame most recently output by the video graph, in
* microseconds. * microseconds.
*/ */
private long lastOutputBufferPresentationTimeUs; private long lastOutputFramePresentationTimeUs;
/** The buffer presentation time, in microseconds, of the final frame in the stream. */ /** The frame presentation time, in microseconds, of the final frame in the stream. */
private long finalBufferPresentationTimeUs; private long finalFramePresentationTimeUs;
private boolean hasSignaledEndOfVideoGraphOutputStream; private boolean hasSignaledEndOfVideoGraphOutputStream;
/**
* Converts the buffer timestamp (the player position, with renderer offset) to the composition
* timestamp, in microseconds. The composition time starts from zero, add this adjustment to
* buffer timestamp to get the composition time.
*/
private long bufferTimestampAdjustmentUs;
private int totalVideoInputCount; private int totalVideoInputCount;
private int registeredVideoInputCount; private int registeredVideoInputCount;
@ -372,8 +365,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
requestOpenGlToneMapping = builder.requestOpenGlToneMapping; requestOpenGlToneMapping = builder.requestOpenGlToneMapping;
videoGraphOutputFormat = new Format.Builder().build(); videoGraphOutputFormat = new Format.Builder().build();
outputStreamStartPositionUs = C.TIME_UNSET; outputStreamStartPositionUs = C.TIME_UNSET;
lastOutputBufferPresentationTimeUs = C.TIME_UNSET; lastOutputFramePresentationTimeUs = C.TIME_UNSET;
finalBufferPresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
totalVideoInputCount = C.LENGTH_UNSET; totalVideoInputCount = C.LENGTH_UNSET;
state = STATE_CREATED; state = STATE_CREATED;
} }
@ -492,12 +485,11 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
listener.onFrameAvailableForRendering(); listener.onFrameAvailableForRendering();
} }
long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
if (isRedrawnFrame) { if (isRedrawnFrame) {
// Redrawn frames are rendered directly in the processing pipeline. // Redrawn frames are rendered directly in the processing pipeline.
if (videoFrameMetadataListener != null) { if (videoFrameMetadataListener != null) {
videoFrameMetadataListener.onVideoFrameAboutToBeRendered( videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
/* presentationTimeUs= */ bufferPresentationTimeUs, /* presentationTimeUs= */ framePresentationTimeUs,
/* releaseTimeNs= */ C.TIME_UNSET, /* releaseTimeNs= */ C.TIME_UNSET,
videoGraphOutputFormat, videoGraphOutputFormat,
/* mediaFormat= */ null); /* mediaFormat= */ null);
@ -507,8 +499,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
// The frame presentation time is relative to the start of the Composition and without the // The frame presentation time is relative to the start of the Composition and without the
// renderer offset // renderer offset
lastOutputBufferPresentationTimeUs = bufferPresentationTimeUs; lastOutputFramePresentationTimeUs = framePresentationTimeUs;
StreamChangeInfo streamChangeInfo = pendingStreamChanges.pollFloor(bufferPresentationTimeUs); StreamChangeInfo streamChangeInfo = pendingStreamChanges.pollFloor(framePresentationTimeUs);
if (streamChangeInfo != null) { if (streamChangeInfo != null) {
outputStreamStartPositionUs = streamChangeInfo.startPositionUs; outputStreamStartPositionUs = streamChangeInfo.startPositionUs;
outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction; outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction;
@ -516,8 +508,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
} }
defaultVideoSink.handleInputFrame(framePresentationTimeUs, videoFrameHandler); defaultVideoSink.handleInputFrame(framePresentationTimeUs, videoFrameHandler);
boolean isLastFrame = boolean isLastFrame =
finalBufferPresentationTimeUs != C.TIME_UNSET finalFramePresentationTimeUs != C.TIME_UNSET
&& bufferPresentationTimeUs >= finalBufferPresentationTimeUs; && framePresentationTimeUs >= finalFramePresentationTimeUs;
if (isLastFrame) { if (isLastFrame) {
signalEndOfVideoGraphOutputStream(); signalEndOfVideoGraphOutputStream();
} }
@ -667,8 +659,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction; outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction;
onOutputStreamChanged(); onOutputStreamChanged();
} }
lastOutputBufferPresentationTimeUs = C.TIME_UNSET; lastOutputFramePresentationTimeUs = C.TIME_UNSET;
finalBufferPresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
hasSignaledEndOfVideoGraphOutputStream = false; hasSignaledEndOfVideoGraphOutputStream = false;
// Handle pending video graph callbacks to ensure video size changes reach the video render // Handle pending video graph callbacks to ensure video size changes reach the video render
// control. // control.
@ -693,11 +685,6 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
defaultVideoSink.setPlaybackSpeed(speed); defaultVideoSink.setPlaybackSpeed(speed);
} }
private void setBufferTimestampAdjustment(long bufferTimestampAdjustmentUs) {
this.bufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs;
defaultVideoSink.setBufferTimestampAdjustmentUs(bufferTimestampAdjustmentUs);
}
private void setChangeFrameRateStrategy( private void setChangeFrameRateStrategy(
@C.VideoChangeFrameRateStrategy int changeFrameRateStrategy) { @C.VideoChangeFrameRateStrategy int changeFrameRateStrategy) {
defaultVideoSink.setChangeFrameRateStrategy(changeFrameRateStrategy); defaultVideoSink.setChangeFrameRateStrategy(changeFrameRateStrategy);
@ -736,10 +723,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
private @InputType int inputType; private @InputType int inputType;
private long inputBufferTimestampAdjustmentUs; private long inputBufferTimestampAdjustmentUs;
/** /** The frame presentation timestamp, in microseconds, of the most recently registered frame. */
* The buffer presentation timestamp, in microseconds, of the most recently registered frame. private long lastFramePresentationTimeUs;
*/
private long lastBufferPresentationTimeUs;
private VideoSink.Listener listener; private VideoSink.Listener listener;
private Executor listenerExecutor; private Executor listenerExecutor;
@ -755,7 +740,7 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
videoFrameProcessorMaxPendingFrameCount = videoFrameProcessorMaxPendingFrameCount =
getMaxPendingFramesCountForMediaCodecDecoders(context); getMaxPendingFramesCountForMediaCodecDecoders(context);
videoEffects = ImmutableList.of(); videoEffects = ImmutableList.of();
lastBufferPresentationTimeUs = C.TIME_UNSET; lastFramePresentationTimeUs = C.TIME_UNSET;
listener = VideoSink.Listener.NO_OP; listener = VideoSink.Listener.NO_OP;
listenerExecutor = NO_OP_EXECUTOR; listenerExecutor = NO_OP_EXECUTOR;
} }
@ -799,10 +784,10 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
} }
// Resignal EOS only for the last item. // Resignal EOS only for the last item.
boolean needsResignalEndOfCurrentInputStream = signaledEndOfStream; boolean needsResignalEndOfCurrentInputStream = signaledEndOfStream;
long replayedPresentationTimeUs = lastOutputBufferPresentationTimeUs; long replayedPresentationTimeUs = lastOutputFramePresentationTimeUs;
PlaybackVideoGraphWrapper.this.flush(/* resetPosition= */ false); PlaybackVideoGraphWrapper.this.flush(/* resetPosition= */ false);
checkNotNull(videoGraph).redraw(); checkNotNull(videoGraph).redraw();
lastOutputBufferPresentationTimeUs = replayedPresentationTimeUs; lastOutputFramePresentationTimeUs = replayedPresentationTimeUs;
if (needsResignalEndOfCurrentInputStream) { if (needsResignalEndOfCurrentInputStream) {
signalEndOfCurrentInputStream(); signalEndOfCurrentInputStream();
} }
@ -813,7 +798,7 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
if (isInitialized()) { if (isInitialized()) {
checkNotNull(videoGraph).flush(); checkNotNull(videoGraph).flush();
} }
lastBufferPresentationTimeUs = C.TIME_UNSET; lastFramePresentationTimeUs = C.TIME_UNSET;
PlaybackVideoGraphWrapper.this.flush(resetPosition); PlaybackVideoGraphWrapper.this.flush(resetPosition);
signaledEndOfStream = false; signaledEndOfStream = false;
// Don't change input stream start position or reset the pending input stream timestamp info // Don't change input stream start position or reset the pending input stream timestamp info
@ -830,8 +815,8 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
finalBufferPresentationTimeUs = lastBufferPresentationTimeUs; finalFramePresentationTimeUs = lastFramePresentationTimeUs;
if (lastOutputBufferPresentationTimeUs >= finalBufferPresentationTimeUs) { if (lastOutputFramePresentationTimeUs >= finalFramePresentationTimeUs) {
PlaybackVideoGraphWrapper.this.signalEndOfVideoGraphOutputStream(); PlaybackVideoGraphWrapper.this.signalEndOfVideoGraphOutputStream();
} }
} }
@ -863,17 +848,23 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
this.videoEffects = ImmutableList.copyOf(videoEffects); this.videoEffects = ImmutableList.copyOf(videoEffects);
this.inputType = inputType; this.inputType = inputType;
this.inputFormat = format; this.inputFormat = format;
finalBufferPresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
hasSignaledEndOfVideoGraphOutputStream = false; hasSignaledEndOfVideoGraphOutputStream = false;
registerInputStream(format); registerInputStream(format);
// Input timestamps should always be positive because they are offset by ExoPlayer. Adding a long fromTimestampUs;
// stream change info to the queue with timestamp 0 should therefore always apply it as long if (lastFramePresentationTimeUs == C.TIME_UNSET) {
// as it is the only one in the queue. // Add a stream change info to the queue with a large negative timestamp to always apply it
long fromTimestampUs = // as long as it is the only one in the queue.
lastBufferPresentationTimeUs == C.TIME_UNSET ? 0 : lastBufferPresentationTimeUs + 1; fromTimestampUs = Long.MIN_VALUE / 2;
} else {
fromTimestampUs = lastFramePresentationTimeUs + 1;
}
pendingStreamChanges.add( pendingStreamChanges.add(
fromTimestampUs, fromTimestampUs,
new StreamChangeInfo(startPositionUs, firstFrameReleaseInstruction, fromTimestampUs)); new StreamChangeInfo(
/* startPositionUs= */ startPositionUs + inputBufferTimestampAdjustmentUs,
firstFrameReleaseInstruction,
fromTimestampUs));
} }
@Override @Override
@ -954,11 +945,6 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
@Override @Override
public void setBufferTimestampAdjustmentUs(long bufferTimestampAdjustmentUs) { public void setBufferTimestampAdjustmentUs(long bufferTimestampAdjustmentUs) {
inputBufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs; inputBufferTimestampAdjustmentUs = bufferTimestampAdjustmentUs;
// The buffer timestamp adjustment is only allowed to change after a flush to make sure that
// the buffer timestamps are increasing. We can update the buffer timestamp adjustment
// directly at the output of the VideoGraph because no frame has been input yet following the
// flush.
PlaybackVideoGraphWrapper.this.setBufferTimestampAdjustment(inputBufferTimestampAdjustmentUs);
} }
@Override @Override
@ -981,7 +967,7 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
@Override @Override
public boolean handleInputFrame( public boolean handleInputFrame(
long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) { long bufferPresentationTimeUs, VideoFrameHandler videoFrameHandler) {
checkState(isInitialized()); checkState(isInitialized());
if (!shouldRenderToInputVideoSink()) { if (!shouldRenderToInputVideoSink()) {
@ -1002,10 +988,11 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
// duration of the first video. Thus this correction is needed to account for the different // duration of the first video. Thus this correction is needed to account for the different
// handling of presentation timestamps in ExoPlayer and VideoFrameProcessor. // handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
// //
// inputBufferTimestampAdjustmentUs adjusts the frame presentation time (which is relative to // inputBufferTimestampAdjustmentUs adjusts the buffer timestamp (that corresponds to the
// the start of a composition) to the buffer timestamp (that corresponds to the player // player position) to the frame presentation time (which is relative to the start of a
// position). // composition).
lastBufferPresentationTimeUs = framePresentationTimeUs - inputBufferTimestampAdjustmentUs; long framePresentationTimeUs = bufferPresentationTimeUs + inputBufferTimestampAdjustmentUs;
lastFramePresentationTimeUs = framePresentationTimeUs;
// Use the frame presentation time as render time so that the SurfaceTexture is accompanied // Use the frame presentation time as render time so that the SurfaceTexture is accompanied
// by this timestamp. Setting a realtime based release time is only relevant when rendering to // by this timestamp. Setting a realtime based release time is only relevant when rendering to
// a SurfaceView, but we render to a surface in this case. // a SurfaceView, but we render to a surface in this case.
@ -1014,25 +1001,29 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
} }
@Override @Override
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) { public boolean handleInputBitmap(
Bitmap inputBitmap, TimestampIterator bufferTimestampIterator) {
checkState(isInitialized()); checkState(isInitialized());
if (!shouldRenderToInputVideoSink() if (!shouldRenderToInputVideoSink()) {
|| !checkNotNull(videoGraph) return false;
.queueInputBitmap(inputIndex, inputBitmap, timestampIterator)) { }
TimestampIterator frameTimestampIterator =
new ShiftingTimestampIterator(bufferTimestampIterator, inputBufferTimestampAdjustmentUs);
if (!checkNotNull(videoGraph)
.queueInputBitmap(inputIndex, inputBitmap, frameTimestampIterator)) {
return false; return false;
} }
// TimestampIterator generates frame time. long lastFramePresentationTimeUs = frameTimestampIterator.getLastTimestampUs();
long lastBufferPresentationTimeUs = checkState(lastFramePresentationTimeUs != C.TIME_UNSET);
timestampIterator.getLastTimestampUs() - inputBufferTimestampAdjustmentUs; this.lastFramePresentationTimeUs = lastFramePresentationTimeUs;
checkState(lastBufferPresentationTimeUs != C.TIME_UNSET);
this.lastBufferPresentationTimeUs = lastBufferPresentationTimeUs;
return true; return true;
} }
@Override @Override
public void render(long positionUs, long elapsedRealtimeUs) throws VideoSinkException { public void render(long positionUs, long elapsedRealtimeUs) throws VideoSinkException {
PlaybackVideoGraphWrapper.this.render(positionUs, elapsedRealtimeUs); PlaybackVideoGraphWrapper.this.render(
/* positionUs= */ positionUs + inputBufferTimestampAdjustmentUs, elapsedRealtimeUs);
} }
@Override @Override
@ -1152,6 +1143,40 @@ public final class PlaybackVideoGraphWrapper implements VideoGraph.Listener {
} }
} }
/**
 * A {@link TimestampIterator} that wraps another iterator and offsets every timestamp it produces
 * by a fixed amount.
 */
private static final class ShiftingTimestampIterator implements TimestampIterator {

  /** The iterator whose timestamps are shifted. */
  private final TimestampIterator delegate;

  /** The offset added to every timestamp produced by the wrapped iterator. */
  private final long offset;

  public ShiftingTimestampIterator(TimestampIterator timestampIterator, long shift) {
    this.delegate = timestampIterator;
    this.offset = shift;
  }

  @Override
  public boolean hasNext() {
    return delegate.hasNext();
  }

  @Override
  public long next() {
    return delegate.next() + offset;
  }

  @Override
  public TimestampIterator copyOf() {
    return new ShiftingTimestampIterator(delegate.copyOf(), offset);
  }

  @Override
  public long getLastTimestampUs() {
    long lastTimestampUs = delegate.getLastTimestampUs();
    // C.TIME_UNSET signals an unknown timestamp and must be propagated unshifted.
    if (lastTimestampUs == C.TIME_UNSET) {
      return C.TIME_UNSET;
    }
    return lastTimestampUs + offset;
  }
}
/** Delays reflection for loading a {@link VideoGraph.Factory SingleInputVideoGraph} instance. */ /** Delays reflection for loading a {@link VideoGraph.Factory SingleInputVideoGraph} instance. */
private static final class ReflectiveSingleInputVideoGraphFactory implements VideoGraph.Factory { private static final class ReflectiveSingleInputVideoGraphFactory implements VideoGraph.Factory {

View File

@ -185,8 +185,12 @@ import androidx.media3.exoplayer.ExoPlaybackException;
videoFrameReleaseControl.onStreamChanged(firstFrameReleaseInstruction); videoFrameReleaseControl.onStreamChanged(firstFrameReleaseInstruction);
outputStreamStartPositionUs = streamStartPositionUs; outputStreamStartPositionUs = streamStartPositionUs;
} else { } else {
// Add a start position to the queue with a large negative timestamp to always apply it as
// long as it is the only one in the queue.
streamStartPositionsUs.add( streamStartPositionsUs.add(
latestInputPresentationTimeUs == C.TIME_UNSET ? 0 : latestInputPresentationTimeUs + 1, latestInputPresentationTimeUs == C.TIME_UNSET
? Long.MIN_VALUE / 2
: latestInputPresentationTimeUs + 1,
streamStartPositionUs); streamStartPositionUs);
} }
} }

View File

@ -277,12 +277,12 @@ public interface VideoSink {
* <p>Must be called after the corresponding stream is {@linkplain #onInputStreamChanged(int, * <p>Must be called after the corresponding stream is {@linkplain #onInputStreamChanged(int,
* Format, long, int, List) signaled}. * Format, long, int, List) signaled}.
* *
* @param framePresentationTimeUs The frame's presentation time, in microseconds. * @param bufferPresentationTimeUs The buffer presentation time, in microseconds.
* @param videoFrameHandler The {@link VideoFrameHandler} used to handle the input frame. * @param videoFrameHandler The {@link VideoFrameHandler} used to handle the input frame.
* @return Whether the frame was handled successfully. If {@code false}, the caller can try again * @return Whether the frame was handled successfully. If {@code false}, the caller can try again
* later. * later.
*/ */
boolean handleInputFrame(long framePresentationTimeUs, VideoFrameHandler videoFrameHandler); boolean handleInputFrame(long bufferPresentationTimeUs, VideoFrameHandler videoFrameHandler);
/** /**
* Handles an input {@link Bitmap}. * Handles an input {@link Bitmap}.
@ -291,12 +291,12 @@ public interface VideoSink {
* Format, long, int, List) signaled}. * Format, long, int, List) signaled}.
* *
* @param inputBitmap The {@link Bitmap} to queue to the video sink. * @param inputBitmap The {@link Bitmap} to queue to the video sink.
* @param timestampIterator The times within the current stream that the bitmap should be shown * @param bufferTimestampIterator The buffer presentation times within the current stream that the
* at. The timestamps should be monotonically increasing. * bitmap should be shown at. The timestamps should be monotonically increasing.
* @return Whether the bitmap was queued successfully. If {@code false}, the caller can try again * @return Whether the bitmap was queued successfully. If {@code false}, the caller can try again
* later. * later.
*/ */
boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator); boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator bufferTimestampIterator);
/** /**
* Incrementally renders processed video frames to the output surface. * Incrementally renders processed video frames to the output surface.

View File

@ -233,9 +233,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/ */
@Override @Override
public boolean handleInputFrame( public boolean handleInputFrame(
long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) { long bufferPresentationTimeUs, VideoFrameHandler videoFrameHandler) {
return videoSink != null return videoSink != null
&& videoSink.handleInputFrame(framePresentationTimeUs, videoFrameHandler); && videoSink.handleInputFrame(bufferPresentationTimeUs, videoFrameHandler);
} }
/** /**
@ -245,8 +245,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* sink} is {@code null}. * sink} is {@code null}.
*/ */
@Override @Override
public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) { public boolean handleInputBitmap(Bitmap inputBitmap, TimestampIterator bufferTimestampIterator) {
return videoSink != null && videoSink.handleInputBitmap(inputBitmap, timestampIterator); return videoSink != null && videoSink.handleInputBitmap(inputBitmap, bufferTimestampIterator);
} }
/** /**

View File

@ -500,7 +500,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private long streamStartPositionUs; private long streamStartPositionUs;
private boolean mayRenderStartOfStream; private boolean mayRenderStartOfStream;
private @VideoSink.FirstFrameReleaseInstruction int nextFirstFrameReleaseInstruction; private @VideoSink.FirstFrameReleaseInstruction int nextFirstFrameReleaseInstruction;
private long offsetToCompositionTimeUs;
private @MonotonicNonNull WakeupListener wakeupListener; private @MonotonicNonNull WakeupListener wakeupListener;
public SequenceImageRenderer( public SequenceImageRenderer(
@ -598,7 +597,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// The media item might have been repeated in the sequence. // The media item might have been repeated in the sequence.
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid); int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
currentEditedMediaItem = sequence.editedMediaItems.get(mediaItemIndex); currentEditedMediaItem = sequence.editedMediaItems.get(mediaItemIndex);
offsetToCompositionTimeUs = getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs); long offsetToCompositionTimeUs =
getOffsetToCompositionTimeUs(sequence, mediaItemIndex, offsetUs);
videoSink.setBufferTimestampAdjustmentUs(offsetToCompositionTimeUs); videoSink.setBufferTimestampAdjustmentUs(offsetToCompositionTimeUs);
timestampIterator = createTimestampIterator(/* positionUs= */ startPositionUs); timestampIterator = createTimestampIterator(/* positionUs= */ startPositionUs);
videoEffects = currentEditedMediaItem.effects.videoEffects; videoEffects = currentEditedMediaItem.effects.videoEffects;
@ -663,14 +663,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
private ConstantRateTimestampIterator createTimestampIterator(long positionUs) { private ConstantRateTimestampIterator createTimestampIterator(long positionUs) {
long streamOffsetUs = getStreamOffsetUs();
long imageBaseTimestampUs = streamOffsetUs + offsetToCompositionTimeUs;
long positionWithinImage = positionUs - streamOffsetUs;
long firstBitmapTimeUs = imageBaseTimestampUs + positionWithinImage;
long lastBitmapTimeUs = long lastBitmapTimeUs =
imageBaseTimestampUs + checkNotNull(currentEditedMediaItem).getPresentationDurationUs(); getStreamOffsetUs() + checkNotNull(currentEditedMediaItem).getPresentationDurationUs();
return new ConstantRateTimestampIterator( return new ConstantRateTimestampIterator(
/* startPositionUs= */ firstBitmapTimeUs, /* startPositionUs= */ positionUs,
/* endPositionUs= */ lastBitmapTimeUs, /* endPositionUs= */ lastBitmapTimeUs,
DEFAULT_FRAME_RATE); DEFAULT_FRAME_RATE);
} }