Display last frame when seeking to end of stream.

We currently don't display the last frame because the seek time is beyond the
last frame's timestamp, and the frame is therefore marked as decodeOnly.

This case can be detected by checking whether all data sent to the codec is
marked as decodeOnly at the time we read the end-of-stream signal. If so, we
can re-enable rendering of the last frame. This should work in almost all
cases because the end-of-stream signal is read in the same feedInputBuffer
loop as the last frame, and we therefore haven't released the last frame's
buffer yet.
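
The sketch below illustrates the mechanism in isolation (hypothetical helper
class and method names, not the actual MediaCodecRenderer API): track the
largest queued timestamp, latch it as the last buffer's timestamp once the
end-of-stream signal is read, and render that buffer even if it is marked as
decodeOnly.

  // Hypothetical, simplified sketch of the last-frame detection; the real
  // implementation lives in MediaCodecRenderer/MediaCodecVideoRenderer below.
  final class LastFrameTracker {
    private long largestQueuedUs = Long.MIN_VALUE;
    private long lastBufferUs = Long.MIN_VALUE;

    // Called for every input buffer queued to the codec.
    void onInputQueued(long presentationTimeUs) {
      largestQueuedUs = Math.max(largestQueuedUs, presentationTimeUs);
    }

    // Called when the end-of-stream signal is read in the feedInputBuffer loop.
    void onEndOfStreamRead() {
      lastBufferUs = largestQueuedUs;
    }

    // A decode-only buffer is normally skipped, unless it is the last buffer
    // in the stream, in which case it is rendered so the final frame shows.
    boolean shouldRender(long presentationTimeUs, boolean isDecodeOnly) {
      boolean isLastBuffer = presentationTimeUs == lastBufferUs;
      return !isDecodeOnly || isLastBuffer;
    }
  }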

Issue: #2568
PiperOrigin-RevId: 251425870
Authored by tonihei on 2019-06-04 14:20:51 +01:00; committed by Oliver Woodman
commit be88499615 (parent 44aa731476)
6 changed files with 58 additions and 26 deletions

RELEASENOTES.md

@@ -6,6 +6,8 @@
   and analytics reporting (TODO: link to developer guide page/blog post).
 * Add basic DRM support to the Cast demo app.
 * Offline: Add `Scheduler` implementation that uses `WorkManager`.
+* Display last frame when seeking to end of stream
+  ([#2568](https://github.com/google/ExoPlayer/issues/2568)).
 * Assume that encrypted content requires secure decoders in renderer support
   checks ([#5568](https://github.com/google/ExoPlayer/issues/5568)).
 * Decoders: Prefer decoders that advertise format support over ones that do not,

MediaCodecAudioRenderer.java

@@ -691,7 +691,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
       int bufferIndex,
       int bufferFlags,
       long bufferPresentationTimeUs,
-      boolean shouldSkip,
+      boolean isDecodeOnlyBuffer,
+      boolean isLastBuffer,
       Format format)
       throws ExoPlaybackException {
     if (codecNeedsEosBufferTimestampWorkaround
@@ -707,7 +708,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
       return true;
     }
-    if (shouldSkip) {
+    if (isDecodeOnlyBuffer) {
       codec.releaseOutputBuffer(bufferIndex, false);
       decoderCounters.skippedOutputBufferCount++;
       audioSink.handleDiscontinuity();

MediaCodecRenderer.java

@@ -328,14 +328,16 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
   private int inputIndex;
   private int outputIndex;
   private ByteBuffer outputBuffer;
-  private boolean shouldSkipOutputBuffer;
+  private boolean isDecodeOnlyOutputBuffer;
+  private boolean isLastOutputBuffer;
   private boolean codecReconfigured;
   @ReconfigurationState private int codecReconfigurationState;
   @DrainState private int codecDrainState;
   @DrainAction private int codecDrainAction;
   private boolean codecReceivedBuffers;
   private boolean codecReceivedEos;
+  private long lastBufferInStreamPresentationTimeUs;
+  private long largestQueuedPresentationTimeUs;
   private boolean inputStreamEnded;
   private boolean outputStreamEnded;
   private boolean waitingForKeys;
@@ -600,6 +602,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     waitingForKeys = false;
     codecHotswapDeadlineMs = C.TIME_UNSET;
     decodeOnlyPresentationTimestamps.clear();
+    largestQueuedPresentationTimeUs = C.TIME_UNSET;
+    lastBufferInStreamPresentationTimeUs = C.TIME_UNSET;
     try {
       if (codec != null) {
         decoderCounters.decoderReleaseCount++;
@@ -706,10 +710,13 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     waitingForFirstSyncSample = true;
     codecNeedsAdaptationWorkaroundBuffer = false;
     shouldSkipAdaptationWorkaroundOutputBuffer = false;
-    shouldSkipOutputBuffer = false;
+    isDecodeOnlyOutputBuffer = false;
+    isLastOutputBuffer = false;
     waitingForKeys = false;
     decodeOnlyPresentationTimestamps.clear();
+    largestQueuedPresentationTimeUs = C.TIME_UNSET;
+    lastBufferInStreamPresentationTimeUs = C.TIME_UNSET;
     codecDrainState = DRAIN_STATE_NONE;
     codecDrainAction = DRAIN_ACTION_NONE;
     // Reconfiguration data sent shortly before the flush may not have been processed by the
@@ -883,7 +890,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     codecDrainAction = DRAIN_ACTION_NONE;
     codecNeedsAdaptationWorkaroundBuffer = false;
     shouldSkipAdaptationWorkaroundOutputBuffer = false;
-    shouldSkipOutputBuffer = false;
+    isDecodeOnlyOutputBuffer = false;
+    isLastOutputBuffer = false;
     waitingForFirstSyncSample = true;
     decoderCounters.decoderInitCount++;
@@ -1010,6 +1018,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       result = readSource(formatHolder, buffer, false);
     }
+    if (hasReadStreamToEnd()) {
+      // Notify output queue of the last buffer's timestamp.
+      lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
+    }
     if (result == C.RESULT_NOTHING_READ) {
       return false;
     }
@@ -1082,6 +1095,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       formatQueue.add(presentationTimeUs, inputFormat);
       waitingForFirstSampleInFormat = false;
     }
+    largestQueuedPresentationTimeUs =
+        Math.max(largestQueuedPresentationTimeUs, presentationTimeUs);
     buffer.flip();
     onQueueInputBuffer(buffer);
@@ -1456,7 +1471,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
         outputBuffer.position(outputBufferInfo.offset);
         outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);
       }
-      shouldSkipOutputBuffer = shouldSkipOutputBuffer(outputBufferInfo.presentationTimeUs);
+      isDecodeOnlyOutputBuffer = isDecodeOnlyBuffer(outputBufferInfo.presentationTimeUs);
+      isLastOutputBuffer =
+          lastBufferInStreamPresentationTimeUs == outputBufferInfo.presentationTimeUs;
       updateOutputFormatForTime(outputBufferInfo.presentationTimeUs);
     }
@@ -1472,7 +1489,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
             outputIndex,
             outputBufferInfo.flags,
             outputBufferInfo.presentationTimeUs,
-            shouldSkipOutputBuffer,
+            isDecodeOnlyOutputBuffer,
+            isLastOutputBuffer,
             outputFormat);
       } catch (IllegalStateException e) {
         processEndOfStream();
@@ -1492,7 +1510,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
             outputIndex,
             outputBufferInfo.flags,
             outputBufferInfo.presentationTimeUs,
-            shouldSkipOutputBuffer,
+            isDecodeOnlyOutputBuffer,
+            isLastOutputBuffer,
             outputFormat);
     }
@@ -1559,7 +1578,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    * @param bufferIndex The index of the output buffer.
    * @param bufferFlags The flags attached to the output buffer.
    * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds.
-   * @param shouldSkip Whether the buffer should be skipped (i.e. not rendered).
+   * @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY}
+   *     by the source.
+   * @param isLastBuffer Whether the buffer is the last sample of the current stream.
    * @param format The format associated with the buffer.
    * @return Whether the output buffer was fully processed (e.g. rendered or skipped).
    * @throws ExoPlaybackException If an error occurs processing the output buffer.
@@ -1572,7 +1593,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       int bufferIndex,
       int bufferFlags,
       long bufferPresentationTimeUs,
-      boolean shouldSkip,
+      boolean isDecodeOnlyBuffer,
+      boolean isLastBuffer,
       Format format)
       throws ExoPlaybackException;
@@ -1652,7 +1674,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     codecDrainAction = DRAIN_ACTION_NONE;
   }
-  private boolean shouldSkipOutputBuffer(long presentationTimeUs) {
+  private boolean isDecodeOnlyBuffer(long presentationTimeUs) {
     // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would
     // box presentationTimeUs, creating a Long object that would need to be garbage collected.
     int size = decodeOnlyPresentationTimestamps.size();

ProgressiveMediaPeriod.java

@@ -738,7 +738,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
     if (prepared) {
       SeekMap seekMap = getPreparedState().seekMap;
       Assertions.checkState(isPendingReset());
-      if (durationUs != C.TIME_UNSET && pendingResetPositionUs >= durationUs) {
+      if (durationUs != C.TIME_UNSET && pendingResetPositionUs > durationUs) {
         loadingFinished = true;
         pendingResetPositionUs = C.TIME_UNSET;
         return;

MediaCodecVideoRenderer.java

@@ -712,7 +712,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
       int bufferIndex,
       int bufferFlags,
       long bufferPresentationTimeUs,
-      boolean shouldSkip,
+      boolean isDecodeOnlyBuffer,
+      boolean isLastBuffer,
       Format format)
       throws ExoPlaybackException {
     if (initialPositionUs == C.TIME_UNSET) {
@@ -721,7 +722,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
     long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs;
-    if (shouldSkip) {
+    if (isDecodeOnlyBuffer && !isLastBuffer) {
       skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
       return true;
     }
@@ -769,10 +770,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
             bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs);
     earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
-    if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs)
+    if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastBuffer)
         && maybeDropBuffersToKeyframe(codec, bufferIndex, presentationTimeUs, positionUs)) {
       return false;
-    } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs)) {
+    } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastBuffer)) {
       dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
       return true;
     }
@@ -840,8 +841,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
   /**
    * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link
-   * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, Format)} to
-   * get the playback position with respect to the media.
+   * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, boolean,
+   * Format)} to get the playback position with respect to the media.
    */
   protected long getOutputStreamOffsetUs() {
     return outputStreamOffsetUs;
@@ -893,9 +894,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
    *     indicates that the buffer is late.
    * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
    *     measured at the start of the current iteration of the rendering loop.
+   * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
    */
-  protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) {
-    return isBufferLate(earlyUs);
+  protected boolean shouldDropOutputBuffer(
+      long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
+    return isBufferLate(earlyUs) && !isLastBuffer;
   }
   /**
@@ -906,9 +909,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
    *     negative value indicates that the buffer is late.
    * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
    *     measured at the start of the current iteration of the rendering loop.
+   * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
   */
-  protected boolean shouldDropBuffersToKeyframe(long earlyUs, long elapsedRealtimeUs) {
-    return isBufferVeryLate(earlyUs);
+  protected boolean shouldDropBuffersToKeyframe(
+      long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
+    return isBufferVeryLate(earlyUs) && !isLastBuffer;
   }
   /**

DebugRenderersFactory.java

@@ -166,14 +166,15 @@ public class DebugRenderersFactory extends DefaultRenderersFactory {
         int bufferIndex,
         int bufferFlags,
         long bufferPresentationTimeUs,
-        boolean shouldSkip,
+        boolean isDecodeOnlyBuffer,
+        boolean isLastBuffer,
         Format format)
         throws ExoPlaybackException {
       if (skipToPositionBeforeRenderingFirstFrame && bufferPresentationTimeUs < positionUs) {
         // After the codec has been initialized, don't render the first frame until we've caught up
         // to the playback position. Else test runs on devices that do not support dummy surface
         // will drop frames between rendering the first one and catching up [Internal: b/66494991].
-        shouldSkip = true;
+        isDecodeOnlyBuffer = true;
       }
       return super.processOutputBuffer(
           positionUs,
@@ -183,7 +184,8 @@ public class DebugRenderersFactory extends DefaultRenderersFactory {
           bufferIndex,
           bufferFlags,
           bufferPresentationTimeUs,
-          shouldSkip,
+          isDecodeOnlyBuffer,
+          isLastBuffer,
           format);
     }