Remove seeking to live edge.

This is in preparation for the player knowing about the live
window, at which point only correct playback positions will be
passed into ChunkSource implementations. Note that both
implementations bound the chunk index within range in case of,
for example, off-by-one errors caused by very recent manifest
updates.

This will temporarily cause live playbacks to always start at
the beginning of the current live window (we'll be trying to
play from t=0, which will be adjusted to the start of the live
window as a result of the bounding).
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=119733559
This commit is contained in:
olly 2016-04-13 04:23:41 -07:00 committed by Oliver Woodman
parent 730e4ac953
commit f963c626af
7 changed files with 49 additions and 155 deletions

View File

@@ -48,7 +48,6 @@ public class DashSourceBuilder implements SourceBuilder {
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 54;
private static final int TEXT_BUFFER_SEGMENTS = 2;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final DataSourceFactory dataSourceFactory;
private final String url;
@@ -76,14 +75,14 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
videoDataSource, new AdaptiveEvaluator(bandwidthMeter));
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
// Build the audio renderer.
DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_AUDIO,
audioDataSource, null, LIVE_EDGE_LATENCY_MS);
audioDataSource, null);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
@@ -91,7 +90,7 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the text renderer.
DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_TEXT,
textDataSource, null, LIVE_EDGE_LATENCY_MS);
textDataSource, null);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);

View File

@@ -48,7 +48,6 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 54;
private static final int TEXT_BUFFER_SEGMENTS = 2;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final DataSourceFactory dataSourceFactory;
private final String url;
@@ -75,8 +74,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
C.TRACK_TYPE_VIDEO, videoDataSource, new AdaptiveEvaluator(bandwidthMeter),
LIVE_EDGE_LATENCY_MS);
C.TRACK_TYPE_VIDEO, videoDataSource, new AdaptiveEvaluator(bandwidthMeter));
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
@@ -84,14 +82,14 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the audio renderer.
DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
C.TRACK_TYPE_AUDIO, audioDataSource, null, LIVE_EDGE_LATENCY_MS);
C.TRACK_TYPE_AUDIO, audioDataSource, null);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
// Build the text renderer.
DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
C.TRACK_TYPE_TEXT, textDataSource, null, LIVE_EDGE_LATENCY_MS);
C.TRACK_TYPE_TEXT, textDataSource, null);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);

View File

@@ -44,6 +44,7 @@ import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import java.io.IOException;
import java.util.Arrays;
@@ -70,7 +71,6 @@ public class DashChunkSource implements ChunkSource {
private final FormatEvaluator adaptiveFormatEvaluator;
private final Evaluation evaluation;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final long liveEdgeLatencyUs;
// Properties of the initial manifest.
private boolean live;
@@ -78,8 +78,6 @@ public class DashChunkSource implements ChunkSource {
private MediaPresentationDescription currentManifest;
private DrmInitData drmInitData;
private boolean indexIsUnbounded;
private boolean indexIsExplicit;
private boolean lastChunkWasInitialization;
private IOException fatalError;
@@ -99,20 +97,13 @@ public class DashChunkSource implements ChunkSource {
* {@link C#TRACK_TYPE_AUDIO}, {@link C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however
* note that the value sets an upper bound on the length of media that the player can buffer.
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long liveEdgeLatencyMs) {
int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this.manifestFetcher = manifestFetcher;
this.adaptationSetType = adaptationSetType;
this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
this.evaluation = new Evaluation();
}
@@ -269,25 +260,20 @@ public class DashChunkSource implements ChunkSource {
int segmentNum;
if (previous == null) {
if (live) {
playbackPositionUs = getLiveSeekPosition(nowUs);
}
segmentNum = representationHolder.getSegmentNum(playbackPositionUs);
segmentNum = Util.constrainValue(representationHolder.getSegmentNum(playbackPositionUs),
firstAvailableSegmentNum, lastAvailableSegmentNum);
} else {
segmentNum = previous.getNextChunkIndex();
}
if (live && segmentNum < firstAvailableSegmentNum) {
if (segmentNum < firstAvailableSegmentNum) {
// This is before the first chunk in the current manifest.
fatalError = new BehindLiveWindowException();
return;
} else if (currentManifest.dynamic) {
}
}
if (segmentNum > lastAvailableSegmentNum) {
// This is beyond the last chunk in the current manifest.
return;
}
} else if (segmentNum > lastAvailableSegmentNum) {
out.endOfStream = true;
out.endOfStream = !currentManifest.dynamic;
return;
}
@@ -362,8 +348,6 @@ public class DashChunkSource implements ChunkSource {
}
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null, trackFormats);
drmInitData = getDrmInitData(adaptationSet);
updateRepresentationIndependentProperties(periodDurationUs,
representationHolders[0].representation);
return;
}
}
@@ -381,27 +365,12 @@ public class DashChunkSource implements ChunkSource {
Representation representation = representations.get(i);
representationHolders[i].updateRepresentation(periodDurationUs, representation);
}
updateRepresentationIndependentProperties(periodDurationUs,
representationHolders[0].representation);
} catch (BehindLiveWindowException e) {
fatalError = e;
return;
}
}
/**
 * Updates {@link #indexIsUnbounded} and {@link #indexIsExplicit} from the segment index of an
 * arbitrary representation. These properties do not vary between representations, so any one of
 * the current representations may be passed.
 *
 * @param periodDurationUs The duration of the period, in microseconds.
 * @param arbitraryRepresentation Any one of the current representations.
 */
private void updateRepresentationIndependentProperties(long periodDurationUs,
    Representation arbitraryRepresentation) {
  DashSegmentIndex segmentIndex = arbitraryRepresentation.getIndex();
  if (segmentIndex != null) {
    int lastSegmentNum = segmentIndex.getLastSegmentNum(periodDurationUs);
    indexIsUnbounded = lastSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
    indexIsExplicit = segmentIndex.isExplicit();
  } else {
    // No multi-segment index: the representation consists of a single segment whose bounds are
    // fully known, so the index is bounded and explicit.
    indexIsUnbounded = false;
    indexIsExplicit = true;
  }
}
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
Representation representation, ChunkExtractorWrapper extractor, DataSource dataSource,
int trigger) {
@@ -443,33 +412,6 @@ public class DashChunkSource implements ChunkSource {
}
}
/**
 * For live playbacks, determines the seek position that snaps playback to be
 * {@link #liveEdgeLatencyUs} behind the live edge of the current manifest.
 *
 * @param nowUs The current time, in microseconds.
 * @return The seek position in microseconds.
 */
private long getLiveSeekPosition(long nowUs) {
// Time elapsed since the manifest became available (availabilityStartTime appears to be in
// milliseconds, hence the * 1000 to microseconds — NOTE(review): confirm unit).
long elapsedTimeUs = nowUs - currentManifest.availabilityStartTime * 1000;
long liveEdgeTimestampUs;
if (indexIsUnbounded) {
// The index declares no upper bound, so treat the elapsed time itself as the live edge.
liveEdgeTimestampUs = elapsedTimeUs;
} else {
// The live edge is the maximum segment end time over all representations' indices.
liveEdgeTimestampUs = Long.MIN_VALUE;
for (RepresentationHolder representationHolder : representationHolders) {
int lastSegmentNum = representationHolder.getLastSegmentNum();
long indexLiveEdgeTimestampUs = representationHolder.getSegmentEndTimeUs(lastSegmentNum);
liveEdgeTimestampUs = Math.max(liveEdgeTimestampUs, indexLiveEdgeTimestampUs);
}
if (!indexIsExplicit) {
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs, elapsedTimeUs);
}
}
return liveEdgeTimestampUs - liveEdgeLatencyUs;
}
private int getTrackIndex(Format format) {
for (int i = 0; i < trackGroup.length; i++) {
if (trackGroup.getFormat(i) == format) {

View File

@@ -337,24 +337,15 @@ public class HlsChunkSource {
return;
}
int chunkMediaSequence = 0;
if (live) {
if (previous == null) {
chunkMediaSequence = getLiveStartChunkMediaSequence(variantIndex);
} else {
chunkMediaSequence = switchingVariant ? previous.chunkIndex : previous.chunkIndex + 1;
if (chunkMediaSequence < mediaPlaylist.mediaSequence) {
fatalError = new BehindLiveWindowException();
return;
}
}
} else {
// Not live.
int chunkMediaSequence;
if (previous == null) {
chunkMediaSequence = Util.binarySearchFloor(mediaPlaylist.segments, playbackPositionUs,
true, true) + mediaPlaylist.mediaSequence;
} else {
chunkMediaSequence = switchingVariant ? previous.chunkIndex : previous.chunkIndex + 1;
if (chunkMediaSequence < mediaPlaylist.mediaSequence) {
fatalError = new BehindLiveWindowException();
return;
}
}
@@ -624,13 +615,6 @@ public class HlsChunkSource {
return timeSinceLastMediaPlaylistLoadMs >= (mediaPlaylist.targetDurationSecs * 1000) / 2;
}
/**
 * Returns the media sequence number from which a live playback should start: three chunks back
 * from the end of the playlist, or the first chunk if the playlist is shorter than that.
 *
 * @param variantIndex The index of the variant whose playlist is used.
 * @return The media sequence number of the starting chunk.
 */
private int getLiveStartChunkMediaSequence(int variantIndex) {
  HlsMediaPlaylist playlist = variantPlaylists[variantIndex];
  int segmentCount = playlist.segments.size();
  // Start three chunks from the live edge, clamped to the start of the playlist.
  int startChunkIndex = segmentCount > 3 ? segmentCount - 3 : 0;
  return playlist.mediaSequence + startChunkIndex;
}
private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) {
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, variants[variantIndex].url);
DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null,

View File

@@ -61,7 +61,6 @@ public class SmoothStreamingChunkSource implements ChunkSource {
private final int streamElementType;
private final DataSource dataSource;
private final Evaluation evaluation;
private final long liveEdgeLatencyUs;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final FormatEvaluator adaptiveFormatEvaluator;
@@ -90,20 +89,13 @@ public class SmoothStreamingChunkSource implements ChunkSource {
* {@link C#TRACK_TYPE_VIDEO}, {@link C#TRACK_TYPE_AUDIO} and {@link C#TRACK_TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however
* note that the value sets an upper bound on the length of media that the player can buffer.
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
int streamElementType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long liveEdgeLatencyMs) {
int streamElementType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this.manifestFetcher = manifestFetcher;
this.streamElementType = streamElementType;
this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
evaluation = new Evaluation();
}
@@ -247,30 +239,20 @@ public class SmoothStreamingChunkSource implements ChunkSource {
int chunkIndex;
if (previous == null) {
if (live) {
playbackPositionUs = getLiveSeekPosition(currentManifest, liveEdgeLatencyUs);
}
chunkIndex = streamElement.getChunkIndex(playbackPositionUs);
} else {
chunkIndex = previous.getNextChunkIndex() - currentManifestChunkOffset;
}
if (live && chunkIndex < 0) {
if (chunkIndex < 0) {
// This is before the first chunk in the current manifest.
fatalError = new BehindLiveWindowException();
return;
} else if (currentManifest.isLive) {
}
}
needManifestRefresh = currentManifest.isLive && chunkIndex >= streamElement.chunkCount - 1;
if (chunkIndex >= streamElement.chunkCount) {
// This is beyond the last chunk in the current manifest.
needManifestRefresh = true;
return;
} else if (chunkIndex == streamElement.chunkCount - 1) {
// This is the last chunk in the current manifest. Mark the manifest as being finished,
// but continue to return the final chunk.
needManifestRefresh = true;
}
} else if (chunkIndex >= streamElement.chunkCount) {
out.endOfStream = true;
out.endOfStream = !currentManifest.isLive;
return;
}
@@ -339,29 +321,6 @@ public class SmoothStreamingChunkSource implements ChunkSource {
trackGroup = null;
}
/**
 * For live playbacks, determines the seek position that snaps playback to be
 * {@code liveEdgeLatencyUs} behind the live edge of the provided manifest.
 *
 * @param manifest The manifest.
 * @param liveEdgeLatencyUs The live edge latency, in microseconds.
 * @return The seek position in microseconds.
 */
private static long getLiveSeekPosition(SmoothStreamingManifest manifest,
    long liveEdgeLatencyUs) {
  // The live edge is the maximum chunk end time over all elements that define at least one chunk.
  long maxEndTimeUs = Long.MIN_VALUE;
  for (StreamElement element : manifest.streamElements) {
    if (element.chunkCount == 0) {
      continue;
    }
    int lastChunkIndex = element.chunkCount - 1;
    long elementEndTimeUs =
        element.getStartTimeUs(lastChunkIndex) + element.getChunkDurationUs(lastChunkIndex);
    maxEndTimeUs = Math.max(maxEndTimeUs, elementEndTimeUs);
  }
  return maxEndTimeUs - liveEdgeLatencyUs;
}
/**
* Gets the index of a format in a track group, using referential equality.
*/

View File

@@ -98,7 +98,7 @@ public class PlayerControl implements MediaPlayerControl {
@Override
public void seekTo(int timeMillis) {
long seekPosition = exoPlayer.getDuration() == ExoPlayer.UNKNOWN_TIME ? 0
: Math.min(Math.max(0, timeMillis), getDuration());
: Util.constrainValue(timeMillis, 0, getDuration());
exoPlayer.seekTo(seekPosition);
}

View File

@@ -288,6 +288,18 @@ public final class Util {
return (numerator + denominator - 1) / denominator;
}
/**
 * Constrains a value to the specified bounds.
 *
 * @param value The value to constrain.
 * @param min The lower bound.
 * @param max The upper bound.
 * @return The constrained value {@code Math.max(min, Math.min(value, max))}.
 */
public static int constrainValue(int value, int min, int max) {
  // Apply the upper bound first, then the lower bound; the lower bound wins if min > max,
  // matching Math.max(min, Math.min(value, max)).
  int upperBounded = Math.min(value, max);
  return Math.max(min, upperBounded);
}
/**
* Returns the index of the largest value in an array that is less than (or optionally equal to)
* a specified key.