Steps toward enabling seeking in DASH live
This commit is contained in:
parent 8e58a3f5f5
commit 160a88baf9
@@ -224,7 +224,8 @@ public class DashRendererBuilder implements RendererBuilder,
     DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
     ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
         videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
-        new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset);
+        new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
+        mainHandler, player);
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
         VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
         DemoPlayer.TYPE_VIDEO);
@@ -246,7 +247,7 @@ public class DashRendererBuilder implements RendererBuilder,
           format.audioSamplingRate + "Hz)");
       audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
           new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
-          elapsedRealtimeOffset));
+          elapsedRealtimeOffset, mainHandler, player));
       codecs.add(format.codecs);
     }

@@ -303,7 +304,8 @@ public class DashRendererBuilder implements RendererBuilder,
         Representation representation = representations.get(j);
         textTrackNameList.add(representation.format.id);
         textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
-            textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset));
+            textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
+            mainHandler, player));
       }
     }
   }
@@ -29,6 +29,7 @@ import com.google.android.exoplayer.audio.AudioTrack;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
 import com.google.android.exoplayer.chunk.Format;
 import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
+import com.google.android.exoplayer.dash.DashChunkSource;
 import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
 import com.google.android.exoplayer.hls.HlsSampleSource;
 import com.google.android.exoplayer.metadata.MetadataTrackRenderer.MetadataRenderer;
@@ -58,7 +59,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventListener,
     HlsSampleSource.EventListener, DefaultBandwidthMeter.EventListener,
     MediaCodecVideoTrackRenderer.EventListener, MediaCodecAudioTrackRenderer.EventListener,
-    StreamingDrmSessionManager.EventListener, TextRenderer,
+    StreamingDrmSessionManager.EventListener, DashChunkSource.EventListener, TextRenderer,
     MetadataRenderer<Map<String, Object>>, DebugTextViewHelper.Provider {

   /**
@@ -552,6 +553,13 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
     }
   }

+  @Override
+  public void onSeekRangeChanged(TimeRange seekRange) {
+    if (infoListener != null) {
+      infoListener.onSeekRangeChanged(seekRange);
+    }
+  }
+
   @Override
   public void onPlayWhenReadyCommitted() {
     // Do nothing.
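The hunk above forwards the new DashChunkSource.EventListener callback to DemoPlayer's infoListener. As an illustration only (not part of this commit), a UI component could consume that callback roughly as sketched below; the SeekRangeTracker class and its method names are hypothetical, while getCurrentBoundsMs is the TimeRange accessor exercised by DashChunkSourceTest later in this commit.

// Hypothetical consumer of the new seek range callback; illustration only, not from the diff.
class SeekRangeTracker {

  private long[] seekRangeBoundsMs;

  // Invoked on the handler thread supplied to DashChunkSource (the main thread in the demo app).
  public void onSeekRangeChanged(TimeRange seekRange) {
    // getCurrentBoundsMs(long[]) is used the same way in DashChunkSourceTest below.
    seekRangeBoundsMs = seekRange.getCurrentBoundsMs(seekRangeBoundsMs);
  }

  // Clamps a requested seek position (e.g. from a seek bar) into the currently seekable window.
  public long clampSeekPositionMs(long positionMs) {
    if (seekRangeBoundsMs == null) {
      return positionMs;
    }
    return Math.max(seekRangeBoundsMs[0], Math.min(positionMs, seekRangeBoundsMs[1]));
  }
}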
@@ -18,6 +18,7 @@ package com.google.android.exoplayer.dash;
 import com.google.android.exoplayer.BehindLiveWindowException;
 import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.MediaFormat;
+import com.google.android.exoplayer.TimeRange;
 import com.google.android.exoplayer.TrackInfo;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.chunk.Chunk;
@@ -50,6 +51,8 @@ import com.google.android.exoplayer.util.ManifestFetcher;
 import com.google.android.exoplayer.util.MimeTypes;
 import com.google.android.exoplayer.util.SystemClock;

+import android.os.Handler;
+
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
@@ -63,6 +66,20 @@ import java.util.List;
  */
 public class DashChunkSource implements ChunkSource {

+  /**
+   * Interface definition for a callback to be notified of {@link DashChunkSource} events.
+   */
+  public interface EventListener {
+
+    /**
+     * Invoked when the available seek range of the stream has changed.
+     *
+     * @param seekRange The range which specifies available content that can be seeked to.
+     */
+    public void onSeekRangeChanged(TimeRange seekRange);
+
+  }
+
   /**
    * Thrown when an AdaptationSet is missing from the MPD.
    */
@@ -79,6 +96,9 @@ public class DashChunkSource implements ChunkSource {
    */
   public static final int USE_ALL_TRACKS = -1;

+  private final Handler eventHandler;
+  private final EventListener eventListener;
+
   private final TrackInfo trackInfo;
   private final DataSource dataSource;
   private final FormatEvaluator formatEvaluator;
@@ -101,6 +121,12 @@ public class DashChunkSource implements ChunkSource {
   private boolean finishedCurrentManifest;

   private DrmInitData drmInitData;
+  private TimeRange seekRange;
+  private long[] seekRangeValues;
+  private int firstAvailableSegmentNum;
+  private int lastAvailableSegmentNum;
+
+  private boolean startAtLiveEdge;
   private boolean lastChunkWasInitialization;
   private IOException fatalError;

@@ -142,7 +168,7 @@ public class DashChunkSource implements ChunkSource {
   public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
       int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
     this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
-        new SystemClock(), 0, 0);
+        new SystemClock(), 0, 0, false, null, null);
   }

   /**
@@ -167,19 +193,58 @@ public class DashChunkSource implements ChunkSource {
    * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
    *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
    *     as the server's unix time minus the local elapsed time. It unknown, set to 0.
+   * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
+   *     null if delivery of events is not required.
+   * @param eventListener A listener of events. May be null if delivery of events is not required.
    */
   public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
       int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
-      FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs) {
+      FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
+      Handler eventHandler, EventListener eventListener) {
     this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
         dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
-        elapsedRealtimeOffsetMs * 1000);
+        elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener);
+  }
+
+  /**
+   * Constructor to use for live DVR streaming.
+   *
+   * @param manifestFetcher A fetcher for the manifest, which must have already successfully
+   *     completed an initial load.
+   * @param adaptationSetIndex The index of the adaptation set that should be used.
+   * @param representationIndices The indices of the representations within the adaptations set
+   *     that should be used. May be null if all representations within the adaptation set should
+   *     be considered.
+   * @param dataSource A {@link DataSource} suitable for loading the media data.
+   * @param formatEvaluator Selects from the available formats.
+   * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
+   *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
+   *     manifest). Choosing a small value will minimize latency introduced by the player, however
+   *     note that the value sets an upper bound on the length of media that the player can buffer.
+   *     Hence a small value may increase the probability of rebuffering and playback failures.
+   * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
+   *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
+   *     as the server's unix time minus the local elapsed time. It unknown, set to 0.
+   * @param startAtLiveEdge True if the stream should start at the live edge; false if it should
+   *     at the beginning of the live window.
+   * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
+   *     null if delivery of events is not required.
+   * @param eventListener A listener of events. May be null if delivery of events is not required.
+   */
+  public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
+      int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
+      FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
+      boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) {
+    this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
+        dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
+        elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener);
   }

   /* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
       MediaPresentationDescription initialManifest, int adaptationSetIndex,
       int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
-      Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
+      Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
+      boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) {
     this.manifestFetcher = manifestFetcher;
     this.currentManifest = initialManifest;
     this.adaptationSetIndex = adaptationSetIndex;
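For illustration only (not part of the diff): the new DVR constructor would be wired from a renderer builder much like the DashRendererBuilder call at the top of this commit, with the extra startAtLiveEdge flag controlling whether playback begins at the live edge or at the start of the seekable window. The variable names below mirror that earlier hunk and are otherwise assumptions.

// Sketch only: constructing a DashChunkSource for DVR-style playback (startAtLiveEdge = false).
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
    videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
    new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
    false /* startAtLiveEdge */, mainHandler, player);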
@@ -189,8 +254,12 @@ public class DashChunkSource implements ChunkSource {
     this.systemClock = systemClock;
     this.liveEdgeLatencyUs = liveEdgeLatencyUs;
     this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
+    this.startAtLiveEdge = startAtLiveEdge;
+    this.eventHandler = eventHandler;
+    this.eventListener = eventListener;
     this.evaluation = new Evaluation();
     this.headerBuilder = new StringBuilder();
+    this.seekRangeValues = new long[2];

     drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
     Representation[] representations = getFilteredRepresentations(currentManifest,
@@ -229,6 +298,11 @@ public class DashChunkSource implements ChunkSource {
     return trackInfo;
   }

+  // VisibleForTesting
+  /* package */ TimeRange getSeekRange() {
+    return seekRange;
+  }
+
   @Override
   public void enable() {
     fatalError = null;
@@ -236,6 +310,16 @@ public class DashChunkSource implements ChunkSource {
     if (manifestFetcher != null) {
       manifestFetcher.enable();
     }
+    DashSegmentIndex segmentIndex =
+        representationHolders.get(formats[0].id).representation.getIndex();
+    if (segmentIndex == null) {
+      seekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, currentManifest.duration * 1000);
+      notifySeekRangeChanged(seekRange);
+    } else {
+      long nowUs = getNowUs();
+      updateAvailableSegmentBounds(segmentIndex, nowUs);
+      updateSeekRange(segmentIndex, nowUs);
+    }
   }

   @Override
@@ -244,6 +328,7 @@ public class DashChunkSource implements ChunkSource {
     if (manifestFetcher != null) {
       manifestFetcher.disable();
     }
+    seekRange = null;
   }

   @Override
@@ -285,6 +370,10 @@ public class DashChunkSource implements ChunkSource {
       }
       currentManifest = newManifest;
       finishedCurrentManifest = false;
+
+      long nowUs = getNowUs();
+      updateAvailableSegmentBounds(newRepresentations[0].getIndex(), nowUs);
+      updateSeekRange(newRepresentations[0].getIndex(), nowUs);
     }

     // TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
@@ -354,36 +443,41 @@ public class DashChunkSource implements ChunkSource {
       return;
     }

-    long nowUs;
-    if (elapsedRealtimeOffsetUs != 0) {
-      nowUs = (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
-    } else {
-      nowUs = System.currentTimeMillis() * 1000;
-    }
-
-    int firstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
-    int lastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
-    boolean indexUnbounded = lastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
-    if (indexUnbounded) {
-      // The index is itself unbounded. We need to use the current time to calculate the range of
-      // available segments.
-      long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
-      if (currentManifest.timeShiftBufferDepth != -1) {
-        long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
-        firstAvailableSegmentNum = Math.max(firstAvailableSegmentNum,
-            segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
-      }
-      // getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
-      // index of the last completed segment.
-      lastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
-    }
-
     int segmentNum;
+    boolean indexUnbounded = segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED;
+    if (indexUnbounded) {
+      // Manifests with unbounded indexes aren't updated regularly, so we need to update the
+      // segment bounds before use to ensure that they are accurate to the current time; also if
+      // the bounds have changed, we should update the seek range
+      long nowUs = getNowUs();
+      int oldFirstAvailableSegmentNum = firstAvailableSegmentNum;
+      int oldLastAvailableSegmentNum = lastAvailableSegmentNum;
+      updateAvailableSegmentBounds(segmentIndex, nowUs);
+      if (oldFirstAvailableSegmentNum != firstAvailableSegmentNum
+          || oldLastAvailableSegmentNum != lastAvailableSegmentNum) {
+        updateSeekRange(segmentIndex, nowUs);
+      }
+    }
     if (queue.isEmpty()) {
       if (currentManifest.dynamic) {
-        seekPositionUs = getLiveSeekPosition(nowUs, indexUnbounded, segmentIndex.isExplicit());
+        seekRangeValues = seekRange.getCurrentBoundsUs(seekRangeValues);
+        if (startAtLiveEdge) {
+          // We want live streams to start at the live edge instead of the beginning of the
+          // manifest
+          startAtLiveEdge = false;
+          seekPositionUs = seekRangeValues[1];
+        } else {
+          seekPositionUs = Math.max(seekPositionUs, seekRangeValues[0]);
+          seekPositionUs = Math.min(seekPositionUs, seekRangeValues[1]);
+        }
       }
       segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
+
+      // if the index is unbounded then the result of getSegmentNum isn't clamped to ensure that
+      // it doesn't exceed the last available segment. Clamp it here.
+      if (indexUnbounded) {
+        segmentNum = Math.min(segmentNum, lastAvailableSegmentNum);
+      }
     } else {
       MediaChunk previous = queue.get(out.queueSize - 1);
       segmentNum = previous.isLastChunk ? -1
@@ -452,6 +546,59 @@ public class DashChunkSource implements ChunkSource {
     // Do nothing.
   }

+  private void updateAvailableSegmentBounds(DashSegmentIndex segmentIndex, long nowUs) {
+    int indexFirstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
+    int indexLastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
+    if (indexLastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED) {
+      // The index is itself unbounded. We need to use the current time to calculate the range of
+      // available segments.
+      long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+      if (currentManifest.timeShiftBufferDepth != -1) {
+        long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
+        indexFirstAvailableSegmentNum = Math.max(indexFirstAvailableSegmentNum,
+            segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
+      }
+      // getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
+      // index of the last completed segment.
+      indexLastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
+    }
+    firstAvailableSegmentNum = indexFirstAvailableSegmentNum;
+    lastAvailableSegmentNum = indexLastAvailableSegmentNum;
+  }
+
+  private void updateSeekRange(DashSegmentIndex segmentIndex, long nowUs) {
+    long earliestSeekPosition = segmentIndex.getTimeUs(firstAvailableSegmentNum);
+    long latestSeekPosition = segmentIndex.getTimeUs(lastAvailableSegmentNum)
+        + segmentIndex.getDurationUs(lastAvailableSegmentNum);
+    if (currentManifest.dynamic) {
+      long liveEdgeTimestampUs;
+      if (segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED) {
+        liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+      } else {
+        liveEdgeTimestampUs = segmentIndex.getTimeUs(segmentIndex.getLastSegmentNum())
+            + segmentIndex.getDurationUs(segmentIndex.getLastSegmentNum());
+        if (!segmentIndex.isExplicit()) {
+          // Some segments defined by the index may not be available yet. Bound the calculated live
+          // edge based on the elapsed time since the manifest became available.
+          liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
+              nowUs - currentManifest.availabilityStartTime * 1000);
+        }
+      }
+
+      // it's possible that the live edge latency actually puts our latest position before
+      // the earliest position in the case of a DVR-like stream that's just starting up, so
+      // in that case just return the earliest position instead
+      latestSeekPosition = Math.max(earliestSeekPosition, liveEdgeTimestampUs - liveEdgeLatencyUs);
+    }
+
+    TimeRange newSeekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, earliestSeekPosition,
+        latestSeekPosition);
+    if (seekRange == null || !seekRange.equals(newSeekRange)) {
+      seekRange = newSeekRange;
+      notifySeekRangeChanged(seekRange);
+    }
+  }
+
   private static boolean mimeTypeIsWebm(String mimeType) {
     return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
   }
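A quick worked example of the updateSeekRange() arithmetic above, using the timeline from DashChunkSourceTest later in this commit (five 1000 ms segments starting at zero). It is an illustration only; the local variable names below are not from the source.

// Live edge at 5000 ms; with liveEdgeLatencyMs = 1001 the latest seekable position becomes
// max(earliest, liveEdge - latency) = max(0, 5_000_000 - 1_001_000) = 3_999_000 us,
// which is why testLiveEdge1001msLatency expects the first chunk to cover [3000, 4000) ms.
long liveEdgeUs = 5 * 1000 * 1000L;
long liveEdgeLatencyUs = 1001 * 1000L;
long earliestSeekPositionUs = 0;
long latestSeekPositionUs = Math.max(earliestSeekPositionUs, liveEdgeUs - liveEdgeLatencyUs);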
@@ -512,36 +659,12 @@ public class DashChunkSource implements ChunkSource {
     }
   }

-  /**
-   * For live playbacks, determines the seek position that snaps playback to be
-   * {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
-   *
-   * @param nowUs An estimate of the current server time, in microseconds.
-   * @param indexUnbounded True if the segment index for this source is unbounded. False otherwise.
-   * @param indexExplicit True if the segment index is explicit. False otherwise.
-   * @return The seek position in microseconds.
-   */
-  private long getLiveSeekPosition(long nowUs, boolean indexUnbounded, boolean indexExplicit) {
-    long liveEdgeTimestampUs;
-    if (indexUnbounded) {
-      liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+  private long getNowUs() {
+    if (elapsedRealtimeOffsetUs != 0) {
+      return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
     } else {
-      liveEdgeTimestampUs = Long.MIN_VALUE;
-      for (RepresentationHolder representationHolder : representationHolders.values()) {
-        DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
-        int lastSegmentNum = segmentIndex.getLastSegmentNum();
-        long indexLiveEdgeTimestampUs = segmentIndex.getTimeUs(lastSegmentNum)
-            + segmentIndex.getDurationUs(lastSegmentNum);
-        liveEdgeTimestampUs = Math.max(liveEdgeTimestampUs, indexLiveEdgeTimestampUs);
-      }
-      if (!indexExplicit) {
-        // Some segments defined by the index may not be available yet. Bound the calculated live
-        // edge based on the elapsed time since the manifest became available.
-        liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
-            nowUs - currentManifest.availabilityStartTime * 1000);
-      }
+      return System.currentTimeMillis() * 1000;
     }
-    return liveEdgeTimestampUs - liveEdgeLatencyUs;
   }

   private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,
@@ -592,6 +715,17 @@ public class DashChunkSource implements ChunkSource {
         Collections.singletonList(period));
   }

+  private void notifySeekRangeChanged(final TimeRange seekRange) {
+    if (eventHandler != null && eventListener != null) {
+      eventHandler.post(new Runnable() {
+        @Override
+        public void run() {
+          eventListener.onSeekRangeChanged(seekRange);
+        }
+      });
+    }
+  }
+
   private static class RepresentationHolder {

     public final Representation representation;
@@ -15,9 +15,11 @@
  */
 package com.google.android.exoplayer.dash;

+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

 import com.google.android.exoplayer.MediaFormat;
+import com.google.android.exoplayer.TimeRange;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.chunk.ChunkOperationHolder;
 import com.google.android.exoplayer.chunk.Format;
@@ -55,12 +57,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {

   private static final FormatEvaluator EVALUATOR = new FixedEvaluator();

-  private static final long AVAILABILITY_START_TIME = 0;
-  private static final long AVAILABILITY_LATENCY = 5000;
-  private static final long AVAILABILITY_REALTIME_OFFSET = 1000;
-  private static final long AVAILABILITY_CURRENT_TIME =
-      AVAILABILITY_START_TIME + AVAILABILITY_LATENCY - AVAILABILITY_REALTIME_OFFSET;
-  private static final FakeClock AVAILABILITY_CLOCK = new FakeClock(AVAILABILITY_CURRENT_TIME);
+  private static final long VOD_DURATION_MS = 30000;
+
+  private static final long LIVE_SEGMENT_COUNT = 5;
+  private static final long LIVE_SEGMENT_DURATION_MS = 1000;
+  private static final long LIVE_DURATION_MS = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS;
+  private static final long LIVE_TIMESHIFT_BUFFER_DEPTH_MS = LIVE_DURATION_MS;

+  private static final long AVAILABILITY_START_TIME_MS = 60000;
+  private static final long AVAILABILITY_REALTIME_OFFSET_MS = 1000;
+  private static final long AVAILABILITY_CURRENT_TIME_MS =
+      AVAILABILITY_START_TIME_MS + LIVE_TIMESHIFT_BUFFER_DEPTH_MS - AVAILABILITY_REALTIME_OFFSET_MS;
+
+  private static final long LIVE_SEEK_BEYOND_EDGE_MS = 60000;
+
   private static final int TALL_HEIGHT = 200;
   private static final int WIDE_WIDTH = 400;
@@ -90,6 +99,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
     assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
   }

+  public void testGetSeekRangeOnVod() {
+    DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
+        null, null, mock(FormatEvaluator.class));
+    chunkSource.enable();
+    TimeRange seekRange = chunkSource.getSeekRange();
+
+    checkSeekRange(seekRange, 0, VOD_DURATION_MS * 1000);
+
+    long[] seekRangeValuesMs = seekRange.getCurrentBoundsMs(null);
+    assertEquals(0, seekRangeValuesMs[0]);
+    assertEquals(VOD_DURATION_MS, seekRangeValuesMs[1]);
+  }
+
   public void testMaxVideoDimensionsLegacy() {
     SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
     Representation representation1 =
@@ -107,221 +129,338 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
     assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
   }

-  public void testLiveEdgeNoLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(0L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdgeNoLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 0;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 4000;
+    long chunkEndTimeMs = 5000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge500msLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(500L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdgeAlmostNoLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 1;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 4000;
+    long chunkEndTimeMs = 5000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge1000msLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1000L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge500msLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 500;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 4000;
+    long chunkEndTimeMs = 5000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge1001msLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1001L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge1000msLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 1000;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 4000;
+    long chunkEndTimeMs = 5000;

-    assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge2500msLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(2500L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge1001msLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 1001;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 3000;
+    long chunkEndTimeMs = 4000;

-    assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdgeVeryHighLatencyWithTimeline() {
-    DashChunkSource chunkSource = setupLiveEdgeTimelineTest(10000L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge2500msLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 2500;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 2000;
+    long chunkEndTimeMs = 3000;

-    assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdgeNoLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(0L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdgeVeryHighLatency() {
+    long startTimeMs = 0;
+    long liveEdgeLatencyMs = 10000;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 0;
+    long seekRangeEndMs = 0;
+    long chunkStartTimeMs = 0;
+    long chunkEndTimeMs = 1000;

-    // this should actually return the "5th" segment, but it currently returns the "6th", which
-    // doesn't actually exist yet; this will be resolved in a subsequent cl (cl/87518875).
-    //assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    //assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdgeAlmostNoLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdgeNoLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 0;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 7000;
+    long chunkEndTimeMs = 8000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge500msLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(500L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdgeAlmostNoLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 1;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 7000;
+    long chunkEndTimeMs = 8000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge1000msLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1000L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge500msLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 500;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 7000;
+    long chunkEndTimeMs = 8000;

-    assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge1001msLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1001L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge1000msLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 1000;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 7000;
+    long chunkEndTimeMs = 8000;

-    assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdge2500msLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(2500L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge1001msLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 1001;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 6000;
+    long chunkEndTimeMs = 7000;

-    assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

-  public void testLiveEdgeVeryHighLatencyWithTemplate() {
-    DashChunkSource chunkSource = setupLiveEdgeTemplateTest(10000L);
-    List<MediaChunk> queue = new ArrayList<>();
-    ChunkOperationHolder out = new ChunkOperationHolder();
-    chunkSource.getChunkOperation(queue, 0, 0, out);
+  public void testLiveEdge2500msLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 2500;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000 + LIVE_DURATION_MS - liveEdgeLatencyMs;
+    long chunkStartTimeMs = 5000;
+    long chunkEndTimeMs = 6000;

-    assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
-    assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
+    checkLiveTimelineConsistency(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  public void testLiveEdgeVeryHighLatencyInProgress() {
+    long startTimeMs = 3000;
+    long liveEdgeLatencyMs = 10000;
+    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
+    long seekRangeStartMs = 3000;
+    long seekRangeEndMs = 3000;
+    long chunkStartTimeMs = 3000;
+    long chunkEndTimeMs = 4000;
+
+    checkLiveEdgeLatencyWithTimeline(startTimeMs, 0, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
+        seekPositionMs, 0, 0, 1000);
+    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
+        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  private static Representation generateVodRepresentation(long startTimeMs, long duration,
+      Format format) {
+    SingleSegmentBase segmentBase = new SingleSegmentBase("https://example.com/1.mp4");
+    return Representation.newInstance(startTimeMs, duration, null, 0, format, segmentBase);
+  }
+
+  private static Representation generateSegmentTimelineRepresentation(long segmentStartMs,
+      long periodStartMs, long duration) {
+    List<SegmentTimelineElement> segmentTimeline = new ArrayList<>();
+    List<RangedUri> mediaSegments = new ArrayList<>();
+    long segmentStartTimeMs = segmentStartMs;
+    long byteStart = 0;
+    for (int i = 0; i < (duration / LIVE_SEGMENT_DURATION_MS); i++) {
+      segmentTimeline.add(new SegmentTimelineElement(segmentStartTimeMs, LIVE_SEGMENT_DURATION_MS));
+      mediaSegments.add(new RangedUri("", "", byteStart, 500L));
+      segmentStartTimeMs += LIVE_SEGMENT_DURATION_MS;
+      byteStart += 500;
+    }
+
+    int startNumber = (int) ((periodStartMs + segmentStartMs) / LIVE_SEGMENT_DURATION_MS);
+    MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
+        TrackRenderer.UNKNOWN_TIME_US, startNumber, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
+        mediaSegments);
+    return Representation.newInstance(periodStartMs, TrackRenderer.UNKNOWN_TIME_US, null, 0,
+        REGULAR_VIDEO, segmentBase);
   }

   private static MediaPresentationDescription generateMpd(boolean live,
-      List<Representation> representations) {
+      List<Representation> representations, boolean limitTimeshiftBuffer) {
     Representation firstRepresentation = representations.get(0);
     AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
     Period period = new Period(null, firstRepresentation.periodStartMs,
         firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
     long duration = (live) ? TrackRenderer.UNKNOWN_TIME_US
         : firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
-    return new MediaPresentationDescription(AVAILABILITY_START_TIME, duration, -1, live, -1, -1,
-        null, null, Collections.singletonList(period));
+    return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, duration, -1, live, -1,
+        (limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1, null, null,
+        Collections.singletonList(period));
   }

   private static MediaPresentationDescription generateVodMpd() {
     List<Representation> representations = new ArrayList<>();

-    SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
-    Representation representation1 =
-        Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
-    representations.add(representation1);
+    representations.add(generateVodRepresentation(0, VOD_DURATION_MS, TALL_VIDEO));
+    representations.add(generateVodRepresentation(0, VOD_DURATION_MS, WIDE_VIDEO));

-    SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
-    Representation representation2 =
-        Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
-    representations.add(representation2);
-
-    return generateMpd(false, representations);
+    return generateMpd(false, representations, false);
   }

-  private static MediaPresentationDescription generateLiveMpdWithTimeline() {
-    List<Representation> representations = new ArrayList<>();
-
-    List<SegmentTimelineElement> segmentTimeline = new ArrayList<>();
-    segmentTimeline.add(new SegmentTimelineElement(0L, 1000L));
-    segmentTimeline.add(new SegmentTimelineElement(1000L, 1000L));
-    segmentTimeline.add(new SegmentTimelineElement(2000L, 1000L));
-    segmentTimeline.add(new SegmentTimelineElement(3000L, 1000L));
-    segmentTimeline.add(new SegmentTimelineElement(4000L, 1000L));
-    List<RangedUri> mediaSegments = new ArrayList<>();
-    mediaSegments.add(new RangedUri("", "", 0L, 500L));
-    mediaSegments.add(new RangedUri("", "", 500L, 500L));
-    mediaSegments.add(new RangedUri("", "", 1000L, 500L));
-    mediaSegments.add(new RangedUri("", "", 1500L, 500L));
-    mediaSegments.add(new RangedUri("", "", 2000L, 500L));
-
-    MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
-        TrackRenderer.UNKNOWN_TIME_US, 1, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
-        mediaSegments);
-    Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
-        null, 0, REGULAR_VIDEO, segmentBase);
-    representations.add(representation);
-
-    return generateMpd(true, representations);
+  private static MediaPresentationDescription generateLiveMpdWithTimeline(long segmentStartMs,
+      long periodStartMs, long durationMs) {
+    return generateMpd(true, Collections.singletonList(generateSegmentTimelineRepresentation(
+        segmentStartMs, periodStartMs, durationMs)), false);
   }

-  private static MediaPresentationDescription generateLiveMpdWithTemplate() {
+  private static MediaPresentationDescription generateLiveMpdWithTemplate(
+      boolean limitTimeshiftBuffer) {
     List<Representation> representations = new ArrayList<>();

     UrlTemplate initializationTemplate = null;
     UrlTemplate mediaTemplate = UrlTemplate.compile("$RepresentationID$/$Number$");
     MultiSegmentBase segmentBase = new SegmentTemplate(null, 1000, 0,
-        TrackRenderer.UNKNOWN_TIME_US, 1, 1000, null,
+        TrackRenderer.UNKNOWN_TIME_US, 0, LIVE_SEGMENT_DURATION_MS, null,
         initializationTemplate, mediaTemplate, "http://www.youtube.com");
     Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
         null, 0, REGULAR_VIDEO, segmentBase);
     representations.add(representation);

-    return generateMpd(true, representations);
+    return generateMpd(true, representations, limitTimeshiftBuffer);
   }

-  private DashChunkSource setupLiveEdgeTimelineTest(long liveEdgeLatencyMs) {
-    MediaPresentationDescription manifest = generateLiveMpdWithTimeline();
-    when(mockManifestFetcher.getManifest()).thenReturn(manifest);
-    return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
-        mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
-        AVAILABILITY_REALTIME_OFFSET * 1000);
+  private DashChunkSource setupDashChunkSource(MediaPresentationDescription mpd, long periodStartMs,
+      long liveEdgeLatencyMs) {
+    @SuppressWarnings("unchecked")
+    ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
+    when(manifestFetcher.getManifest()).thenReturn(mpd);
+    DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
+        AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
+        new FakeClock(AVAILABILITY_CURRENT_TIME_MS + periodStartMs), liveEdgeLatencyMs * 1000,
+        AVAILABILITY_REALTIME_OFFSET_MS * 1000, false, null, null);
+    chunkSource.enable();
+    return chunkSource;
   }

-  private DashChunkSource setupLiveEdgeTemplateTest(long liveEdgeLatencyMs) {
-    MediaPresentationDescription manifest = generateLiveMpdWithTemplate();
-    when(mockManifestFetcher.getManifest()).thenReturn(manifest);
-    return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
-        mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
-        AVAILABILITY_REALTIME_OFFSET * 1000);
+  private void checkSeekRange(TimeRange seekRange, long startTimeUs, long endTimeUs) {
+    long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
+    assertEquals(startTimeUs, seekRangeValuesUs[0]);
+    assertEquals(endTimeUs, seekRangeValuesUs[1]);
+  }
+
+  private void checkLiveEdgeLatency(DashChunkSource chunkSource, List<MediaChunk> queue,
+      ChunkOperationHolder out, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
+      long chunkStartTimeMs, long chunkEndTimeMs) {
+    chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
+    TimeRange seekRange = chunkSource.getSeekRange();
+
+    assertNotNull(out.chunk);
+    checkSeekRange(seekRange, seekRangeStartMs * 1000, seekRangeEndMs * 1000);
+    assertEquals(chunkStartTimeMs * 1000, ((MediaChunk) out.chunk).startTimeUs);
+    assertEquals(chunkEndTimeMs * 1000, ((MediaChunk) out.chunk).endTimeUs);
+  }
+
+  private void checkLiveEdgeLatency(MediaPresentationDescription mpd, long periodStartMs,
+      long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
+      long chunkStartTimeMs, long chunkEndTimeMs) {
+    DashChunkSource chunkSource = setupDashChunkSource(mpd, periodStartMs, liveEdgeLatencyMs);
+    List<MediaChunk> queue = new ArrayList<>();
+    ChunkOperationHolder out = new ChunkOperationHolder();
+    checkLiveEdgeLatency(chunkSource, queue, out, seekPositionMs, seekRangeStartMs, seekRangeEndMs,
+        chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  private void checkLiveEdgeLatencyWithTimeline(long segmentStartMs, long periodStartMs,
+      long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
+      long chunkStartTimeMs, long chunkEndTimeMs) {
+    MediaPresentationDescription mpd = generateLiveMpdWithTimeline(segmentStartMs, periodStartMs,
+        LIVE_DURATION_MS);
+    checkLiveEdgeLatency(mpd, periodStartMs, liveEdgeLatencyMs, seekPositionMs, seekRangeStartMs,
+        seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  private void checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(long startTimeMs,
+      long liveEdgeLatencyMs, long seekPositionMs, long seekRangeEndMs,
+      long chunkStartTimeMs, long chunkEndTimeMs) {
+    MediaPresentationDescription mpd = generateLiveMpdWithTemplate(false);
+    checkLiveEdgeLatency(mpd, startTimeMs, liveEdgeLatencyMs, seekPositionMs, 0, seekRangeEndMs,
+        chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  private void checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(long startTimeMs,
+      long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
+      long chunkStartTimeMs, long chunkEndTimeMs) {
+    MediaPresentationDescription mpd = generateLiveMpdWithTemplate(true);
+    checkLiveEdgeLatency(mpd, startTimeMs, liveEdgeLatencyMs, seekPositionMs, seekRangeStartMs,
+        seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+  }
+
+  private void checkLiveTimelineConsistency(long startTimeMs, long liveEdgeLatencyMs,
+      long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
+      long chunkEndTimeMs) {
+    checkLiveEdgeLatencyWithTimeline(startTimeMs, 0, liveEdgeLatencyMs, seekPositionMs,
+        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
+        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
+    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
+        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
   }

 }