Merge consecutive segments for downloading.
This speeds up downloads in which consecutive segments share the same URL but cover different byte ranges. Merged segments are capped at 20 seconds of media so that the download progress of demuxed streams stays roughly in line with the playable media duration.

Issue: #5978
PiperOrigin-RevId: 280410761
parent 51711a0c97
commit c8e7ecd367
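As an aside (illustration only, not part of the commit): when two segments are byte ranges of the same file and the second starts exactly where the first ends, they can be fetched with a single HTTP range request instead of two. A minimal sketch of that arithmetic, using made-up offsets and a hypothetical `rangeHeader` helper:

```java
// Illustration only: merging two adjacent byte ranges into a single HTTP Range request.
public final class RangeRequestExample {

  // Hypothetical helper; builds an HTTP Range header value for [position, position + length).
  static String rangeHeader(long position, long length) {
    return "bytes=" + position + "-" + (position + length - 1);
  }

  public static void main(String[] args) {
    long firstPosition = 0, firstLength = 500_000;
    long secondPosition = 500_000, secondLength = 250_000;

    System.out.println(rangeHeader(firstPosition, firstLength));   // bytes=0-499999
    System.out.println(rangeHeader(secondPosition, secondLength)); // bytes=500000-749999

    // The second range starts exactly where the first ends, so one request covers both.
    if (firstPosition + firstLength == secondPosition) {
      System.out.println(rangeHeader(firstPosition, firstLength + secondLength)); // bytes=0-749999
    }
  }
}
```

The diff below applies the same idea inside `SegmentDownloader`, with an additional 20-second cap on how much media a merged request may span.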
@@ -137,6 +137,8 @@
  [Cast demo app](https://github.com/google/ExoPlayer/tree/dev-v2/demos/cast).
* TestUtils: Publish the `testutils` module to simplify unit testing with
  ExoPlayer ([#6267](https://github.com/google/ExoPlayer/issues/6267)).
* Downloads: Merge downloads in `SegmentDownloader` to improve overall download
  speed ([#5978](https://github.com/google/ExoPlayer/issues/5978)).

### 2.10.7 (2019-11-12) ###
@@ -25,11 +25,13 @@ import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.CacheKeyFactory;
import com.google.android.exoplayer2.upstream.cache.CacheUtil;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.PriorityTaskManager;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -62,6 +64,7 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
  }

  private static final int BUFFER_SIZE_BYTES = 128 * 1024;
  private static final long MAX_MERGED_SEGMENT_START_TIME_DIFF_US = 20 * C.MICROS_PER_SECOND;

  private final DataSpec manifestDataSpec;
  private final Cache cache;
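For context on the new constant (a standalone sanity check, not library code): `C.MICROS_PER_SECOND` is 1,000,000, so the cap works out to 20,000,000 µs of start-time difference. The sketch below uses a plain literal in place of `C` so it compiles without ExoPlayer on the classpath:

```java
// Standalone sanity check of the merge window; 1_000_000 mirrors C.MICROS_PER_SECOND.
public final class MergeWindowExample {

  private static final long MICROS_PER_SECOND = 1_000_000L;
  private static final long MAX_MERGED_SEGMENT_START_TIME_DIFF_US = 20 * MICROS_PER_SECOND;

  public static void main(String[] args) {
    long firstSegmentStartUs = 0;
    long candidateStartUs = 25 * MICROS_PER_SECOND; // 25 seconds after the first segment.
    boolean withinWindow =
        candidateStartUs <= firstSegmentStartUs + MAX_MERGED_SEGMENT_START_TIME_DIFF_US;
    System.out.println("Candidate can extend the merged segment: " + withinWindow); // false
  }
}
```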
@@ -108,6 +111,8 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
      manifest = manifest.copy(streamKeys);
    }
    List<Segment> segments = getSegments(dataSource, manifest, /* allowIncompleteList= */ false);
    Collections.sort(segments);
    mergeSegments(segments, cacheKeyFactory);

    // Scan the segments, removing any that are fully downloaded.
    int totalSegments = segments.size();
@@ -134,7 +139,6 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
        contentLength = C.LENGTH_UNSET;
      }
    }
    Collections.sort(segments);

    // Download the segments.
    @Nullable ProgressNotifier progressNotifier = null;
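Note that the sort now runs before `mergeSegments` rather than only after the cached-segment scan, so segments are visited in start-time order and, for each cache key, the candidate to extend is always the most recently emitted entry. A tiny, hypothetical illustration of that ordering step (the `Chunk` class is a stand-in, not ExoPlayer's `Segment`):

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Hypothetical stand-in for SegmentDownloader.Segment, for illustration only.
final class Chunk {
  final long startTimeUs;
  final String url;

  Chunk(long startTimeUs, String url) {
    this.startTimeUs = startTimeUs;
    this.url = url;
  }
}

public final class SortBeforeMergeExample {
  public static void main(String[] args) {
    List<Chunk> chunks = new ArrayList<>();
    chunks.add(new Chunk(4_000_000L, "video.mp4"));
    chunks.add(new Chunk(0L, "video.mp4"));
    chunks.add(new Chunk(2_000_000L, "audio.mp4"));

    // Put chunks in playback order before merging, mirroring Collections.sort(segments).
    chunks.sort(Comparator.comparingLong(chunk -> chunk.startTimeUs));

    for (Chunk chunk : chunks) {
      System.out.println(chunk.startTimeUs + " " + chunk.url);
    }
  }
}
```

With the list in this order, a single forward pass is enough to decide whether each entry extends the previous one for the same resource.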
@@ -232,6 +236,44 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
        /* flags= */ DataSpec.FLAG_ALLOW_GZIP);
  }

  private static void mergeSegments(List<Segment> segments, CacheKeyFactory keyFactory) {
    HashMap<String, Integer> lastIndexByCacheKey = new HashMap<>();
    int nextOutIndex = 0;
    for (int i = 0; i < segments.size(); i++) {
      Segment segment = segments.get(i);
      String cacheKey = keyFactory.buildCacheKey(segment.dataSpec);
      @Nullable Integer lastIndex = lastIndexByCacheKey.get(cacheKey);
      @Nullable Segment lastSegment = lastIndex == null ? null : segments.get(lastIndex);
      if (lastSegment == null
          || segment.startTimeUs > lastSegment.startTimeUs + MAX_MERGED_SEGMENT_START_TIME_DIFF_US
          || !canMergeSegments(lastSegment.dataSpec, segment.dataSpec)) {
        lastIndexByCacheKey.put(cacheKey, nextOutIndex);
        segments.set(nextOutIndex, segment);
        nextOutIndex++;
      } else {
        long mergedLength =
            segment.dataSpec.length == C.LENGTH_UNSET
                ? C.LENGTH_UNSET
                : lastSegment.dataSpec.length + segment.dataSpec.length;
        DataSpec mergedDataSpec = lastSegment.dataSpec.subrange(/* offset= */ 0, mergedLength);
        segments.set(
            Assertions.checkNotNull(lastIndex),
            new Segment(lastSegment.startTimeUs, mergedDataSpec));
      }
    }
    Util.removeRange(segments, /* fromIndex= */ nextOutIndex, /* toIndex= */ segments.size());
  }

  private static boolean canMergeSegments(DataSpec dataSpec1, DataSpec dataSpec2) {
    return dataSpec1.uri.equals(dataSpec2.uri)
        && dataSpec1.length != C.LENGTH_UNSET
        && (dataSpec1.absoluteStreamPosition + dataSpec1.length == dataSpec2.absoluteStreamPosition)
        && Util.areEqual(dataSpec1.key, dataSpec2.key)
        && dataSpec1.flags == dataSpec2.flags
        && dataSpec1.httpMethod == dataSpec2.httpMethod
        && dataSpec1.httpRequestHeaders.equals(dataSpec2.httpRequestHeaders);
  }

  private static final class ProgressNotifier implements CacheUtil.ProgressListener {

    private final ProgressListener progressListener;
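To summarize the new merge pass (a simplified, self-contained sketch under stated assumptions, not the library code): it walks the sorted list once, keeps a write index for in-place compaction, extends the previously emitted range when the next one is byte-adjacent and within the time window, and finally trims the tail. The `Range` class, its field names, and the hard-coded 20-second window below are made up for illustration; ExoPlayer's version operates on `DataSpec` and additionally compares cache keys, set lengths, flags, HTTP method, and request headers before merging.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical byte range of a single resource, for illustration only.
final class Range {
  final String url;
  final long startTimeUs;
  final long position;
  final long length;

  Range(String url, long startTimeUs, long position, long length) {
    this.url = url;
    this.startTimeUs = startTimeUs;
    this.position = position;
    this.length = length;
  }
}

public final class MergePassExample {

  private static final long MAX_MERGED_START_TIME_DIFF_US = 20_000_000L; // 20 seconds.

  /** Merges adjacent ranges of the same URL in place; the input must be sorted by start time. */
  static void merge(List<Range> ranges) {
    Map<String, Integer> lastIndexByUrl = new HashMap<>();
    int nextOutIndex = 0;
    for (Range range : ranges) {
      Integer lastIndex = lastIndexByUrl.get(range.url);
      Range last = lastIndex == null ? null : ranges.get(lastIndex);
      boolean mergeable =
          last != null
              && range.startTimeUs <= last.startTimeUs + MAX_MERGED_START_TIME_DIFF_US
              && last.position + last.length == range.position; // Byte ranges are adjacent.
      if (mergeable) {
        // Extend the previously emitted range rather than emitting a new one.
        ranges.set(
            lastIndex,
            new Range(last.url, last.startTimeUs, last.position, last.length + range.length));
      } else {
        lastIndexByUrl.put(range.url, nextOutIndex);
        ranges.set(nextOutIndex, range);
        nextOutIndex++;
      }
    }
    // Drop the tail positions that were compacted away.
    ranges.subList(nextOutIndex, ranges.size()).clear();
  }

  public static void main(String[] args) {
    List<Range> ranges = new ArrayList<>();
    ranges.add(new Range("video.mp4", 0L, 0L, 1_000L));
    ranges.add(new Range("video.mp4", 2_000_000L, 1_000L, 1_000L));
    ranges.add(new Range("audio.mp4", 0L, 0L, 500L));
    merge(ranges);
    System.out.println(ranges.size() + " ranges after merging"); // 2
  }
}
```

Merging in place and trimming with `subList(...).clear()` plays the same role as the `Util.removeRange` call in the diff, avoiding the allocation of a second list.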