Merge fMP4/H264 and WebM/VP9 DASH implementations.

Oliver Woodman 2014-09-08 11:33:12 +01:00
parent 1ddd5c6e16
commit 52a300f1fd
11 changed files with 621 additions and 992 deletions
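
For context, a minimal sketch (not part of the diff itself) of the idea behind the merge: both container parsers now implement the shared Extractor interface added in this commit, so a single DashChunkSource can pick a concrete extractor per representation from its mime type. Names are taken from the changed files below; the standalone snippet is illustrative only.

// Illustrative sketch only; the real selection happens in DashChunkSource's constructor below.
Extractor extractor = format.mimeType.startsWith(MimeTypes.VIDEO_WEBM)
    ? new WebmExtractor()           // WebM/VP9 representations
    : new FragmentedMp4Extractor(); // fMP4/H264 representations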


@@ -28,8 +28,7 @@ import com.google.android.exoplayer.chunk.Format;
 import com.google.android.exoplayer.chunk.FormatEvaluator;
 import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
 import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
-import com.google.android.exoplayer.dash.DashMp4ChunkSource;
-import com.google.android.exoplayer.dash.DashWebmChunkSource;
+import com.google.android.exoplayer.dash.DashChunkSource;
 import com.google.android.exoplayer.dash.mpd.AdaptationSet;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionFetcher;
@@ -163,14 +162,8 @@ public class DashVodRendererBuilder implements RendererBuilder,
     DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
     ChunkSource videoChunkSource;
     String mimeType = videoRepresentations[0].format.mimeType;
-    if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
-      videoChunkSource = new DashMp4ChunkSource(videoDataSource,
-          new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
-    } else if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
-      // TODO: Figure out how to query supported vpX resolutions. For now, restrict to standard
-      // definition streams.
-      videoRepresentations = getSdRepresentations(videoRepresentations);
-      videoChunkSource = new DashWebmChunkSource(videoDataSource,
+    if (mimeType.equals(MimeTypes.VIDEO_MP4) || mimeType.equals(MimeTypes.VIDEO_WEBM)) {
+      videoChunkSource = new DashChunkSource(videoDataSource,
           new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
     } else {
       throw new IllegalStateException("Unexpected mime type: " + mimeType);
@@ -200,7 +193,7 @@
       Format format = representation.format;
       audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
           format.audioSamplingRate + "Hz)";
-      audioChunkSources[i] = new DashMp4ChunkSource(audioDataSource,
+      audioChunkSources[i] = new DashChunkSource(audioDataSource,
           audioEvaluator, representation);
     }
     audioChunkSource = new MultiTrackChunkSource(audioChunkSources);


@@ -26,7 +26,7 @@ import com.google.android.exoplayer.chunk.ChunkSource;
 import com.google.android.exoplayer.chunk.Format;
 import com.google.android.exoplayer.chunk.FormatEvaluator;
 import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
-import com.google.android.exoplayer.dash.DashMp4ChunkSource;
+import com.google.android.exoplayer.dash.DashChunkSource;
 import com.google.android.exoplayer.dash.mpd.AdaptationSet;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionFetcher;
@@ -116,7 +116,7 @@ import java.util.ArrayList;
     // Build the video renderer.
     DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
-    ChunkSource videoChunkSource = new DashMp4ChunkSource(videoDataSource,
+    ChunkSource videoChunkSource = new DashChunkSource(videoDataSource,
         new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
         VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
@@ -125,7 +125,7 @@
     // Build the audio renderer.
     DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
-    ChunkSource audioChunkSource = new DashMp4ChunkSource(audioDataSource,
+    ChunkSource audioChunkSource = new DashChunkSource(audioDataSource,
         new FormatEvaluator.FixedEvaluator(), audioRepresentation);
     SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
         AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);


@@ -18,7 +18,7 @@ package com.google.android.exoplayer.chunk;
 import com.google.android.exoplayer.MediaFormat;
 import com.google.android.exoplayer.ParserException;
 import com.google.android.exoplayer.SampleHolder;
-import com.google.android.exoplayer.parser.mp4.FragmentedMp4Extractor;
+import com.google.android.exoplayer.parser.Extractor;
 import com.google.android.exoplayer.upstream.DataSource;
 import com.google.android.exoplayer.upstream.DataSpec;
 import com.google.android.exoplayer.upstream.NonBlockingInputStream;
@@ -32,7 +32,7 @@ import java.util.UUID;
  */
 public final class Mp4MediaChunk extends MediaChunk {
-  private final FragmentedMp4Extractor extractor;
+  private final Extractor extractor;
   private final boolean maybeSelfContained;
   private final long sampleOffsetUs;
@@ -57,7 +57,7 @@
    */
   public Mp4MediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
       int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex,
-      FragmentedMp4Extractor extractor, boolean maybeSelfContained, long sampleOffsetUs) {
+      Extractor extractor, boolean maybeSelfContained, long sampleOffsetUs) {
     super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
     this.extractor = extractor;
     this.maybeSelfContained = maybeSelfContained;
@@ -89,7 +89,7 @@
         NonBlockingInputStream inputStream = getNonBlockingInputStream();
         Assertions.checkState(inputStream != null);
         int result = extractor.read(inputStream, null);
-        prepared = (result & FragmentedMp4Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
+        prepared = (result & Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
       } else {
         // We know there isn't a moov atom. The extractor must have parsed one from a separate
         // initialization chunk.
@@ -107,7 +107,7 @@
   public boolean sampleAvailable() throws ParserException {
     NonBlockingInputStream inputStream = getNonBlockingInputStream();
     int result = extractor.read(inputStream, null);
-    return (result & FragmentedMp4Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
+    return (result & Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
   }
   @Override
@@ -115,7 +115,7 @@
     NonBlockingInputStream inputStream = getNonBlockingInputStream();
     Assertions.checkState(inputStream != null);
     int result = extractor.read(inputStream, holder);
-    boolean sampleRead = (result & FragmentedMp4Extractor.RESULT_READ_SAMPLE) != 0;
+    boolean sampleRead = (result & Extractor.RESULT_READ_SAMPLE) != 0;
     if (sampleRead) {
       holder.timeUs -= sampleOffsetUs;
     }


@@ -1,99 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.webm.WebmExtractor;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import java.util.Map;
import java.util.UUID;
/**
* A WebM {@link MediaChunk}.
*/
public final class WebmMediaChunk extends MediaChunk {
private final WebmExtractor extractor;
/**
* @param dataSource A {@link DataSource} for loading the data.
* @param dataSpec Defines the data to be loaded.
* @param format The format of the stream to which this chunk belongs.
* @param extractor The extractor that will be used to extract the samples.
* @param trigger The reason for this chunk being selected.
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
*/
public WebmMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
int trigger, WebmExtractor extractor, long startTimeUs, long endTimeUs,
int nextChunkIndex) {
super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
this.extractor = extractor;
}
@Override
public void seekToStart() {
seekTo(0, false);
}
@Override
public boolean seekTo(long positionUs, boolean allowNoop) {
boolean isDiscontinuous = extractor.seekTo(positionUs, allowNoop);
if (isDiscontinuous) {
resetReadPosition();
}
return isDiscontinuous;
}
@Override
public boolean prepare() {
return true;
}
@Override
public boolean sampleAvailable() throws ParserException {
NonBlockingInputStream inputStream = getNonBlockingInputStream();
int result = extractor.read(inputStream, null);
return (result & WebmExtractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
}
@Override
public boolean read(SampleHolder holder) {
NonBlockingInputStream inputStream = getNonBlockingInputStream();
Assertions.checkState(inputStream != null);
int result = extractor.read(inputStream, holder);
return (result & WebmExtractor.RESULT_READ_SAMPLE) != 0;
}
@Override
public MediaFormat getMediaFormat() {
return extractor.getFormat();
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
// TODO: Add support for Pssh to WebmExtractor
return null;
}
}


@@ -29,10 +29,13 @@ import com.google.android.exoplayer.chunk.MediaChunk;
 import com.google.android.exoplayer.chunk.Mp4MediaChunk;
 import com.google.android.exoplayer.dash.mpd.RangedUri;
 import com.google.android.exoplayer.dash.mpd.Representation;
+import com.google.android.exoplayer.parser.Extractor;
 import com.google.android.exoplayer.parser.mp4.FragmentedMp4Extractor;
+import com.google.android.exoplayer.parser.webm.WebmExtractor;
 import com.google.android.exoplayer.upstream.DataSource;
 import com.google.android.exoplayer.upstream.DataSpec;
 import com.google.android.exoplayer.upstream.NonBlockingInputStream;
+import com.google.android.exoplayer.util.MimeTypes;
 import android.net.Uri;
@@ -42,9 +45,11 @@ import java.util.HashMap;
 import java.util.List;
 /**
- * An {@link ChunkSource} for Mp4 DASH streams.
+ * An {@link ChunkSource} for DASH streams.
+ * <p>
+ * This implementation currently supports fMP4 and webm.
  */
-public class DashMp4ChunkSource implements ChunkSource {
+public class DashChunkSource implements ChunkSource {
   private final TrackInfo trackInfo;
   private final DataSource dataSource;
@@ -55,7 +60,7 @@ public class DashMp4ChunkSource implements ChunkSource {
   private final Format[] formats;
   private final HashMap<String, Representation> representations;
-  private final HashMap<String, FragmentedMp4Extractor> extractors;
+  private final HashMap<String, Extractor> extractors;
   private final HashMap<String, DashSegmentIndex> segmentIndexes;
   private boolean lastChunkWasInitialization;
@@ -65,12 +70,12 @@
    * @param evaluator Selects from the available formats.
    * @param representations The representations to be considered by the source.
    */
-  public DashMp4ChunkSource(DataSource dataSource, FormatEvaluator evaluator,
+  public DashChunkSource(DataSource dataSource, FormatEvaluator evaluator,
       Representation... representations) {
     this.dataSource = dataSource;
     this.evaluator = evaluator;
     this.formats = new Format[representations.length];
-    this.extractors = new HashMap<String, FragmentedMp4Extractor>();
+    this.extractors = new HashMap<String, Extractor>();
     this.segmentIndexes = new HashMap<String, DashSegmentIndex>();
     this.representations = new HashMap<String, Representation>();
     this.trackInfo = new TrackInfo(representations[0].format.mimeType,
@@ -82,7 +87,9 @@
       formats[i] = representations[i].format;
       maxWidth = Math.max(formats[i].width, maxWidth);
       maxHeight = Math.max(formats[i].height, maxHeight);
-      extractors.put(formats[i].id, new FragmentedMp4Extractor());
+      Extractor extractor = formats[i].mimeType.startsWith(MimeTypes.VIDEO_WEBM)
+          ? new WebmExtractor() : new FragmentedMp4Extractor();
+      extractors.put(formats[i].id, extractor);
       this.representations.put(formats[i].id, representations[i]);
       DashSegmentIndex segmentIndex = representations[i].getIndex();
       if (segmentIndex != null) {
@@ -142,7 +149,7 @@
     }
     Representation selectedRepresentation = representations.get(selectedFormat.id);
-    FragmentedMp4Extractor extractor = extractors.get(selectedRepresentation.format.id);
+    Extractor extractor = extractors.get(selectedRepresentation.format.id);
     RangedUri pendingInitializationUri = null;
     RangedUri pendingIndexUri = null;
@@ -191,35 +198,39 @@
   }
   private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
-      Representation representation, FragmentedMp4Extractor extractor, DataSource dataSource,
+      Representation representation, Extractor extractor, DataSource dataSource,
       int trigger) {
-    int expectedExtractorResult = FragmentedMp4Extractor.RESULT_END_OF_STREAM;
+    int expectedExtractorResult = Extractor.RESULT_END_OF_STREAM;
     long indexAnchor = 0;
     RangedUri requestUri;
     if (initializationUri != null) {
       // It's common for initialization and index data to be stored adjacently. Attempt to merge
       // the two requests together to request both at once.
-      expectedExtractorResult |= FragmentedMp4Extractor.RESULT_READ_INIT;
+      expectedExtractorResult |= Extractor.RESULT_READ_INIT;
       requestUri = initializationUri.attemptMerge(indexUri);
       if (requestUri != null) {
-        expectedExtractorResult |= FragmentedMp4Extractor.RESULT_READ_INDEX;
+        expectedExtractorResult |= Extractor.RESULT_READ_INDEX;
+        if (extractor.hasRelativeIndexOffsets()) {
           indexAnchor = indexUri.start + indexUri.length;
+        }
       } else {
         requestUri = initializationUri;
       }
     } else {
       requestUri = indexUri;
+      if (extractor.hasRelativeIndexOffsets()) {
         indexAnchor = indexUri.start + indexUri.length;
-      expectedExtractorResult |= FragmentedMp4Extractor.RESULT_READ_INDEX;
+      }
+      expectedExtractorResult |= Extractor.RESULT_READ_INDEX;
     }
     DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
         representation.getCacheKey());
-    return new InitializationMp4Loadable(dataSource, dataSpec, trigger, representation.format,
+    return new InitializationLoadable(dataSource, dataSpec, trigger, representation.format,
         extractor, expectedExtractorResult, indexAnchor);
   }
   private Chunk newMediaChunk(Representation representation, DashSegmentIndex segmentIndex,
-      FragmentedMp4Extractor extractor, DataSource dataSource, int segmentNum, int trigger) {
+      Extractor extractor, DataSource dataSource, int segmentNum, int trigger) {
     int lastSegmentNum = segmentIndex.getLastSegmentNum();
     int nextSegmentNum = segmentNum == lastSegmentNum ? -1 : segmentNum + 1;
     long startTimeUs = segmentIndex.getTimeUs(segmentNum);
@@ -232,15 +243,15 @@
         endTimeUs, nextSegmentNum, extractor, false, 0);
   }
-  private class InitializationMp4Loadable extends Chunk {
-    private final FragmentedMp4Extractor extractor;
+  private class InitializationLoadable extends Chunk {
+    private final Extractor extractor;
     private final int expectedExtractorResult;
     private final long indexAnchor;
     private final Uri uri;
-    public InitializationMp4Loadable(DataSource dataSource, DataSpec dataSpec, int trigger,
-        Format format, FragmentedMp4Extractor extractor, int expectedExtractorResult,
+    public InitializationLoadable(DataSource dataSource, DataSpec dataSpec, int trigger,
+        Format format, Extractor extractor, int expectedExtractorResult,
         long indexAnchor) {
       super(dataSource, dataSpec, format, trigger);
       this.extractor = extractor;
@@ -256,7 +267,7 @@
         throw new ParserException("Invalid extractor result. Expected "
            + expectedExtractorResult + ", got " + result);
       }
-      if ((result & FragmentedMp4Extractor.RESULT_READ_INDEX) != 0) {
+      if ((result & Extractor.RESULT_READ_INDEX) != 0) {
        segmentIndexes.put(format.id,
            new DashWrappingSegmentIndex(extractor.getIndex(), uri, indexAnchor));
       }
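
The indexAnchor handling above differs by container. FragmentedMp4Extractor reports hasRelativeIndexOffsets() == true because its sidx offsets are relative to the first byte following the initialization data, while the WebM cues are built as absolute offsets from the segment start (the old WebM path always passed an anchor of 0). A hedged sketch of the distinction, illustrative only and not part of the diff:

// Illustrative only: how the anchor for the wrapped segment index is chosen.
long indexAnchor = extractor.hasRelativeIndexOffsets()
    ? indexUri.start + indexUri.length  // fMP4: anchor at the first byte after the index data
    : 0;                                // WebM: cue offsets are already absolute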


@@ -1,261 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.Format.DecreasingBandwidthComparator;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.WebmMediaChunk;
import com.google.android.exoplayer.dash.mpd.RangedUri;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.parser.webm.DefaultWebmExtractor;
import com.google.android.exoplayer.parser.webm.WebmExtractor;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import android.net.Uri;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
/**
* An {@link ChunkSource} for WebM DASH streams.
*/
public class DashWebmChunkSource implements ChunkSource {
private final TrackInfo trackInfo;
private final DataSource dataSource;
private final FormatEvaluator evaluator;
private final Evaluation evaluation;
private final int maxWidth;
private final int maxHeight;
private final Format[] formats;
private final HashMap<String, Representation> representations;
private final HashMap<String, DefaultWebmExtractor> extractors;
private final HashMap<String, DashSegmentIndex> segmentIndexes;
private boolean lastChunkWasInitialization;
/**
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param evaluator Selects from the available formats.
* @param representations The representations to be considered by the source.
*/
public DashWebmChunkSource(DataSource dataSource, FormatEvaluator evaluator,
Representation... representations) {
this.dataSource = dataSource;
this.evaluator = evaluator;
this.formats = new Format[representations.length];
this.extractors = new HashMap<String, DefaultWebmExtractor>();
this.segmentIndexes = new HashMap<String, DashSegmentIndex>();
this.representations = new HashMap<String, Representation>();
this.trackInfo = new TrackInfo(representations[0].format.mimeType,
representations[0].periodDurationMs * 1000);
this.evaluation = new Evaluation();
int maxWidth = 0;
int maxHeight = 0;
for (int i = 0; i < representations.length; i++) {
formats[i] = representations[i].format;
maxWidth = Math.max(formats[i].width, maxWidth);
maxHeight = Math.max(formats[i].height, maxHeight);
extractors.put(formats[i].id, new DefaultWebmExtractor());
this.representations.put(formats[i].id, representations[i]);
DashSegmentIndex segmentIndex = representations[i].getIndex();
if (segmentIndex != null) {
segmentIndexes.put(formats[i].id, segmentIndex);
}
}
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
Arrays.sort(formats, new DecreasingBandwidthComparator());
}
@Override
public final void getMaxVideoDimensions(MediaFormat out) {
if (trackInfo.mimeType.startsWith("video")) {
out.setMaxVideoDimensions(maxWidth, maxHeight);
}
}
@Override
public final TrackInfo getTrackInfo() {
return trackInfo;
}
@Override
public void enable() {
evaluator.enable();
}
@Override
public void disable(List<? extends MediaChunk> queue) {
evaluator.disable();
}
@Override
public void continueBuffering(long playbackPositionUs) {
// Do nothing
}
@Override
public final void getChunkOperation(List<? extends MediaChunk> queue, long seekPositionUs,
long playbackPositionUs, ChunkOperationHolder out) {
evaluation.queueSize = queue.size();
if (evaluation.format == null || !lastChunkWasInitialization) {
evaluator.evaluate(queue, playbackPositionUs, formats, evaluation);
}
Format selectedFormat = evaluation.format;
out.queueSize = evaluation.queueSize;
if (selectedFormat == null) {
out.chunk = null;
return;
} else if (out.queueSize == queue.size() && out.chunk != null
&& out.chunk.format.id.equals(selectedFormat.id)) {
// We already have a chunk, and the evaluation hasn't changed either the format or the size
// of the queue. Leave unchanged.
return;
}
Representation selectedRepresentation = representations.get(selectedFormat.id);
WebmExtractor extractor = extractors.get(selectedRepresentation.format.id);
RangedUri pendingInitializationUri = null;
RangedUri pendingIndexUri = null;
if (extractor.getFormat() == null) {
pendingInitializationUri = selectedRepresentation.getInitializationUri();
}
if (!segmentIndexes.containsKey(selectedRepresentation.format.id)) {
pendingIndexUri = selectedRepresentation.getIndexUri();
}
if (pendingInitializationUri != null || pendingIndexUri != null) {
// We have initialization and/or index requests to make.
Chunk initializationChunk = newInitializationChunk(pendingInitializationUri, pendingIndexUri,
selectedRepresentation, extractor, dataSource, evaluation.trigger);
lastChunkWasInitialization = true;
out.chunk = initializationChunk;
return;
}
int nextSegmentNum;
DashSegmentIndex segmentIndex = segmentIndexes.get(selectedRepresentation.format.id);
if (queue.isEmpty()) {
nextSegmentNum = segmentIndex.getSegmentNum(seekPositionUs);
} else {
nextSegmentNum = queue.get(out.queueSize - 1).nextChunkIndex;
}
if (nextSegmentNum == -1) {
out.chunk = null;
return;
}
Chunk nextMediaChunk = newMediaChunk(selectedRepresentation, segmentIndex, extractor,
dataSource, nextSegmentNum, evaluation.trigger);
lastChunkWasInitialization = false;
out.chunk = nextMediaChunk;
}
@Override
public IOException getError() {
return null;
}
@Override
public void onChunkLoadError(Chunk chunk, Exception e) {
// Do nothing.
}
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
Representation representation, WebmExtractor extractor, DataSource dataSource,
int trigger) {
int expectedExtractorResult = WebmExtractor.RESULT_END_OF_STREAM;
RangedUri requestUri;
if (initializationUri != null) {
// It's common for initialization and index data to be stored adjacently. Attempt to merge
// the two requests together to request both at once.
expectedExtractorResult |= WebmExtractor.RESULT_READ_INIT;
requestUri = initializationUri.attemptMerge(indexUri);
if (requestUri != null) {
expectedExtractorResult |= WebmExtractor.RESULT_READ_INDEX;
} else {
requestUri = initializationUri;
}
} else {
requestUri = indexUri;
expectedExtractorResult |= WebmExtractor.RESULT_READ_INDEX;
}
DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
representation.getCacheKey());
return new InitializationWebmLoadable(dataSource, dataSpec, trigger, representation.format,
extractor, expectedExtractorResult);
}
private Chunk newMediaChunk(Representation representation, DashSegmentIndex segmentIndex,
WebmExtractor extractor, DataSource dataSource, int segmentNum, int trigger) {
int lastSegmentNum = segmentIndex.getLastSegmentNum();
int nextSegmentNum = segmentNum == lastSegmentNum ? -1 : segmentNum + 1;
long startTimeUs = segmentIndex.getTimeUs(segmentNum);
long endTimeUs = segmentNum < lastSegmentNum ? segmentIndex.getTimeUs(segmentNum + 1)
: startTimeUs + segmentIndex.getDurationUs(segmentNum);
RangedUri segmentUri = segmentIndex.getSegmentUrl(segmentNum);
DataSpec dataSpec = new DataSpec(segmentUri.getUri(), segmentUri.start, segmentUri.length,
representation.getCacheKey());
return new WebmMediaChunk(dataSource, dataSpec, representation.format, trigger, extractor,
startTimeUs, endTimeUs, nextSegmentNum);
}
private class InitializationWebmLoadable extends Chunk {
private final WebmExtractor extractor;
private final int expectedExtractorResult;
private final Uri uri;
public InitializationWebmLoadable(DataSource dataSource, DataSpec dataSpec, int trigger,
Format format, WebmExtractor extractor, int expectedExtractorResult) {
super(dataSource, dataSpec, format, trigger);
this.extractor = extractor;
this.expectedExtractorResult = expectedExtractorResult;
this.uri = dataSpec.uri;
}
@Override
protected void consumeStream(NonBlockingInputStream stream) throws IOException {
int result = extractor.read(stream, null);
if (result != expectedExtractorResult) {
throw new ParserException("Invalid extractor result. Expected "
+ expectedExtractorResult + ", got " + result);
}
if ((result & WebmExtractor.RESULT_READ_INDEX) != 0) {
segmentIndexes.put(format.id, new DashWrappingSegmentIndex(extractor.getIndex(), uri, 0));
}
}
}
}


@@ -0,0 +1,115 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.parser;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import java.util.Map;
import java.util.UUID;
/**
* Facilitates extraction of media samples from a container format.
*/
public interface Extractor {
/**
* An attempt to read from the input stream returned insufficient data.
*/
public static final int RESULT_NEED_MORE_DATA = 1;
/**
* The end of the input stream was reached.
*/
public static final int RESULT_END_OF_STREAM = 2;
/**
* A media sample was read.
*/
public static final int RESULT_READ_SAMPLE = 4;
/**
* Initialization data was read. The parsed data can be read using {@link #getFormat()} and
* {@link #getPsshInfo}.
*/
public static final int RESULT_READ_INIT = 8;
/**
* A sidx atom was read. The parsed data can be read using {@link #getIndex()}.
*/
public static final int RESULT_READ_INDEX = 16;
/**
* The next thing to be read is a sample, but a {@link SampleHolder} was not supplied.
*/
public static final int RESULT_NEED_SAMPLE_HOLDER = 32;
/**
* Returns the segment index parsed from the stream.
*
* @return The segment index, or null if a SIDX atom has yet to be parsed.
*/
public SegmentIndex getIndex();
/**
* Returns true if the offsets in the index returned by {@link #getIndex()} are relative to the
* first byte following the initialization data, or false if they are absolute (i.e. relative to
* the first byte of the stream).
*
* @return True if the offsets are relative to the first byte following the initialization data.
* False otherwise.
*/
public boolean hasRelativeIndexOffsets();
/**
* Returns the format of the samples contained within the media stream.
*
* @return The sample media format, or null if the format has yet to be parsed.
*/
public MediaFormat getFormat();
/**
* Returns the pssh information parsed from the stream.
*
* @return The pssh information. May be null if pssh data has yet to be parsed, or if the stream
* does not contain any pssh data.
*/
public Map<UUID, byte[]> getPsshInfo();
/**
* Consumes data from a {@link NonBlockingInputStream}.
* <p>
* The read terminates if the end of the input stream is reached, if an attempt to read from the
* input stream returned 0 bytes of data, or if a sample is read. The returned flags indicate
* both the reason for termination and data that was parsed during the read.
*
* @param inputStream The input stream from which data should be read.
* @param out A {@link SampleHolder} into which the next sample should be read. If null then
* {@link #RESULT_NEED_SAMPLE_HOLDER} will be returned once a sample has been reached.
* @return One or more of the {@code RESULT_*} flags defined in this class.
* @throws ParserException If an error occurs parsing the media data.
*/
public int read(NonBlockingInputStream inputStream, SampleHolder out) throws ParserException;
/**
* Seeks to a position before or equal to the requested time.
*
* @param seekTimeUs The desired seek time in microseconds.
* @param allowNoop Allow the seek operation to do nothing if the seek time is in the current
* fragment run, is equal to or greater than the time of the current sample, and if there
* does not exist a sync frame between these two times.
* @return True if the operation resulted in a change of state. False if it was a no-op.
*/
public boolean seekTo(long seekTimeUs, boolean allowNoop);
}
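
To make the contract above concrete, a hypothetical caller might drive an Extractor as sketched here. The variable names and flow are illustrative and not part of this commit; they mirror how Mp4MediaChunk and the chunk sources use the interface.

// Hypothetical usage sketch for the Extractor interface (not part of this commit).
int result = extractor.read(inputStream, sampleHolder);
if ((result & Extractor.RESULT_READ_INIT) != 0) {
  MediaFormat format = extractor.getFormat();   // initialization data has been parsed
}
if ((result & Extractor.RESULT_READ_INDEX) != 0) {
  SegmentIndex index = extractor.getIndex();    // segment index has been parsed
}
if ((result & Extractor.RESULT_READ_SAMPLE) != 0) {
  // sampleHolder has been populated with a sample.
} else if ((result & Extractor.RESULT_NEED_MORE_DATA) != 0) {
  // The stream could not supply enough data; call read() again once more data is buffered.
}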


@@ -18,6 +18,7 @@ package com.google.android.exoplayer.parser.mp4;
 import com.google.android.exoplayer.MediaFormat;
 import com.google.android.exoplayer.ParserException;
 import com.google.android.exoplayer.SampleHolder;
+import com.google.android.exoplayer.parser.Extractor;
 import com.google.android.exoplayer.parser.SegmentIndex;
 import com.google.android.exoplayer.parser.mp4.Atom.ContainerAtom;
 import com.google.android.exoplayer.parser.mp4.Atom.LeafAtom;
@@ -48,7 +49,7 @@ import java.util.UUID;
  * <p>
  * This implementation only supports de-muxed (i.e. single track) streams.
  */
-public final class FragmentedMp4Extractor {
+public final class FragmentedMp4Extractor implements Extractor {
   /**
    * Flag to work around an issue in some video streams where every frame is marked as a sync frame.
@@ -59,32 +60,6 @@
    */
   public static final int WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME = 1;
-  /**
-   * An attempt to read from the input stream returned insufficient data.
-   */
-  public static final int RESULT_NEED_MORE_DATA = 1;
-  /**
-   * The end of the input stream was reached.
-   */
-  public static final int RESULT_END_OF_STREAM = 2;
-  /**
-   * A media sample was read.
-   */
-  public static final int RESULT_READ_SAMPLE = 4;
-  /**
-   * A moov atom was read. The parsed data can be read using {@link #getFormat()} and
-   * {@link #getPsshInfo}.
-   */
-  public static final int RESULT_READ_INIT = 8;
-  /**
-   * A sidx atom was read. The parsed data can be read using {@link #getIndex()}.
-   */
-  public static final int RESULT_READ_INDEX = 16;
-  /**
-   * The next thing to be read is a sample, but a {@link SampleHolder} was not supplied.
-   */
-  public static final int RESULT_NEED_SAMPLE_HOLDER = 32;
   private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
       | RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
   private static final byte[] NAL_START_CODE = new byte[] {0, 0, 0, 1};
@@ -197,22 +172,13 @@
   }
   /**
-   * Returns the segment index parsed from the stream.
+   * Sideloads track information into the extractor.
    *
-   * @return The segment index, or null if a SIDX atom has yet to be parsed.
+   * @param track The track to sideload.
    */
-  public SegmentIndex getIndex() {
-    return segmentIndex;
-  }
-  /**
-   * Returns the pssh information parsed from the stream.
-   *
-   * @return The pssh information. May be null if the MOOV atom has yet to be parsed of if it did
-   *     not contain any pssh information.
-   */
-  public Map<UUID, byte[]> getPsshInfo() {
-    return psshData.isEmpty() ? null : psshData;
+  public void setTrack(Track track) {
+    this.extendsDefaults = new DefaultSampleValues(0, 0, 0, 0);
+    this.track = track;
   }
   /**
@@ -229,38 +195,27 @@
     psshData.put(uuid, data);
   }
-  /**
-   * Returns the format of the samples contained within the media stream.
-   *
-   * @return The sample media format, or null if a MOOV atom has yet to be parsed.
-   */
+  @Override
+  public Map<UUID, byte[]> getPsshInfo() {
+    return psshData.isEmpty() ? null : psshData;
+  }
+  @Override
+  public SegmentIndex getIndex() {
+    return segmentIndex;
+  }
+  @Override
+  public boolean hasRelativeIndexOffsets() {
+    return true;
+  }
+  @Override
   public MediaFormat getFormat() {
     return track == null ? null : track.mediaFormat;
   }
-  /**
-   * Sideloads track information into the extractor.
-   *
-   * @param track The track to sideload.
-   */
-  public void setTrack(Track track) {
-    this.extendsDefaults = new DefaultSampleValues(0, 0, 0, 0);
-    this.track = track;
-  }
-  /**
-   * Consumes data from a {@link NonBlockingInputStream}.
-   * <p>
-   * The read terminates if the end of the input stream is reached, if an attempt to read from the
-   * input stream returned 0 bytes of data, or if a sample is read. The returned flags indicate
-   * both the reason for termination and data that was parsed during the read.
-   *
-   * @param inputStream The input stream from which data should be read.
-   * @param out A {@link SampleHolder} into which the next sample should be read. If null then
-   *     {@link #RESULT_NEED_SAMPLE_HOLDER} will be returned once a sample has been reached.
-   * @return One or more of the {@code RESULT_*} flags defined in this class.
-   * @throws ParserException If an error occurs parsing the media data.
-   */
+  @Override
   public int read(NonBlockingInputStream inputStream, SampleHolder out)
       throws ParserException {
     try {
@@ -287,15 +242,7 @@
     }
   }
-  /**
-   * Seeks to a position before or equal to the requested time.
-   *
-   * @param seekTimeUs The desired seek time in microseconds.
-   * @param allowNoop Allow the seek operation to do nothing if the seek time is in the current
-   *     fragment run, is equal to or greater than the time of the current sample, and if there
-   *     does not exist a sync frame between these two times.
-   * @return True if the operation resulted in a change of state. False if it was a no-op.
-   */
+  @Override
   public boolean seekTo(long seekTimeUs, boolean allowNoop) {
     pendingSeekTimeMs = (int) (seekTimeUs / 1000);
     if (allowNoop && fragmentRun != null


@@ -1,462 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.SegmentIndex;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.LongArray;
import com.google.android.exoplayer.util.MimeTypes;
import android.annotation.TargetApi;
import android.media.MediaExtractor;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/**
* Default version of an extractor to facilitate data retrieval from the WebM container format.
*
* <p>WebM is a subset of the EBML elements defined for Matroska. More information about EBML and
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
* More info about WebM is <a href="http://www.webmproject.org/code/specs/container/">here</a>.
*/
@TargetApi(16)
public final class DefaultWebmExtractor implements WebmExtractor {
private static final String DOC_TYPE_WEBM = "webm";
private static final String CODEC_ID_VP9 = "V_VP9";
private static final int UNKNOWN = -1;
// Element IDs
private static final int ID_EBML = 0x1A45DFA3;
private static final int ID_EBML_READ_VERSION = 0x42F7;
private static final int ID_DOC_TYPE = 0x4282;
private static final int ID_DOC_TYPE_READ_VERSION = 0x4285;
private static final int ID_SEGMENT = 0x18538067;
private static final int ID_INFO = 0x1549A966;
private static final int ID_TIMECODE_SCALE = 0x2AD7B1;
private static final int ID_DURATION = 0x4489;
private static final int ID_CLUSTER = 0x1F43B675;
private static final int ID_TIME_CODE = 0xE7;
private static final int ID_SIMPLE_BLOCK = 0xA3;
private static final int ID_TRACKS = 0x1654AE6B;
private static final int ID_TRACK_ENTRY = 0xAE;
private static final int ID_CODEC_ID = 0x86;
private static final int ID_VIDEO = 0xE0;
private static final int ID_PIXEL_WIDTH = 0xB0;
private static final int ID_PIXEL_HEIGHT = 0xBA;
private static final int ID_CUES = 0x1C53BB6B;
private static final int ID_CUE_POINT = 0xBB;
private static final int ID_CUE_TIME = 0xB3;
private static final int ID_CUE_TRACK_POSITIONS = 0xB7;
private static final int ID_CUE_CLUSTER_POSITION = 0xF1;
// SimpleBlock Lacing Values
private static final int LACING_NONE = 0;
private static final int LACING_XIPH = 1;
private static final int LACING_FIXED = 2;
private static final int LACING_EBML = 3;
private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
| RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
private final EbmlReader reader;
private final byte[] simpleBlockTimecodeAndFlags = new byte[3];
private SampleHolder sampleHolder;
private int readResults;
private long segmentStartOffsetBytes = UNKNOWN;
private long segmentEndOffsetBytes = UNKNOWN;
private long timecodeScale = 1000000L;
private long durationUs = UNKNOWN;
private int pixelWidth = UNKNOWN;
private int pixelHeight = UNKNOWN;
private long cuesSizeBytes = UNKNOWN;
private long clusterTimecodeUs = UNKNOWN;
private long simpleBlockTimecodeUs = UNKNOWN;
private MediaFormat format;
private SegmentIndex cues;
private LongArray cueTimesUs;
private LongArray cueClusterPositions;
public DefaultWebmExtractor() {
this(new DefaultEbmlReader());
}
/* package */ DefaultWebmExtractor(EbmlReader reader) {
this.reader = reader;
this.reader.setEventHandler(new InnerEbmlEventHandler());
}
@Override
public int read(NonBlockingInputStream inputStream, SampleHolder sampleHolder) {
this.sampleHolder = sampleHolder;
this.readResults = 0;
while ((readResults & READ_TERMINATING_RESULTS) == 0) {
int ebmlReadResult = reader.read(inputStream);
if (ebmlReadResult == EbmlReader.READ_RESULT_NEED_MORE_DATA) {
readResults |= WebmExtractor.RESULT_NEED_MORE_DATA;
} else if (ebmlReadResult == EbmlReader.READ_RESULT_END_OF_STREAM) {
readResults |= WebmExtractor.RESULT_END_OF_STREAM;
}
}
this.sampleHolder = null;
return readResults;
}
@Override
public boolean seekTo(long seekTimeUs, boolean allowNoop) {
if (allowNoop
&& cues != null
&& clusterTimecodeUs != UNKNOWN
&& simpleBlockTimecodeUs != UNKNOWN
&& seekTimeUs >= simpleBlockTimecodeUs) {
int clusterIndex = Arrays.binarySearch(cues.timesUs, clusterTimecodeUs);
if (clusterIndex >= 0 && seekTimeUs < clusterTimecodeUs + cues.durationsUs[clusterIndex]) {
return false;
}
}
clusterTimecodeUs = UNKNOWN;
simpleBlockTimecodeUs = UNKNOWN;
reader.reset();
return true;
}
@Override
public SegmentIndex getIndex() {
return cues;
}
@Override
public MediaFormat getFormat() {
return format;
}
/* package */ int getElementType(int id) {
switch (id) {
case ID_EBML:
case ID_SEGMENT:
case ID_INFO:
case ID_CLUSTER:
case ID_TRACKS:
case ID_TRACK_ENTRY:
case ID_VIDEO:
case ID_CUES:
case ID_CUE_POINT:
case ID_CUE_TRACK_POSITIONS:
return EbmlReader.TYPE_MASTER;
case ID_EBML_READ_VERSION:
case ID_DOC_TYPE_READ_VERSION:
case ID_TIMECODE_SCALE:
case ID_TIME_CODE:
case ID_PIXEL_WIDTH:
case ID_PIXEL_HEIGHT:
case ID_CUE_TIME:
case ID_CUE_CLUSTER_POSITION:
return EbmlReader.TYPE_UNSIGNED_INT;
case ID_DOC_TYPE:
case ID_CODEC_ID:
return EbmlReader.TYPE_STRING;
case ID_SIMPLE_BLOCK:
return EbmlReader.TYPE_BINARY;
case ID_DURATION:
return EbmlReader.TYPE_FLOAT;
default:
return EbmlReader.TYPE_UNKNOWN;
}
}
/* package */ boolean onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
switch (id) {
case ID_SEGMENT:
if (segmentStartOffsetBytes != UNKNOWN || segmentEndOffsetBytes != UNKNOWN) {
throw new IllegalStateException("Multiple Segment elements not supported");
}
segmentStartOffsetBytes = elementOffsetBytes + headerSizeBytes;
segmentEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
break;
case ID_CUES:
cuesSizeBytes = headerSizeBytes + contentsSizeBytes;
cueTimesUs = new LongArray();
cueClusterPositions = new LongArray();
break;
default:
// pass
}
return true;
}
/* package */ boolean onMasterElementEnd(int id) {
switch (id) {
case ID_CUES:
buildCues();
return false;
case ID_VIDEO:
buildFormat();
return true;
default:
return true;
}
}
/* package */ boolean onIntegerElement(int id, long value) {
switch (id) {
case ID_EBML_READ_VERSION:
// Validate that EBMLReadVersion is supported. This extractor only supports v1.
if (value != 1) {
throw new IllegalArgumentException("EBMLReadVersion " + value + " not supported");
}
break;
case ID_DOC_TYPE_READ_VERSION:
// Validate that DocTypeReadVersion is supported. This extractor only supports up to v2.
if (value < 1 || value > 2) {
throw new IllegalArgumentException("DocTypeReadVersion " + value + " not supported");
}
break;
case ID_TIMECODE_SCALE:
timecodeScale = value;
break;
case ID_PIXEL_WIDTH:
pixelWidth = (int) value;
break;
case ID_PIXEL_HEIGHT:
pixelHeight = (int) value;
break;
case ID_CUE_TIME:
cueTimesUs.add(scaleTimecodeToUs(value));
break;
case ID_CUE_CLUSTER_POSITION:
cueClusterPositions.add(value);
break;
case ID_TIME_CODE:
clusterTimecodeUs = scaleTimecodeToUs(value);
break;
default:
// pass
}
return true;
}
/* package */ boolean onFloatElement(int id, double value) {
if (id == ID_DURATION) {
durationUs = scaleTimecodeToUs((long) value);
}
return true;
}
/* package */ boolean onStringElement(int id, String value) {
switch (id) {
case ID_DOC_TYPE:
// Validate that DocType is supported. This extractor only supports "webm".
if (!DOC_TYPE_WEBM.equals(value)) {
throw new IllegalArgumentException("DocType " + value + " not supported");
}
break;
case ID_CODEC_ID:
// Validate that CodecID is supported. This extractor only supports "V_VP9".
if (!CODEC_ID_VP9.equals(value)) {
throw new IllegalArgumentException("CodecID " + value + " not supported");
}
break;
default:
// pass
}
return true;
}
/* package */ boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
if (id == ID_SIMPLE_BLOCK) {
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
// for info about how data is organized in a SimpleBlock element.
// If we don't have a sample holder then don't consume the data.
if (sampleHolder == null) {
readResults |= RESULT_NEED_SAMPLE_HOLDER;
return false;
}
// Value of trackNumber is not used but needs to be read.
reader.readVarint(inputStream);
// Next three bytes have timecode and flags.
reader.readBytes(inputStream, simpleBlockTimecodeAndFlags, 3);
// First two bytes of the three are the relative timecode.
int timecode =
(simpleBlockTimecodeAndFlags[0] << 8) | (simpleBlockTimecodeAndFlags[1] & 0xff);
long timecodeUs = scaleTimecodeToUs(timecode);
// Last byte of the three has some flags and the lacing value.
boolean keyframe = (simpleBlockTimecodeAndFlags[2] & 0x80) == 0x80;
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
// Validate lacing and set info into sample holder.
switch (lacing) {
case LACING_NONE:
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? MediaExtractor.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
break;
case LACING_EBML:
case LACING_FIXED:
case LACING_XIPH:
default:
throw new IllegalStateException("Lacing mode " + lacing + " not supported");
}
ByteBuffer outputData = sampleHolder.data;
if (sampleHolder.allowDataBufferReplacement
&& (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size)) {
outputData = ByteBuffer.allocate(sampleHolder.size);
sampleHolder.data = outputData;
}
if (outputData == null) {
reader.skipBytes(inputStream, sampleHolder.size);
sampleHolder.size = 0;
} else {
reader.readBytes(inputStream, outputData, sampleHolder.size);
}
readResults |= RESULT_READ_SAMPLE;
}
return true;
}
private long scaleTimecodeToUs(long unscaledTimecode) {
return TimeUnit.NANOSECONDS.toMicros(unscaledTimecode * timecodeScale);
}
/**
* Build a video {@link MediaFormat} containing recently gathered Video information, if needed.
*
* <p>Replaces the previous {@link #format} only if video width/height have changed.
* {@link #format} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
*/
private void buildFormat() {
if (pixelWidth != UNKNOWN && pixelHeight != UNKNOWN
&& (format == null || format.width != pixelWidth || format.height != pixelHeight)) {
format = MediaFormat.createVideoFormat(
MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, pixelWidth, pixelHeight, null);
readResults |= RESULT_READ_INIT;
} else if (format == null) {
throw new IllegalStateException("Unable to build format");
}
}
/**
* Build a {@link SegmentIndex} containing recently gathered Cues information.
*
* <p>{@link #cues} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
*/
private void buildCues() {
if (segmentStartOffsetBytes == UNKNOWN) {
throw new IllegalStateException("Segment start/end offsets unknown");
} else if (durationUs == UNKNOWN) {
throw new IllegalStateException("Duration unknown");
} else if (cuesSizeBytes == UNKNOWN) {
throw new IllegalStateException("Cues size unknown");
} else if (cueTimesUs == null || cueClusterPositions == null
|| cueTimesUs.size() == 0 || cueTimesUs.size() != cueClusterPositions.size()) {
throw new IllegalStateException("Invalid/missing cue points");
}
int cuePointsSize = cueTimesUs.size();
int[] sizes = new int[cuePointsSize];
long[] offsets = new long[cuePointsSize];
long[] durationsUs = new long[cuePointsSize];
long[] timesUs = new long[cuePointsSize];
for (int i = 0; i < cuePointsSize; i++) {
timesUs[i] = cueTimesUs.get(i);
offsets[i] = segmentStartOffsetBytes + cueClusterPositions.get(i);
}
for (int i = 0; i < cuePointsSize - 1; i++) {
sizes[i] = (int) (offsets[i + 1] - offsets[i]);
durationsUs[i] = timesUs[i + 1] - timesUs[i];
}
sizes[cuePointsSize - 1] = (int) (segmentEndOffsetBytes - offsets[cuePointsSize - 1]);
durationsUs[cuePointsSize - 1] = durationUs - timesUs[cuePointsSize - 1];
cues = new SegmentIndex((int) cuesSizeBytes, sizes, offsets, durationsUs, timesUs);
cueTimesUs = null;
cueClusterPositions = null;
readResults |= RESULT_READ_INDEX;
}
/**
* Passes events through to {@link DefaultWebmExtractor} as
* callbacks from {@link EbmlReader} are received.
*/
private final class InnerEbmlEventHandler implements EbmlEventHandler {
@Override
public int getElementType(int id) {
return DefaultWebmExtractor.this.getElementType(id);
}
@Override
public void onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
DefaultWebmExtractor.this.onMasterElementStart(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes);
}
@Override
public void onMasterElementEnd(int id) {
DefaultWebmExtractor.this.onMasterElementEnd(id);
}
@Override
public void onIntegerElement(int id, long value) {
DefaultWebmExtractor.this.onIntegerElement(id, value);
}
@Override
public void onFloatElement(int id, double value) {
DefaultWebmExtractor.this.onFloatElement(id, value);
}
@Override
public void onStringElement(int id, String value) {
DefaultWebmExtractor.this.onStringElement(id, value);
}
@Override
public boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
return DefaultWebmExtractor.this.onBinaryElement(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes, inputStream);
}
}
}


@ -17,76 +17,460 @@ package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.SegmentIndex;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
/**
* Extractor to facilitate data retrieval from the WebM container format.
*
* <p>WebM is a subset of the EBML elements defined for Matroska. More information about EBML and
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
* More info about WebM is <a href="http://www.webmproject.org/code/specs/container/">here</a>.
*/
public interface WebmExtractor {
/**
* An attempt to read from the input stream returned insufficient data.
*/
public static final int RESULT_NEED_MORE_DATA = 1;
/**
* The end of the input stream was reached.
*/
public static final int RESULT_END_OF_STREAM = 2;
/**
* A media sample was read.
*/
public static final int RESULT_READ_SAMPLE = 4;
/**
* Initialization data was read. The parsed data can be read using {@link #getFormat()}.
*/
public static final int RESULT_READ_INIT = 8;
/**
* A sidx atom was read. The parsed data can be read using {@link #getIndex()}.
*/
public static final int RESULT_READ_INDEX = 16;
/**
* The next thing to be read is a sample, but a {@link SampleHolder} was not supplied.
*/
public static final int RESULT_NEED_SAMPLE_HOLDER = 32;
/**
* Consumes data from a {@link NonBlockingInputStream}.
*
* @param inputStream The input stream from which data should be read
* @param sampleHolder A {@link SampleHolder} into which the sample should be read
* @return One or more of the {@code RESULT_*} flags defined in this class.
*/
public int read(NonBlockingInputStream inputStream, SampleHolder sampleHolder);
/**
* Seeks to a position before or equal to the requested time.
*
* @param seekTimeUs The desired seek time in microseconds
* @param allowNoop Allow the seek operation to do nothing if the seek time is in the current
* segment, is equal to or greater than the time of the current sample, and if there does not
* exist a sync frame between these two times
* @return True if the operation resulted in a change of state. False if it was a no-op
*/
public boolean seekTo(long seekTimeUs, boolean allowNoop);
/**
* Returns the cues for the media stream.
*
* @return The cues in the form of a {@link SegmentIndex}, or null if the extractor is not yet
* prepared
*/
public SegmentIndex getIndex();
/**
* Returns the format of the samples contained within the media stream.
*
* @return The sample media format, or null if the extracted is not yet prepared
*/
public MediaFormat getFormat();
}

import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.Extractor;
import com.google.android.exoplayer.parser.SegmentIndex;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.LongArray;
import com.google.android.exoplayer.util.MimeTypes;
import android.annotation.TargetApi;
import android.media.MediaExtractor;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
* An extractor to facilitate data retrieval from the WebM container format.
*
* <p>WebM is a subset of the EBML elements defined for Matroska. More information about EBML and
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
* More info about WebM is <a href="http://www.webmproject.org/code/specs/container/">here</a>.
*/
@TargetApi(16)
public final class WebmExtractor implements Extractor {
private static final String DOC_TYPE_WEBM = "webm";
private static final String CODEC_ID_VP9 = "V_VP9";
private static final int UNKNOWN = -1;
// Element IDs
private static final int ID_EBML = 0x1A45DFA3;
private static final int ID_EBML_READ_VERSION = 0x42F7;
private static final int ID_DOC_TYPE = 0x4282;
private static final int ID_DOC_TYPE_READ_VERSION = 0x4285;
private static final int ID_SEGMENT = 0x18538067;
private static final int ID_INFO = 0x1549A966;
private static final int ID_TIMECODE_SCALE = 0x2AD7B1;
private static final int ID_DURATION = 0x4489;
private static final int ID_CLUSTER = 0x1F43B675;
private static final int ID_TIME_CODE = 0xE7;
private static final int ID_SIMPLE_BLOCK = 0xA3;
private static final int ID_TRACKS = 0x1654AE6B;
private static final int ID_TRACK_ENTRY = 0xAE;
private static final int ID_CODEC_ID = 0x86;
private static final int ID_VIDEO = 0xE0;
private static final int ID_PIXEL_WIDTH = 0xB0;
private static final int ID_PIXEL_HEIGHT = 0xBA;
private static final int ID_CUES = 0x1C53BB6B;
private static final int ID_CUE_POINT = 0xBB;
private static final int ID_CUE_TIME = 0xB3;
private static final int ID_CUE_TRACK_POSITIONS = 0xB7;
private static final int ID_CUE_CLUSTER_POSITION = 0xF1;
// SimpleBlock Lacing Values
private static final int LACING_NONE = 0;
private static final int LACING_XIPH = 1;
private static final int LACING_FIXED = 2;
private static final int LACING_EBML = 3;
private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
| RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
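// read() keeps consuming EBML events until one of the results above has been set, at which point
// control is returned to the caller.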
private final EbmlReader reader;
private final byte[] simpleBlockTimecodeAndFlags = new byte[3];
private SampleHolder sampleHolder;
private int readResults;
private long segmentStartOffsetBytes = UNKNOWN;
private long segmentEndOffsetBytes = UNKNOWN;
private long timecodeScale = 1000000L;
private long durationUs = UNKNOWN;
private int pixelWidth = UNKNOWN;
private int pixelHeight = UNKNOWN;
private long cuesSizeBytes = UNKNOWN;
private long clusterTimecodeUs = UNKNOWN;
private long simpleBlockTimecodeUs = UNKNOWN;
private MediaFormat format;
private SegmentIndex cues;
private LongArray cueTimesUs;
private LongArray cueClusterPositions;
public WebmExtractor() {
this(new DefaultEbmlReader());
}
/* package */ WebmExtractor(EbmlReader reader) {
this.reader = reader;
this.reader.setEventHandler(new InnerEbmlEventHandler());
}
@Override
public int read(NonBlockingInputStream inputStream, SampleHolder sampleHolder) {
this.sampleHolder = sampleHolder;
this.readResults = 0;
while ((readResults & READ_TERMINATING_RESULTS) == 0) {
int ebmlReadResult = reader.read(inputStream);
if (ebmlReadResult == EbmlReader.READ_RESULT_NEED_MORE_DATA) {
readResults |= WebmExtractor.RESULT_NEED_MORE_DATA;
} else if (ebmlReadResult == EbmlReader.READ_RESULT_END_OF_STREAM) {
readResults |= WebmExtractor.RESULT_END_OF_STREAM;
}
}
this.sampleHolder = null;
return readResults;
}
@Override
public boolean seekTo(long seekTimeUs, boolean allowNoop) {
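// A seek can be a no-op if the requested time lies within the cluster currently being read and
// is not behind the most recently read sample; otherwise the reader is reset so that parsing
// restarts from the new position.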
if (allowNoop
&& cues != null
&& clusterTimecodeUs != UNKNOWN
&& simpleBlockTimecodeUs != UNKNOWN
&& seekTimeUs >= simpleBlockTimecodeUs) {
int clusterIndex = Arrays.binarySearch(cues.timesUs, clusterTimecodeUs);
if (clusterIndex >= 0 && seekTimeUs < clusterTimecodeUs + cues.durationsUs[clusterIndex]) {
return false;
}
}
clusterTimecodeUs = UNKNOWN;
simpleBlockTimecodeUs = UNKNOWN;
reader.reset();
return true;
}
@Override
public SegmentIndex getIndex() {
return cues;
}
@Override
public boolean hasRelativeIndexOffsets() {
return false;
}
@Override
public MediaFormat getFormat() {
return format;
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
// TODO: Parse pssh data from Webm streams.
return null;
}
/* package */ int getElementType(int id) {
switch (id) {
case ID_EBML:
case ID_SEGMENT:
case ID_INFO:
case ID_CLUSTER:
case ID_TRACKS:
case ID_TRACK_ENTRY:
case ID_VIDEO:
case ID_CUES:
case ID_CUE_POINT:
case ID_CUE_TRACK_POSITIONS:
return EbmlReader.TYPE_MASTER;
case ID_EBML_READ_VERSION:
case ID_DOC_TYPE_READ_VERSION:
case ID_TIMECODE_SCALE:
case ID_TIME_CODE:
case ID_PIXEL_WIDTH:
case ID_PIXEL_HEIGHT:
case ID_CUE_TIME:
case ID_CUE_CLUSTER_POSITION:
return EbmlReader.TYPE_UNSIGNED_INT;
case ID_DOC_TYPE:
case ID_CODEC_ID:
return EbmlReader.TYPE_STRING;
case ID_SIMPLE_BLOCK:
return EbmlReader.TYPE_BINARY;
case ID_DURATION:
return EbmlReader.TYPE_FLOAT;
default:
return EbmlReader.TYPE_UNKNOWN;
}
}
/* package */ boolean onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
switch (id) {
case ID_SEGMENT:
if (segmentStartOffsetBytes != UNKNOWN || segmentEndOffsetBytes != UNKNOWN) {
throw new IllegalStateException("Multiple Segment elements not supported");
}
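// CueClusterPosition values are relative to the start of the segment payload, so record where
// the payload begins (and ends) for use when the index is built.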
segmentStartOffsetBytes = elementOffsetBytes + headerSizeBytes;
segmentEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
break;
case ID_CUES:
cuesSizeBytes = headerSizeBytes + contentsSizeBytes;
cueTimesUs = new LongArray();
cueClusterPositions = new LongArray();
break;
default:
// pass
}
return true;
}
/* package */ boolean onMasterElementEnd(int id) {
switch (id) {
case ID_CUES:
buildCues();
return false;
case ID_VIDEO:
buildFormat();
return true;
default:
return true;
}
}
/* package */ boolean onIntegerElement(int id, long value) {
switch (id) {
case ID_EBML_READ_VERSION:
// Validate that EBMLReadVersion is supported. This extractor only supports v1.
if (value != 1) {
throw new IllegalArgumentException("EBMLReadVersion " + value + " not supported");
}
break;
case ID_DOC_TYPE_READ_VERSION:
// Validate that DocTypeReadVersion is supported. This extractor only supports up to v2.
if (value < 1 || value > 2) {
throw new IllegalArgumentException("DocTypeReadVersion " + value + " not supported");
}
break;
case ID_TIMECODE_SCALE:
timecodeScale = value;
break;
case ID_PIXEL_WIDTH:
pixelWidth = (int) value;
break;
case ID_PIXEL_HEIGHT:
pixelHeight = (int) value;
break;
case ID_CUE_TIME:
cueTimesUs.add(scaleTimecodeToUs(value));
break;
case ID_CUE_CLUSTER_POSITION:
cueClusterPositions.add(value);
break;
case ID_TIME_CODE:
clusterTimecodeUs = scaleTimecodeToUs(value);
break;
default:
// pass
}
return true;
}
/* package */ boolean onFloatElement(int id, double value) {
if (id == ID_DURATION) {
durationUs = scaleTimecodeToUs((long) value);
}
return true;
}
/* package */ boolean onStringElement(int id, String value) {
switch (id) {
case ID_DOC_TYPE:
// Validate that DocType is supported. This extractor only supports "webm".
if (!DOC_TYPE_WEBM.equals(value)) {
throw new IllegalArgumentException("DocType " + value + " not supported");
}
break;
case ID_CODEC_ID:
// Validate that CodecID is supported. This extractor only supports "V_VP9".
if (!CODEC_ID_VP9.equals(value)) {
throw new IllegalArgumentException("CodecID " + value + " not supported");
}
break;
default:
// pass
}
return true;
}
/* package */ boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
if (id == ID_SIMPLE_BLOCK) {
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
// for info about how data is organized in a SimpleBlock element.
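// A SimpleBlock holds an EBML varint track number, a two byte timecode relative to the
// enclosing Cluster's timecode, a flags byte carrying the keyframe, invisible and lacing bits,
// and then the frame data itself.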
// If we don't have a sample holder then don't consume the data.
if (sampleHolder == null) {
readResults |= RESULT_NEED_SAMPLE_HOLDER;
return false;
}
// Value of trackNumber is not used but needs to be read.
reader.readVarint(inputStream);
// Next three bytes have timecode and flags.
reader.readBytes(inputStream, simpleBlockTimecodeAndFlags, 3);
// First two bytes of the three are the relative timecode.
int timecode =
(simpleBlockTimecodeAndFlags[0] << 8) | (simpleBlockTimecodeAndFlags[1] & 0xff);
long timecodeUs = scaleTimecodeToUs(timecode);
// Last byte of the three has some flags and the lacing value.
boolean keyframe = (simpleBlockTimecodeAndFlags[2] & 0x80) == 0x80;
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
// Validate lacing and set info into sample holder.
switch (lacing) {
case LACING_NONE:
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? MediaExtractor.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
break;
case LACING_EBML:
case LACING_FIXED:
case LACING_XIPH:
default:
throw new IllegalStateException("Lacing mode " + lacing + " not supported");
}
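// Copy the frame into the holder's buffer, replacing the buffer if it is too small and
// replacement is permitted; if no buffer is available the frame is skipped.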
ByteBuffer outputData = sampleHolder.data;
if (sampleHolder.allowDataBufferReplacement
&& (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size)) {
outputData = ByteBuffer.allocate(sampleHolder.size);
sampleHolder.data = outputData;
}
if (outputData == null) {
reader.skipBytes(inputStream, sampleHolder.size);
sampleHolder.size = 0;
} else {
reader.readBytes(inputStream, outputData, sampleHolder.size);
}
readResults |= RESULT_READ_SAMPLE;
}
return true;
}
private long scaleTimecodeToUs(long unscaledTimecode) {
return TimeUnit.NANOSECONDS.toMicros(unscaledTimecode * timecodeScale);
}
/**
* Build a video {@link MediaFormat} containing recently gathered Video information, if needed.
*
* <p>Replaces the previous {@link #format} only if video width/height have changed.
* {@link #format} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
*/
private void buildFormat() {
if (pixelWidth != UNKNOWN && pixelHeight != UNKNOWN
&& (format == null || format.width != pixelWidth || format.height != pixelHeight)) {
format = MediaFormat.createVideoFormat(
MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, pixelWidth, pixelHeight, null);
readResults |= RESULT_READ_INIT;
} else if (format == null) {
throw new IllegalStateException("Unable to build format");
}
}
/**
* Build a {@link SegmentIndex} containing recently gathered Cues information.
*
* <p>{@link #cues} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
*/
private void buildCues() {
if (segmentStartOffsetBytes == UNKNOWN) {
throw new IllegalStateException("Segment start/end offsets unknown");
} else if (durationUs == UNKNOWN) {
throw new IllegalStateException("Duration unknown");
} else if (cuesSizeBytes == UNKNOWN) {
throw new IllegalStateException("Cues size unknown");
} else if (cueTimesUs == null || cueClusterPositions == null
|| cueTimesUs.size() == 0 || cueTimesUs.size() != cueClusterPositions.size()) {
throw new IllegalStateException("Invalid/missing cue points");
}
int cuePointsSize = cueTimesUs.size();
int[] sizes = new int[cuePointsSize];
long[] offsets = new long[cuePointsSize];
long[] durationsUs = new long[cuePointsSize];
long[] timesUs = new long[cuePointsSize];
for (int i = 0; i < cuePointsSize; i++) {
timesUs[i] = cueTimesUs.get(i);
offsets[i] = segmentStartOffsetBytes + cueClusterPositions.get(i);
}
for (int i = 0; i < cuePointsSize - 1; i++) {
sizes[i] = (int) (offsets[i + 1] - offsets[i]);
durationsUs[i] = timesUs[i + 1] - timesUs[i];
}
sizes[cuePointsSize - 1] = (int) (segmentEndOffsetBytes - offsets[cuePointsSize - 1]);
durationsUs[cuePointsSize - 1] = durationUs - timesUs[cuePointsSize - 1];
cues = new SegmentIndex((int) cuesSizeBytes, sizes, offsets, durationsUs, timesUs);
cueTimesUs = null;
cueClusterPositions = null;
readResults |= RESULT_READ_INDEX;
}
/**
* Passes events through to {@link WebmExtractor} as
* callbacks from {@link EbmlReader} are received.
*/
private final class InnerEbmlEventHandler implements EbmlEventHandler {
@Override
public int getElementType(int id) {
return WebmExtractor.this.getElementType(id);
}
@Override
public void onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
WebmExtractor.this.onMasterElementStart(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes);
}
@Override
public void onMasterElementEnd(int id) {
WebmExtractor.this.onMasterElementEnd(id);
}
@Override
public void onIntegerElement(int id, long value) {
WebmExtractor.this.onIntegerElement(id, value);
}
@Override
public void onFloatElement(int id, double value) {
WebmExtractor.this.onFloatElement(id, value);
}
@Override
public void onStringElement(int id, String value) {
WebmExtractor.this.onStringElement(id, value);
}
@Override
public boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
return WebmExtractor.this.onBinaryElement(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes, inputStream);
}
}
}
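For context, the following is an illustrative sketch, not part of this commit, of how a caller drives the polling contract above. The names poll, inputStream and holder are placeholders for whatever the surrounding loader supplies; the sketch only shows how the RESULT_* flags returned by read() are typically handled.

import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.SegmentIndex;
import com.google.android.exoplayer.parser.webm.WebmExtractor;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;

/** Illustrative usage sketch only; not part of this commit. */
final class WebmExtractorUsageSketch {
  void poll(WebmExtractor extractor, NonBlockingInputStream inputStream, SampleHolder holder) {
    int results = extractor.read(inputStream, holder);
    if ((results & WebmExtractor.RESULT_READ_INIT) != 0) {
      // Initialization data was parsed; the track format is now available.
      MediaFormat format = extractor.getFormat();
    }
    if ((results & WebmExtractor.RESULT_READ_INDEX) != 0) {
      // The Cues element was parsed into a SegmentIndex usable for seeking.
      SegmentIndex index = extractor.getIndex();
    }
    if ((results & WebmExtractor.RESULT_READ_SAMPLE) != 0) {
      // holder.timeUs, holder.flags and holder.data now describe the sample that was read.
    }
    if ((results & WebmExtractor.RESULT_NEED_MORE_DATA) != 0) {
      // Return control to the caller and call read() again once more data has been buffered.
    }
  }
}

The SmoothStreamingChunkSource change below relies on this same contract, which is why its newMediaChunk signature can now accept any Extractor rather than FragmentedMp4Extractor specifically.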


@ -26,6 +26,7 @@ import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.Mp4MediaChunk;
import com.google.android.exoplayer.parser.Extractor;
import com.google.android.exoplayer.parser.mp4.FragmentedMp4Extractor;
import com.google.android.exoplayer.parser.mp4.Track;
import com.google.android.exoplayer.parser.mp4.TrackEncryptionBox;
@ -227,7 +228,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
FragmentedMp4Extractor extractor, DataSource dataSource, int chunkIndex,
Extractor extractor, DataSource dataSource, int chunkIndex,
boolean isLast, long chunkStartTimeUs, long nextChunkStartTimeUs, int trigger) {
int nextChunkIndex = isLast ? -1 : chunkIndex + 1;
long nextStartTimeUs = isLast ? -1 : nextChunkStartTimeUs;