Use a single set of track type constants.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=119732992
Author: olly
Date:   2016-04-13 04:11:47 -07:00 (committed by Oliver Woodman)
Commit: 35e0dd8401
Parent: 72025d7ea5

14 changed files with 89 additions and 86 deletions
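
For reference, a minimal sketch (not part of the commit) of how a call site reads once the per-component constants (AdaptationSet.TYPE_*, HlsChunkSource.TYPE_*, SmoothStreamingManifest.StreamElement.TYPE_*, Track.TYPE_*) are replaced by the shared constants on C. The class and method below are hypothetical, for illustration only; the constants and their values match the new definitions added to C.java in this diff.

    import com.google.android.exoplayer.C;

    // Hypothetical example class, not part of the ExoPlayer source.
    public final class TrackTypeExample {

      // Maps a shared track type constant to a human-readable label.
      public static String describe(int trackType) {
        switch (trackType) {
          case C.TRACK_TYPE_DEFAULT: // 0
            return "default";
          case C.TRACK_TYPE_AUDIO: // 1
            return "audio";
          case C.TRACK_TYPE_VIDEO: // 2
            return "video";
          case C.TRACK_TYPE_TEXT: // 3
            return "text";
          default: // C.TRACK_TYPE_UNKNOWN (-1) or any unrecognized value.
            return "unknown";
        }
      }

      private TrackTypeExample() {}

    }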

DashSourceBuilder.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.demo.player;
 
+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MultiSampleSource;
@@ -23,7 +24,6 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource;
 import com.google.android.exoplayer.chunk.ChunkSource;
 import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
 import com.google.android.exoplayer.dash.DashChunkSource;
-import com.google.android.exoplayer.dash.mpd.AdaptationSet;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
 import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
 import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
@@ -75,14 +75,14 @@ public class DashSourceBuilder implements SourceBuilder {
     // Build the video renderer.
     DataSource videoDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_VIDEO,
+    ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_VIDEO,
         videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
         VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
 
     // Build the audio renderer.
     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_AUDIO,
+    ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_AUDIO,
         audioDataSource, null, LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
         AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
@@ -90,7 +90,7 @@ public class DashSourceBuilder implements SourceBuilder {
     // Build the text renderer.
     DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_TEXT,
+    ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_TEXT,
         textDataSource, null, LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
         TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,

HlsSourceBuilder.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.demo.player;
 
+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MultiSampleSource;
@@ -69,20 +70,20 @@ public class HlsSourceBuilder implements SourceBuilder {
     DataSource defaultDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     HlsChunkSource defaultChunkSource = new HlsChunkSource(manifestFetcher,
-        HlsChunkSource.TYPE_DEFAULT, defaultDataSource, timestampAdjusterProvider,
+        C.TRACK_TYPE_DEFAULT, defaultDataSource, timestampAdjusterProvider,
         new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter));
     HlsSampleSource defaultSampleSource = new HlsSampleSource(defaultChunkSource, loadControl,
         MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
 
     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    HlsChunkSource audioChunkSource = new HlsChunkSource(manifestFetcher, HlsChunkSource.TYPE_AUDIO,
+    HlsChunkSource audioChunkSource = new HlsChunkSource(manifestFetcher, C.TRACK_TYPE_AUDIO,
         audioDataSource, timestampAdjusterProvider, null);
     HlsSampleSource audioSampleSource = new HlsSampleSource(audioChunkSource, loadControl,
         AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
 
     DataSource subtitleDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     HlsChunkSource subtitleChunkSource = new HlsChunkSource(manifestFetcher,
-        HlsChunkSource.TYPE_SUBTITLE, subtitleDataSource, timestampAdjusterProvider, null);
+        C.TRACK_TYPE_TEXT, subtitleDataSource, timestampAdjusterProvider, null);
     HlsSampleSource subtitleSampleSource = new HlsSampleSource(subtitleChunkSource, loadControl,
         TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);

SmoothStreamingSourceBuilder.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.demo.player;
 
+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MultiSampleSource;
@@ -74,8 +75,8 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
     // Build the video renderer.
     DataSource videoDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
-        SmoothStreamingManifest.StreamElement.TYPE_VIDEO,
-        videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
+        C.TRACK_TYPE_VIDEO, videoDataSource, new AdaptiveEvaluator(bandwidthMeter),
+        LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
         VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
         DemoPlayer.TYPE_VIDEO);
@@ -83,16 +84,14 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
     // Build the audio renderer.
     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
-        SmoothStreamingManifest.StreamElement.TYPE_AUDIO, audioDataSource, null,
-        LIVE_EDGE_LATENCY_MS);
+        C.TRACK_TYPE_AUDIO, audioDataSource, null, LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
         AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
 
     // Build the text renderer.
     DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
-        SmoothStreamingManifest.StreamElement.TYPE_TEXT, textDataSource, null,
-        LIVE_EDGE_LATENCY_MS);
+        C.TRACK_TYPE_TEXT, textDataSource, null, LIVE_EDGE_LATENCY_MS);
     ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
         TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);

C.java

@@ -119,6 +119,31 @@ public final class C {
    */
  public static final int RESULT_MAX_LENGTH_EXCEEDED = -2;
 
+  /**
+   * A type constant for tracks of unknown type.
+   */
+  public static final int TRACK_TYPE_UNKNOWN = -1;
+
+  /**
+   * A type constant for tracks of some default type, where the type itself is unknown.
+   */
+  public static final int TRACK_TYPE_DEFAULT = 0;
+
+  /**
+   * A type constant for audio tracks.
+   */
+  public static final int TRACK_TYPE_AUDIO = 1;
+
+  /**
+   * A type constant for video tracks.
+   */
+  public static final int TRACK_TYPE_VIDEO = 2;
+
+  /**
+   * A type constant for text tracks.
+   */
+  public static final int TRACK_TYPE_TEXT = 3;
+
  private C() {}
 
 }

DashChunkSource.java

@@ -96,8 +96,7 @@ public class DashChunkSource implements ChunkSource {
   /**
    * @param manifestFetcher A fetcher for the manifest.
    * @param adaptationSetType The type of the adaptation set exposed by this source. One of
-   *     {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
-   *     {@link AdaptationSet#TYPE_TEXT}.
+   *     {@link C#TRACK_TYPE_AUDIO}, {@link C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_TEXT}.
    * @param dataSource A {@link DataSource} suitable for loading the media data.
    * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
    * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should

AdaptationSet.java

@@ -23,11 +23,6 @@ import java.util.List;
  */
 public class AdaptationSet {
 
-  public static final int TYPE_UNKNOWN = -1;
-  public static final int TYPE_VIDEO = 0;
-  public static final int TYPE_AUDIO = 1;
-  public static final int TYPE_TEXT = 2;
-
   public final int id;
   public final int type;

MediaPresentationDescriptionParser.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.dash.mpd;
 
+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.Format;
 import com.google.android.exoplayer.ParserException;
 import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentList;
@@ -285,25 +286,25 @@ public class MediaPresentationDescriptionParser extends DefaultHandler
   protected int parseContentType(XmlPullParser xpp) {
     String contentType = xpp.getAttributeValue(null, "contentType");
-    return TextUtils.isEmpty(contentType) ? AdaptationSet.TYPE_UNKNOWN
-        : MimeTypes.BASE_TYPE_AUDIO.equals(contentType) ? AdaptationSet.TYPE_AUDIO
-        : MimeTypes.BASE_TYPE_VIDEO.equals(contentType) ? AdaptationSet.TYPE_VIDEO
-        : MimeTypes.BASE_TYPE_TEXT.equals(contentType) ? AdaptationSet.TYPE_TEXT
-        : AdaptationSet.TYPE_UNKNOWN;
+    return TextUtils.isEmpty(contentType) ? C.TRACK_TYPE_UNKNOWN
+        : MimeTypes.BASE_TYPE_AUDIO.equals(contentType) ? C.TRACK_TYPE_AUDIO
+        : MimeTypes.BASE_TYPE_VIDEO.equals(contentType) ? C.TRACK_TYPE_VIDEO
+        : MimeTypes.BASE_TYPE_TEXT.equals(contentType) ? C.TRACK_TYPE_TEXT
+        : C.TRACK_TYPE_UNKNOWN;
   }
 
   protected int getContentType(Representation representation) {
     String sampleMimeType = representation.format.sampleMimeType;
     if (TextUtils.isEmpty(sampleMimeType)) {
-      return AdaptationSet.TYPE_UNKNOWN;
+      return C.TRACK_TYPE_UNKNOWN;
     } else if (MimeTypes.isVideo(sampleMimeType)) {
-      return AdaptationSet.TYPE_VIDEO;
+      return C.TRACK_TYPE_VIDEO;
     } else if (MimeTypes.isAudio(sampleMimeType)) {
-      return AdaptationSet.TYPE_AUDIO;
+      return C.TRACK_TYPE_AUDIO;
    } else if (mimeTypeIsRawText(sampleMimeType)) {
-      return AdaptationSet.TYPE_TEXT;
+      return C.TRACK_TYPE_TEXT;
     }
-    return AdaptationSet.TYPE_UNKNOWN;
+    return C.TRACK_TYPE_UNKNOWN;
   }
 
   /**
@@ -691,17 +692,17 @@ public class MediaPresentationDescriptionParser extends DefaultHandler
    * Checks two adaptation set content types for consistency, returning the consistent type, or
    * throwing an {@link IllegalStateException} if the types are inconsistent.
    * <p>
-   * Two types are consistent if they are equal, or if one is {@link AdaptationSet#TYPE_UNKNOWN}.
-   * Where one of the types is {@link AdaptationSet#TYPE_UNKNOWN}, the other is returned.
+   * Two types are consistent if they are equal, or if one is {@link C#TRACK_TYPE_UNKNOWN}.
+   * Where one of the types is {@link C#TRACK_TYPE_UNKNOWN}, the other is returned.
    *
    * @param firstType The first type.
    * @param secondType The second type.
    * @return The consistent type.
    */
   private static int checkContentTypeConsistency(int firstType, int secondType) {
-    if (firstType == AdaptationSet.TYPE_UNKNOWN) {
+    if (firstType == C.TRACK_TYPE_UNKNOWN) {
       return secondType;
-    } else if (secondType == AdaptationSet.TYPE_UNKNOWN) {
+    } else if (secondType == C.TRACK_TYPE_UNKNOWN) {
       return firstType;
     } else {
       Assertions.checkState(firstType == secondType);

AtomParsers.java

@@ -38,6 +38,12 @@ import java.util.List;
  */
 /* package */ final class AtomParsers {
 
+  private static final int TYPE_vide = Util.getIntegerCodeForString("vide");
+  private static final int TYPE_soun = Util.getIntegerCodeForString("soun");
+  private static final int TYPE_text = Util.getIntegerCodeForString("text");
+  private static final int TYPE_sbtl = Util.getIntegerCodeForString("sbtl");
+  private static final int TYPE_subt = Util.getIntegerCodeForString("subt");
+
   /**
    * Parses a trak atom (defined in 14496-12).
    *
@@ -49,8 +55,7 @@ import java.util.List;
   public static Track parseTrak(Atom.ContainerAtom trak, Atom.LeafAtom mvhd, boolean isQuickTime) {
     Atom.ContainerAtom mdia = trak.getContainerAtomOfType(Atom.TYPE_mdia);
     int trackType = parseHdlr(mdia.getLeafAtomOfType(Atom.TYPE_hdlr).data);
-    if (trackType != Track.TYPE_soun && trackType != Track.TYPE_vide && trackType != Track.TYPE_text
-        && trackType != Track.TYPE_sbtl && trackType != Track.TYPE_subt) {
+    if (trackType == C.TRACK_TYPE_UNKNOWN) {
       return null;
     }
@@ -501,7 +506,16 @@
    */
   private static int parseHdlr(ParsableByteArray hdlr) {
     hdlr.setPosition(Atom.FULL_HEADER_SIZE + 4);
-    return hdlr.readInt();
+    int trackType = hdlr.readInt();
+    if (trackType == TYPE_soun) {
+      return C.TRACK_TYPE_AUDIO;
+    } else if (trackType == TYPE_vide) {
+      return C.TRACK_TYPE_VIDEO;
+    } else if (trackType == TYPE_text || trackType == TYPE_sbtl || trackType == TYPE_subt) {
+      return C.TRACK_TYPE_TEXT;
+    } else {
+      return C.TRACK_TYPE_UNKNOWN;
+    }
   }
 
   /**

FragmentedMp4Extractor.java

@@ -635,7 +635,7 @@ public final class FragmentedMp4Extractor implements Extractor {
     long timescale = track.timescale;
     long cumulativeTime = decodeTime;
-    boolean workaroundEveryVideoFrameIsSyncFrame = track.type == Track.TYPE_vide
+    boolean workaroundEveryVideoFrameIsSyncFrame = track.type == C.TRACK_TYPE_VIDEO
         && (flags & FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME) != 0;
     for (int i = 0; i < sampleCount; i++) {
       // Use trun values if present, otherwise tfhd, otherwise trex.

Track.java

@@ -17,27 +17,19 @@ package com.google.android.exoplayer.extractor.mp4;
 import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.Format;
-import com.google.android.exoplayer.util.Util;
 
 /**
  * Encapsulates information describing an MP4 track.
  */
 public final class Track {
 
-  public static final int TYPE_vide = Util.getIntegerCodeForString("vide");
-  public static final int TYPE_soun = Util.getIntegerCodeForString("soun");
-  public static final int TYPE_text = Util.getIntegerCodeForString("text");
-  public static final int TYPE_sbtl = Util.getIntegerCodeForString("sbtl");
-  public static final int TYPE_subt = Util.getIntegerCodeForString("subt");
-
   /**
    * The track identifier.
    */
   public final int id;
 
   /**
-   * One of {@link #TYPE_vide}, {@link #TYPE_soun}, {@link #TYPE_text} and {@link #TYPE_sbtl} and
-   * {@link #TYPE_subt}.
+   * One of {@link C#TRACK_TYPE_AUDIO}, {@link C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_TEXT}.
    */
   public final int type;

HlsChunkSource.java

@@ -56,10 +56,6 @@ import java.util.Locale;
  */
 public class HlsChunkSource {
 
-  public static final int TYPE_DEFAULT = 0;
-  public static final int TYPE_AUDIO = 1;
-  public static final int TYPE_SUBTITLE = 2;
-
   /**
    * The default time for which a media playlist should be blacklisted.
    */
@@ -103,8 +99,8 @@ public class HlsChunkSource {
   /**
    * @param manifestFetcher A fetcher for the playlist.
-   * @param type The type of chunk provided by the source. One of {@link #TYPE_DEFAULT},
-   *     {@link #TYPE_AUDIO} and {@link #TYPE_SUBTITLE}.
+   * @param type The type of chunk provided by the source. One of {@link C#TRACK_TYPE_DEFAULT},
+   *     {@link C#TRACK_TYPE_AUDIO} and {@link C#TRACK_TYPE_TEXT}.
    * @param dataSource A {@link DataSource} suitable for loading the media data.
    * @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If
    *     multiple {@link HlsChunkSource}s are used for a single playback, they should all share the
@@ -302,7 +298,7 @@ public class HlsChunkSource {
    * This method should only be called after the source has been prepared.
    */
   public void seek() {
-    if (type == TYPE_DEFAULT) {
+    if (type == C.TRACK_TYPE_DEFAULT) {
       timestampAdjusterProvider.reset();
     }
   }
@@ -536,8 +532,8 @@ public class HlsChunkSource {
   // Private methods.
 
   private void processMasterPlaylist(HlsMasterPlaylist playlist) {
-    if (type == TYPE_SUBTITLE || type == TYPE_AUDIO) {
-      List<Variant> variantList = type == TYPE_AUDIO ? playlist.audios : playlist.subtitles;
+    if (type == C.TRACK_TYPE_TEXT || type == C.TRACK_TYPE_AUDIO) {
+      List<Variant> variantList = type == C.TRACK_TYPE_AUDIO ? playlist.audios : playlist.subtitles;
       if (variantList != null && !variantList.isEmpty()) {
         variants = new Variant[variantList.size()];
         variantList.toArray(variants);

SmoothStreamingChunkSource.java

@@ -87,8 +87,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
   /**
    * @param manifestFetcher A fetcher for the manifest.
    * @param streamElementType The type of stream element exposed by this source. One of
-   *     {@link StreamElement#TYPE_VIDEO}, {@link StreamElement#TYPE_AUDIO} and
-   *     {@link StreamElement#TYPE_TEXT}.
+   *     {@link C#TRACK_TYPE_VIDEO}, {@link C#TRACK_TYPE_AUDIO} and {@link C#TRACK_TYPE_TEXT}.
    * @param dataSource A {@link DataSource} suitable for loading the media data.
    * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
    * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
@@ -322,9 +321,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
       long timescale = manifest.streamElements[i].timescale;
       extractorWrappers = new ChunkExtractorWrapper[formats.length];
       for (int j = 0; j < formats.length; j++) {
-        int nalUnitLengthFieldLength = streamElementType == StreamElement.TYPE_VIDEO ? 4 : -1;
-        int mp4TrackType = getMp4TrackType(streamElementType);
-        Track track = new Track(j, mp4TrackType, timescale, C.UNKNOWN_TIME_US, durationUs,
+        int nalUnitLengthFieldLength = streamElementType == C.TRACK_TYPE_VIDEO ? 4 : -1;
+        Track track = new Track(j, streamElementType, timescale, C.UNKNOWN_TIME_US, durationUs,
             formats[j], trackEncryptionBoxes, nalUnitLengthFieldLength, null, null);
         FragmentedMp4Extractor extractor = new FragmentedMp4Extractor(
             FragmentedMp4Extractor.FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME
@@ -429,17 +427,4 @@ public class SmoothStreamingChunkSource implements ChunkSource {
     data[secondPosition] = temp;
   }
 
-  private static int getMp4TrackType(int streamElementType) {
-    switch (streamElementType) {
-      case StreamElement.TYPE_AUDIO:
-        return Track.TYPE_soun;
-      case StreamElement.TYPE_VIDEO:
-        return Track.TYPE_vide;
-      case StreamElement.TYPE_TEXT:
-        return Track.TYPE_text;
-      default:
-        throw new IllegalArgumentException("Invalid stream type: " + streamElementType);
-    }
-  }
-
 }

SmoothStreamingManifest.java

@@ -127,11 +127,6 @@ public class SmoothStreamingManifest {
    */
   public static class StreamElement {
 
-    public static final int TYPE_UNKNOWN = -1;
-    public static final int TYPE_AUDIO = 0;
-    public static final int TYPE_VIDEO = 1;
-    public static final int TYPE_TEXT = 2;
-
     private static final String URL_PLACEHOLDER_START_TIME = "{start time}";
     private static final String URL_PLACEHOLDER_BITRATE = "{bitrate}";

SmoothStreamingManifestParser.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.smoothstreaming;
 
+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.Format;
 import com.google.android.exoplayer.ParserException;
 import com.google.android.exoplayer.extractor.mp4.PsshAtomUtil;
@@ -534,7 +535,7 @@ public class SmoothStreamingManifestParser implements UriLoadable.Parser<SmoothS
     private void parseStreamElementStartTag(XmlPullParser parser) throws ParserException {
       type = parseType(parser);
       putNormalizedAttribute(KEY_TYPE, type);
-      if (type == StreamElement.TYPE_TEXT) {
+      if (type == C.TRACK_TYPE_TEXT) {
         subType = parseRequiredString(parser, KEY_SUB_TYPE);
       } else {
         subType = parser.getAttributeValue(null, KEY_SUB_TYPE);
@@ -559,11 +560,11 @@ public class SmoothStreamingManifestParser implements UriLoadable.Parser<SmoothS
       String value = parser.getAttributeValue(null, KEY_TYPE);
       if (value != null) {
         if (KEY_TYPE_AUDIO.equalsIgnoreCase(value)) {
-          return StreamElement.TYPE_AUDIO;
+          return C.TRACK_TYPE_AUDIO;
         } else if (KEY_TYPE_VIDEO.equalsIgnoreCase(value)) {
-          return StreamElement.TYPE_VIDEO;
+          return C.TRACK_TYPE_VIDEO;
         } else if (KEY_TYPE_TEXT.equalsIgnoreCase(value)) {
-          return StreamElement.TYPE_TEXT;
+          return C.TRACK_TYPE_TEXT;
         } else {
           throw new ParserException("Invalid key value[" + value + "]");
         }
@@ -617,14 +618,14 @@ public class SmoothStreamingManifestParser implements UriLoadable.Parser<SmoothS
       int bitrate = parseRequiredInt(parser, KEY_BITRATE);
       String sampleMimeType = fourCCToMimeType(parseRequiredString(parser, KEY_FOUR_CC));
-      if (type == StreamElement.TYPE_VIDEO) {
+      if (type == C.TRACK_TYPE_VIDEO) {
         int width = parseRequiredInt(parser, KEY_MAX_WIDTH);
         int height = parseRequiredInt(parser, KEY_MAX_HEIGHT);
         List<byte[]> codecSpecificData = buildCodecSpecificData(
             parser.getAttributeValue(null, KEY_CODEC_PRIVATE_DATA));
         format = Format.createVideoContainerFormat(id, MimeTypes.VIDEO_MP4, sampleMimeType, bitrate,
             width, height, Format.NO_VALUE, codecSpecificData);
-      } else if (type == StreamElement.TYPE_AUDIO) {
+      } else if (type == C.TRACK_TYPE_AUDIO) {
         sampleMimeType = sampleMimeType == null ? MimeTypes.AUDIO_AAC : sampleMimeType;
         int channels = parseRequiredInt(parser, KEY_CHANNELS);
         int samplingRate = parseRequiredInt(parser, KEY_SAMPLING_RATE);
@@ -633,7 +634,7 @@ public class SmoothStreamingManifestParser implements UriLoadable.Parser<SmoothS
         String language = (String) getNormalizedAttribute(KEY_LANGUAGE);
         format = Format.createAudioContainerFormat(id, MimeTypes.AUDIO_MP4, sampleMimeType, bitrate,
             channels, samplingRate, codecSpecificData, language);
-      } else if (type == StreamElement.TYPE_TEXT) {
+      } else if (type == C.TRACK_TYPE_TEXT) {
        String language = (String) getNormalizedAttribute(KEY_LANGUAGE);
        format = Format.createTextContainerFormat(id, MimeTypes.APPLICATION_MP4, sampleMimeType,
            bitrate, language);