Use C constants as event source ids.

The source construction code currently in SourceBuilder will be
moving into the core library, so it can't refer to
DemoPlayer constants.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=120822759
olly authored 2016-04-26 09:52:52 -07:00; committed by Oliver Woodman
parent 845d250442
commit 784a18a188
3 changed files with 33 additions and 32 deletions
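As context for the diff below, here is a minimal, illustrative sketch (not part of the commit) of the pattern it adopts: sample sources now report the core library's C.TRACK_TYPE_* constants as event source ids, so listener code can branch on them without referencing DemoPlayer's own constants. The SourceIdSketch class and describe method are hypothetical names used only for this example.

// Illustrative sketch only (not part of this commit); assumes the ExoPlayer
// classes shown in the diff are on the classpath. The class and method names
// here are hypothetical.
import com.google.android.exoplayer.C;

public final class SourceIdSketch {

  private SourceIdSketch() {}

  // Mirrors the listener logic in DemoPlayer after this change: the source id
  // reported by a sample source is a C.TRACK_TYPE_* constant rather than a
  // DemoPlayer.TYPE_* constant, so code that moves into the core library can
  // emit it without depending on the demo app.
  public static String describe(int sourceId) {
    if (sourceId == C.TRACK_TYPE_VIDEO) {
      return "video source";
    } else if (sourceId == C.TRACK_TYPE_AUDIO) {
      return "audio source";
    } else if (sourceId == C.TRACK_TYPE_TEXT) {
      return "text source";
    }
    return "other source";
  }
}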

PlayerActivity.java

@@ -409,29 +409,32 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,

   private void updateButtonVisibilities() {
     retryButton.setVisibility(playerNeedsSource ? View.VISIBLE : View.GONE);
-    videoButton.setVisibility(haveTracks(DemoPlayer.TYPE_VIDEO) ? View.VISIBLE : View.GONE);
-    audioButton.setVisibility(haveTracks(DemoPlayer.TYPE_AUDIO) ? View.VISIBLE : View.GONE);
-    textButton.setVisibility(haveTracks(DemoPlayer.TYPE_TEXT) ? View.VISIBLE : View.GONE);
+    videoButton.setVisibility(haveTracks(DemoPlayer.RENDERER_INDEX_VIDEO) ? View.VISIBLE
+        : View.GONE);
+    audioButton.setVisibility(haveTracks(DemoPlayer.RENDERER_INDEX_AUDIO) ? View.VISIBLE
+        : View.GONE);
+    textButton.setVisibility(haveTracks(DemoPlayer.RENDERER_INDEX_TEXT) ? View.VISIBLE
+        : View.GONE);
   }

-  private boolean haveTracks(int type) {
+  private boolean haveTracks(int rendererIndex) {
     TrackInfo trackInfo = player == null ? null : player.getTrackInfo();
-    return trackInfo != null && trackInfo.getTrackGroups(type).length != 0;
+    return trackInfo != null && trackInfo.getTrackGroups(rendererIndex).length != 0;
   }

   public void showVideoPopup(@SuppressWarnings("unused") View v) {
     trackSelectionHelper.showSelectionDialog(this, R.string.video, player.getTrackInfo(),
-        DemoPlayer.TYPE_VIDEO);
+        DemoPlayer.RENDERER_INDEX_VIDEO);
   }

   public void showAudioPopup(@SuppressWarnings("unused") View v) {
     trackSelectionHelper.showSelectionDialog(this, R.string.audio, player.getTrackInfo(),
-        DemoPlayer.TYPE_AUDIO);
+        DemoPlayer.RENDERER_INDEX_AUDIO);
   }

   public void showTextPopup(@SuppressWarnings("unused") View v) {
     trackSelectionHelper.showSelectionDialog(this, R.string.text, player.getTrackInfo(),
-        DemoPlayer.TYPE_TEXT);
+        DemoPlayer.RENDERER_INDEX_TEXT);
   }

   private void toggleControlsVisibility() {

DemoPlayer.java

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.demo.player;

+import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.CodecCounters;
 import com.google.android.exoplayer.DefaultTrackSelector;
 import com.google.android.exoplayer.DefaultTrackSelector.TrackInfo;
@@ -133,10 +134,10 @@ public class DemoPlayer implements ExoPlayer.Listener, DefaultTrackSelector.Even
   public static final int STATE_ENDED = ExoPlayer.STATE_ENDED;
   public static final int RENDERER_COUNT = 4;

-  public static final int TYPE_VIDEO = 0;
-  public static final int TYPE_AUDIO = 1;
-  public static final int TYPE_TEXT = 2;
-  public static final int TYPE_METADATA = 3;
+  public static final int RENDERER_INDEX_VIDEO = 0;
+  public static final int RENDERER_INDEX_AUDIO = 1;
+  public static final int RENDERER_INDEX_TEXT = 2;
+  public static final int RENDERER_INDEX_METADATA = 3;

   private final ExoPlayer player;
   private final DefaultTrackSelector trackSelector;
@@ -336,10 +337,10 @@ public class DemoPlayer implements ExoPlayer.Listener, DefaultTrackSelector.Even
     if (infoListener == null) {
       return;
     }
-    if (sourceId == TYPE_VIDEO) {
+    if (sourceId == C.TRACK_TYPE_VIDEO) {
       videoFormat = format;
       infoListener.onVideoFormatEnabled(format, trigger, mediaTimeMs);
-    } else if (sourceId == TYPE_AUDIO) {
+    } else if (sourceId == C.TRACK_TYPE_AUDIO) {
       infoListener.onAudioFormatEnabled(format, trigger, mediaTimeMs);
     }
   }

SourceBuilder.java

@@ -81,23 +81,21 @@ public class SourceBuilder {
     ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_VIDEO,
         videoDataSource, new AdaptiveEvaluator(bandwidthMeter));
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
-        VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
+        VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_VIDEO);

     // Build the audio renderer.
     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_AUDIO,
         audioDataSource, null);
     ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
-        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
-        DemoPlayer.TYPE_AUDIO);
+        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_AUDIO);

     // Build the text renderer.
     DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, C.TRACK_TYPE_TEXT,
         textDataSource, null);
     ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
-        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
-        DemoPlayer.TYPE_TEXT);
+        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_TEXT);

     return new MultiSampleSource(videoSampleSource, audioSampleSource, textSampleSource);
   }
@@ -122,22 +120,21 @@ public class SourceBuilder {
     ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
         C.TRACK_TYPE_VIDEO, videoDataSource, new AdaptiveEvaluator(bandwidthMeter));
     ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
-        VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
-        DemoPlayer.TYPE_VIDEO);
+        VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_VIDEO);

     // Build the audio renderer.
     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
         C.TRACK_TYPE_AUDIO, audioDataSource, null);
     ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
-        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
+        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_AUDIO);

     // Build the text renderer.
     DataSource textDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
-        C.TRACK_TYPE_TEXT, textDataSource, null);
+    ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher, C.TRACK_TYPE_TEXT,
+        textDataSource, null);
     ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
-        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
+        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_TEXT);

     return new MultiSampleSource(videoSampleSource, audioSampleSource, textSampleSource);
   }
@@ -156,23 +153,23 @@ public class SourceBuilder {
     PtsTimestampAdjusterProvider timestampAdjusterProvider = new PtsTimestampAdjusterProvider();

     DataSource defaultDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    HlsChunkSource defaultChunkSource = new HlsChunkSource(manifestFetcher,
-        C.TRACK_TYPE_DEFAULT, defaultDataSource, timestampAdjusterProvider,
+    HlsChunkSource defaultChunkSource = new HlsChunkSource(manifestFetcher, C.TRACK_TYPE_DEFAULT,
+        defaultDataSource, timestampAdjusterProvider,
         new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter));
     HlsSampleSource defaultSampleSource = new HlsSampleSource(defaultChunkSource, loadControl,
-        MUXED_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
+        MUXED_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_VIDEO);

     DataSource audioDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
     HlsChunkSource audioChunkSource = new HlsChunkSource(manifestFetcher, C.TRACK_TYPE_AUDIO,
         audioDataSource, timestampAdjusterProvider, null);
     HlsSampleSource audioSampleSource = new HlsSampleSource(audioChunkSource, loadControl,
-        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
+        AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_AUDIO);

     DataSource subtitleDataSource = dataSourceFactory.createDataSource(bandwidthMeter);
-    HlsChunkSource subtitleChunkSource = new HlsChunkSource(manifestFetcher,
-        C.TRACK_TYPE_TEXT, subtitleDataSource, timestampAdjusterProvider, null);
+    HlsChunkSource subtitleChunkSource = new HlsChunkSource(manifestFetcher, C.TRACK_TYPE_TEXT,
+        subtitleDataSource, timestampAdjusterProvider, null);
     HlsSampleSource subtitleSampleSource = new HlsSampleSource(subtitleChunkSource, loadControl,
-        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
+        TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, C.TRACK_TYPE_TEXT);

     return new MultiSampleSource(defaultSampleSource, audioSampleSource, subtitleSampleSource);
   }