Steps toward full multi-track support.

1. Remove requirement for TrackRenderer implementations to report
   current position, unless they are time sources.
2. Expose whether renderers have media to play. The immediate benefits
   are that it solves the referenced GitHub issue and allows the demo
   app to display only the Audio/Video/Text buttons that are relevant
   to the media being played. It is also a natural step toward
   multi-track support. Illustrative sketches of both changes follow
   below.
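
For readers skimming the diffs below, here is a minimal, self-contained sketch of the pattern adopted for point 1. The classes are simplified stand-ins rather than ExoPlayer's real TrackRenderer and MediaClock: only a renderer that drives playback time exposes a clock via getMediaClock(), every other renderer returns null, and the player falls back to its standalone clock.

public final class MediaClockPatternSketch {

  // Simplified stand-in for com.google.android.exoplayer.MediaClock.
  interface MediaClock {
    long getPositionUs();
  }

  // Simplified stand-in for TrackRenderer. Note the absence of getCurrentPositionUs():
  // renderers no longer report a position unless they provide a MediaClock.
  abstract static class Renderer {
    // Returns a MediaClock if this renderer advances its own playback position, else null.
    MediaClock getMediaClock() {
      return null;
    }
  }

  // An audio-style renderer that acts as the source of playback time.
  static final class AudioRenderer extends Renderer implements MediaClock {
    private long positionUs;

    @Override
    MediaClock getMediaClock() {
      return this;
    }

    @Override
    public long getPositionUs() {
      return positionUs;
    }
  }

  // A video-style renderer, which no longer needs to track or report a position.
  static final class VideoRenderer extends Renderer {}

  public static void main(String[] args) {
    Renderer[] renderers = {new VideoRenderer(), new AudioRenderer()};
    MediaClock rendererMediaClock = null;
    for (Renderer renderer : renderers) {
      MediaClock clock = renderer.getMediaClock();
      if (clock != null) {
        // The player allows at most one renderer to provide the clock.
        rendererMediaClock = clock;
      }
    }
    // Use the renderer's clock if one exists; a real player would otherwise fall back
    // to a standalone clock (see StandaloneMediaClock in the diffs below).
    long positionUs = rendererMediaClock != null ? rendererMediaClock.getPositionUs() : 0;
    System.out.println("positionUs=" + positionUs);
  }
}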

GitHub issue: #541
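
Relatedly, a sketch of how application code might use the new ExoPlayer.getRendererHasMedia() method that underpins point 2 and the issue fix above. The helper class, its field names and the renderer index are illustrative assumptions, not part of this commit; the demo app achieves the same effect via DemoPlayer.getTrackCount(), as shown in the PlayerActivity diff below.

import android.view.View;

import com.google.android.exoplayer.ExoPlayer;

// Illustrative helper (assumed, not from the commit) that shows or hides a control
// depending on whether the corresponding renderer has media to play.
final class RendererControlVisibility {

  private final ExoPlayer player;
  private final View textButton;
  private final int textRendererIndex;

  RendererControlVisibility(ExoPlayer player, View textButton, int textRendererIndex) {
    this.player = player;
    this.textButton = textButton;
    this.textRendererIndex = textRendererIndex;
  }

  // Call once preparation has completed; getRendererHasMedia() always returns false
  // whilst the player is in STATE_PREPARING.
  void update() {
    boolean hasMedia = player.getRendererHasMedia(textRendererIndex);
    textButton.setVisibility(hasMedia ? View.VISIBLE : View.GONE);
  }
}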
Oliver Woodman 2015-06-18 14:24:53 +01:00
parent 2c29d88330
commit 8e58a3f5f5
16 changed files with 285 additions and 266 deletions

View File

@ -268,6 +268,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
return new ExtractorRendererBuilder(this, userAgent, contentUri,
new FragmentedMp4Extractor());
case TYPE_WEBM:
case TYPE_MKV:
return new ExtractorRendererBuilder(this, userAgent, contentUri, new WebmExtractor());
default:
throw new IllegalStateException("Unsupported type: " + contentType);
@ -377,7 +378,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
}
private boolean haveTracks(int type) {
return player != null && player.getTracks(type) != null;
return player != null && player.getTrackCount(type) > 0;
}
public void showVideoPopup(View v) {
@ -440,8 +441,8 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
if (player == null) {
return;
}
String[] tracks = player.getTracks(trackType);
if (tracks == null) {
int trackCount = player.getTrackCount(trackType);
if (trackCount == 0) {
return;
}
popup.setOnMenuItemClickListener(new OnMenuItemClickListener() {
@ -455,11 +456,11 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
Menu menu = popup.getMenu();
// ID_OFFSET ensures we avoid clashing with Menu.NONE (which equals 0)
menu.add(MENU_GROUP_TRACKS, DemoPlayer.DISABLED_TRACK + ID_OFFSET, Menu.NONE, R.string.off);
if (tracks.length == 1 && TextUtils.isEmpty(tracks[0])) {
if (trackCount == 1 && TextUtils.isEmpty(player.getTrackName(trackType, 0))) {
menu.add(MENU_GROUP_TRACKS, DemoPlayer.PRIMARY_TRACK + ID_OFFSET, Menu.NONE, R.string.on);
} else {
for (int i = 0; i < tracks.length; i++) {
menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE, tracks[i]);
for (int i = 0; i < trackCount; i++) {
menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE, player.getTrackName(trackType, i));
}
}
menu.setGroupCheckable(MENU_GROUP_TRACKS, true, true);

View File

@ -263,8 +263,12 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
pushSurface(true);
}
public String[] getTracks(int type) {
return trackNames == null ? null : trackNames[type];
public int getTrackCount(int type) {
return !player.getRendererHasMedia(type) ? 0 : trackNames[type].length;
}
public String getTrackName(int type, int index) {
return trackNames[type][index];
}
public int getSelectedTrackIndex(int type) {
@ -323,15 +327,16 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
if (multiTrackSources == null) {
multiTrackSources = new MultiTrackChunkSource[RENDERER_COUNT];
}
for (int i = 0; i < RENDERER_COUNT; i++) {
if (renderers[i] == null) {
for (int rendererIndex = 0; rendererIndex < RENDERER_COUNT; rendererIndex++) {
if (renderers[rendererIndex] == null) {
// Convert a null renderer to a dummy renderer.
renderers[i] = new DummyTrackRenderer();
} else if (trackNames[i] == null) {
// We have a renderer so we must have at least one track, but the names are unknown.
// Initialize the correct number of null track names.
int trackCount = multiTrackSources[i] == null ? 1 : multiTrackSources[i].getTrackCount();
trackNames[i] = new String[trackCount];
renderers[rendererIndex] = new DummyTrackRenderer();
}
if (trackNames[rendererIndex] == null) {
// Convert a null trackNames to an array of suitable length.
int trackCount = multiTrackSources[rendererIndex] != null
? multiTrackSources[rendererIndex].getTrackCount() : 1;
trackNames[rendererIndex] = new String[trackCount];
}
}
// Complete preparation.

View File

@ -17,6 +17,7 @@ package com.google.android.exoplayer.ext.opus;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaClock;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleSource;
@ -38,7 +39,7 @@ import java.util.List;
*
* @author vigneshv@google.com (Vignesh Venkatasubramanian)
*/
public class LibopusAudioTrackRenderer extends TrackRenderer {
public class LibopusAudioTrackRenderer extends TrackRenderer implements MediaClock {
/**
* Interface definition for a callback to be notified of {@link LibopusAudioTrackRenderer} events.
@ -87,6 +88,7 @@ public class LibopusAudioTrackRenderer extends TrackRenderer {
private int trackIndex;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean sourceIsReady;
@ -119,8 +121,8 @@ public class LibopusAudioTrackRenderer extends TrackRenderer {
}
@Override
protected boolean isTimeSource() {
return true;
protected MediaClock getMediaClock() {
return this;
}
@Override
@ -237,7 +239,7 @@ public class LibopusAudioTrackRenderer extends TrackRenderer {
// If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
currentPositionUs = Long.MIN_VALUE;
allowPositionDiscontinuity = true;
}
// Release the buffer if it was consumed.
@ -323,26 +325,31 @@ public class LibopusAudioTrackRenderer extends TrackRenderer {
}
@Override
protected long getCurrentPositionUs() {
long audioTrackCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (audioTrackCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
// Make sure we don't ever report time moving backwards.
currentPositionUs = Math.max(currentPositionUs, audioTrackCurrentPositionUs);
public long getPositionUs() {
long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
: Math.max(currentPositionUs, newCurrentPositionUs);
allowPositionDiscontinuity = false;
}
return currentPositionUs;
}
@Override
protected long getBufferedPositionUs() {
long sourceBufferedPosition = source.getBufferedPositionUs();
return sourceBufferedPosition == UNKNOWN_TIME_US || sourceBufferedPosition == END_OF_TRACK_US
? sourceBufferedPosition : Math.max(sourceBufferedPosition, getCurrentPositionUs());
return source.getBufferedPositionUs();
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
source.seekToUs(positionUs);
seekToInternal(positionUs);
}
private void seekToInternal(long positionUs) {
audioTrack.reset();
currentPositionUs = positionUs;
allowPositionDiscontinuity = true;
source.seekToUs(positionUs);
inputStreamEnded = false;
outputStreamEnded = false;
@ -352,10 +359,7 @@ public class LibopusAudioTrackRenderer extends TrackRenderer {
@Override
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
sourceIsReady = false;
inputStreamEnded = false;
outputStreamEnded = false;
currentPositionUs = Long.MIN_VALUE;
seekToInternal(positionUs);
}
@Override

View File

@ -111,7 +111,6 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
private boolean outputRgb;
private int trackIndex;
private long currentPositionUs;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean sourceIsReady;
@ -181,9 +180,9 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
}
try {
sourceIsReady = source.continueBuffering(positionUs);
checkForDiscontinuity();
checkForDiscontinuity(positionUs);
if (format == null) {
readFormat();
readFormat(positionUs);
} else {
// TODO: Add support for dynamic switching between one type of surface to another.
// Create the decoder.
@ -194,7 +193,7 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
processOutputBuffer(positionUs, elapsedRealtimeUs);
// Queue input buffers.
while (feedInputBuffer()) {}
while (feedInputBuffer(positionUs)) {}
}
} catch (VpxDecoderException e) {
notifyDecoderError(e);
@ -226,7 +225,7 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
long elapsedSinceStartOfLoop = SystemClock.elapsedRealtime() * 1000 - elapsedRealtimeUs;
long timeToRenderUs = outputBuffer.timestampUs - positionUs - elapsedSinceStartOfLoop;
if (timeToRenderUs < -30000 || outputBuffer.timestampUs < currentPositionUs) {
if (timeToRenderUs < -30000 || outputBuffer.timestampUs < positionUs) {
// Drop frame if we are too late.
droppedFrameCount++;
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
@ -276,7 +275,6 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
}
private void releaseOutputBuffer() throws VpxDecoderException {
currentPositionUs = outputBuffer.timestampUs;
decoder.releaseOutputBuffer(outputBuffer);
outputBuffer = null;
}
@ -296,7 +294,7 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
surface.unlockCanvasAndPost(canvas);
}
private boolean feedInputBuffer() throws IOException, VpxDecoderException {
private boolean feedInputBuffer(long positionUs) throws IOException, VpxDecoderException {
if (inputStreamEnded) {
return false;
}
@ -308,8 +306,8 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
}
}
int result = source.readData(trackIndex, currentPositionUs, formatHolder,
inputBuffer.sampleHolder, false);
int result = source.readData(trackIndex, positionUs, formatHolder, inputBuffer.sampleHolder,
false);
if (result == SampleSource.NOTHING_READ) {
return false;
}
@ -336,11 +334,11 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
return true;
}
private void checkForDiscontinuity() throws IOException {
private void checkForDiscontinuity(long positionUs) throws IOException {
if (decoder == null) {
return;
}
int result = source.readData(trackIndex, currentPositionUs, formatHolder, null, true);
int result = source.readData(trackIndex, positionUs, formatHolder, null, true);
if (result == SampleSource.DISCONTINUITY_READ) {
flushDecoder();
}
@ -367,36 +365,28 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
return source.getTrackInfo(trackIndex).durationUs;
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;
}
@Override
protected long getBufferedPositionUs() {
long sourceBufferedPosition = source.getBufferedPositionUs();
return sourceBufferedPosition == UNKNOWN_TIME_US || sourceBufferedPosition == END_OF_TRACK_US
? sourceBufferedPosition : Math.max(sourceBufferedPosition, getCurrentPositionUs());
return source.getBufferedPositionUs();
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
currentPositionUs = positionUs;
source.seekToUs(positionUs);
inputStreamEnded = false;
outputStreamEnded = false;
renderedFirstFrame = false;
sourceIsReady = false;
seekToInternal();
}
@Override
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
seekToInternal();
}
private void seekToInternal() {
sourceIsReady = false;
inputStreamEnded = false;
outputStreamEnded = false;
renderedFirstFrame = false;
currentPositionUs = positionUs;
}
@Override
@ -427,8 +417,8 @@ public class LibvpxVideoTrackRenderer extends TrackRenderer {
source.disable(trackIndex);
}
private void readFormat() throws IOException {
int result = source.readData(trackIndex, currentPositionUs, formatHolder, null, false);
private void readFormat(long positionUs) throws IOException {
int result = source.readData(trackIndex, positionUs, formatHolder, null, false);
if (result == SampleSource.FORMAT_READ) {
format = formatHolder.format;
}

View File

@ -59,9 +59,4 @@ public class DummyTrackRenderer extends TrackRenderer {
throw new IllegalStateException();
}
@Override
protected long getCurrentPositionUs() {
throw new IllegalStateException();
}
}

View File

@ -261,21 +261,31 @@ public interface ExoPlayer {
*/
public void prepare(TrackRenderer... renderers);
/**
* Returns whether the renderer at the given index has media to play.
* <p>
* Always returns false whilst the player is in the {@link #STATE_PREPARING} state.
*
* @param rendererIndex The index of the renderer.
* @return True if the renderer has media to play, false otherwise.
*/
public boolean getRendererHasMedia(int rendererIndex);
/**
* Sets whether the renderer at the given index is enabled.
*
* @param index The index of the renderer.
* @param rendererIndex The index of the renderer.
* @param enabled Whether the renderer at the given index should be enabled.
*/
public void setRendererEnabled(int index, boolean enabled);
public void setRendererEnabled(int rendererIndex, boolean enabled);
/**
* Whether the renderer at the given index is enabled.
*
* @param index The index of the renderer.
* @param rendererIndex The index of the renderer.
* @return Whether the renderer is enabled.
*/
public boolean getRendererEnabled(int index);
public boolean getRendererEnabled(int rendererIndex);
/**
* Sets whether playback should proceed when {@link #getPlaybackState()} == {@link #STATE_READY}.

View File

@ -21,6 +21,7 @@ import android.os.Looper;
import android.os.Message;
import android.util.Log;
import java.util.Arrays;
import java.util.concurrent.CopyOnWriteArraySet;
/**
@ -33,6 +34,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
private final Handler eventHandler;
private final ExoPlayerImplInternal internalPlayer;
private final CopyOnWriteArraySet<Listener> listeners;
private final boolean[] rendererHasMediaFlags;
private final boolean[] rendererEnabledFlags;
private boolean playWhenReady;
@ -56,6 +58,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
this.playWhenReady = false;
this.playbackState = STATE_IDLE;
this.listeners = new CopyOnWriteArraySet<>();
this.rendererHasMediaFlags = new boolean[rendererCount];
this.rendererEnabledFlags = new boolean[rendererCount];
for (int i = 0; i < rendererEnabledFlags.length; i++) {
rendererEnabledFlags[i] = true;
@ -92,20 +95,26 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public void prepare(TrackRenderer... renderers) {
Arrays.fill(rendererHasMediaFlags, false);
internalPlayer.prepare(renderers);
}
@Override
public void setRendererEnabled(int index, boolean enabled) {
if (rendererEnabledFlags[index] != enabled) {
rendererEnabledFlags[index] = enabled;
internalPlayer.setRendererEnabled(index, enabled);
public boolean getRendererHasMedia(int rendererIndex) {
return rendererHasMediaFlags[rendererIndex];
}
@Override
public void setRendererEnabled(int rendererIndex, boolean enabled) {
if (rendererEnabledFlags[rendererIndex] != enabled) {
rendererEnabledFlags[rendererIndex] = enabled;
internalPlayer.setRendererEnabled(rendererIndex, enabled);
}
}
@Override
public boolean getRendererEnabled(int index) {
return rendererEnabledFlags[index];
public boolean getRendererEnabled(int rendererIndex) {
return rendererEnabledFlags[rendererIndex];
}
@Override
@ -182,6 +191,16 @@ import java.util.concurrent.CopyOnWriteArraySet;
// Not private so it can be called from an inner class without going through a thunk method.
/* package */ void handleEvent(Message msg) {
switch (msg.what) {
case ExoPlayerImplInternal.MSG_PREPARED: {
boolean[] rendererHasMediaFlags = (boolean[]) msg.obj;
System.arraycopy(rendererHasMediaFlags, 0, this.rendererHasMediaFlags, 0,
rendererHasMediaFlags.length);
playbackState = msg.arg1;
for (Listener listener : listeners) {
listener.onPlayerStateChanged(playWhenReady, playbackState);
}
break;
}
case ExoPlayerImplInternal.MSG_STATE_CHANGED: {
playbackState = msg.arg1;
for (Listener listener : listeners) {

View File

@ -40,9 +40,10 @@ import java.util.List;
private static final String TAG = "ExoPlayerImplInternal";
// External messages
public static final int MSG_STATE_CHANGED = 1;
public static final int MSG_SET_PLAY_WHEN_READY_ACK = 2;
public static final int MSG_ERROR = 3;
public static final int MSG_PREPARED = 1;
public static final int MSG_STATE_CHANGED = 2;
public static final int MSG_SET_PLAY_WHEN_READY_ACK = 3;
public static final int MSG_ERROR = 4;
// Internal messages
private static final int MSG_PREPARE = 1;
@ -62,14 +63,15 @@ import java.util.List;
private final Handler handler;
private final HandlerThread internalPlaybackThread;
private final Handler eventHandler;
private final MediaClock mediaClock;
private final StandaloneMediaClock standaloneMediaClock;
private final boolean[] rendererEnabledFlags;
private final long minBufferUs;
private final long minRebufferUs;
private final List<TrackRenderer> enabledRenderers;
private TrackRenderer[] renderers;
private TrackRenderer timeSourceTrackRenderer;
private TrackRenderer rendererMediaClockSource;
private MediaClock rendererMediaClock;
private boolean released;
private boolean playWhenReady;
@ -98,7 +100,7 @@ import java.util.List;
this.durationUs = TrackRenderer.UNKNOWN_TIME_US;
this.bufferedPositionUs = TrackRenderer.UNKNOWN_TIME_US;
mediaClock = new MediaClock();
standaloneMediaClock = new StandaloneMediaClock();
enabledRenderers = new ArrayList<>(rendererEnabledFlags.length);
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
@ -250,9 +252,11 @@ import java.util.List;
resetInternal();
this.renderers = renderers;
for (int i = 0; i < renderers.length; i++) {
if (renderers[i].isTimeSource()) {
Assertions.checkState(timeSourceTrackRenderer == null);
timeSourceTrackRenderer = renderers[i];
MediaClock mediaClock = renderers[i].getMediaClock();
if (mediaClock != null) {
Assertions.checkState(rendererMediaClock == null);
rendererMediaClock = mediaClock;
rendererMediaClockSource = renderers[i];
}
}
setState(ExoPlayer.STATE_PREPARING);
@ -280,9 +284,11 @@ import java.util.List;
long durationUs = 0;
boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true;
for (int i = 0; i < renderers.length; i++) {
TrackRenderer renderer = renderers[i];
if (renderer.getState() == TrackRenderer.STATE_PREPARED) {
boolean[] rendererHasMediaFlags = new boolean[renderers.length];
for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
TrackRenderer renderer = renderers[rendererIndex];
rendererHasMediaFlags[rendererIndex] = renderer.getState() == TrackRenderer.STATE_PREPARED;
if (rendererHasMediaFlags[rendererIndex]) {
if (durationUs == TrackRenderer.UNKNOWN_TIME_US) {
// We've already encountered a track for which the duration is unknown, so the media
// duration is unknown regardless of the duration of this track.
@ -296,7 +302,7 @@ import java.util.List;
durationUs = Math.max(durationUs, trackDurationUs);
}
}
if (rendererEnabledFlags[i]) {
if (rendererEnabledFlags[rendererIndex]) {
renderer.enable(positionUs, false);
enabledRenderers.add(renderer);
allRenderersEnded = allRenderersEnded && renderer.isEnded();
@ -309,14 +315,19 @@ import java.util.List;
if (allRenderersEnded
&& (durationUs == TrackRenderer.UNKNOWN_TIME_US || durationUs <= positionUs)) {
// We don't expect this case, but handle it anyway.
setState(ExoPlayer.STATE_ENDED);
state = ExoPlayer.STATE_ENDED;
} else {
setState(allRenderersReadyOrEnded ? ExoPlayer.STATE_READY : ExoPlayer.STATE_BUFFERING);
if (playWhenReady && state == ExoPlayer.STATE_READY) {
startRenderers();
}
state = allRenderersReadyOrEnded ? ExoPlayer.STATE_READY : ExoPlayer.STATE_BUFFERING;
}
// Fire an event indicating that the player has been prepared, passing the initial state and
// renderer media flags.
eventHandler.obtainMessage(MSG_PREPARED, state, 0, rendererHasMediaFlags).sendToTarget();
// Start the renderers if required, and schedule the first piece of work.
if (playWhenReady && state == ExoPlayer.STATE_READY) {
startRenderers();
}
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
@ -364,26 +375,26 @@ import java.util.List;
private void startRenderers() throws ExoPlaybackException {
rebuffering = false;
mediaClock.start();
standaloneMediaClock.start();
for (int i = 0; i < enabledRenderers.size(); i++) {
enabledRenderers.get(i).start();
}
}
private void stopRenderers() throws ExoPlaybackException {
mediaClock.stop();
standaloneMediaClock.stop();
for (int i = 0; i < enabledRenderers.size(); i++) {
ensureStopped(enabledRenderers.get(i));
}
}
private void updatePositionUs() {
if (timeSourceTrackRenderer != null && enabledRenderers.contains(timeSourceTrackRenderer)
&& !timeSourceTrackRenderer.isEnded()) {
positionUs = timeSourceTrackRenderer.getCurrentPositionUs();
mediaClock.setPositionUs(positionUs);
if (rendererMediaClock != null && enabledRenderers.contains(rendererMediaClockSource)
&& !rendererMediaClockSource.isEnded()) {
positionUs = rendererMediaClock.getPositionUs();
standaloneMediaClock.setPositionUs(positionUs);
} else {
positionUs = mediaClock.getPositionUs();
positionUs = standaloneMediaClock.getPositionUs();
}
elapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
}
@ -464,8 +475,8 @@ import java.util.List;
private void seekToInternal(long positionMs) throws ExoPlaybackException {
rebuffering = false;
positionUs = positionMs * 1000L;
mediaClock.stop();
mediaClock.setPositionUs(positionUs);
standaloneMediaClock.stop();
standaloneMediaClock.setPositionUs(positionUs);
if (state == ExoPlayer.STATE_IDLE || state == ExoPlayer.STATE_PREPARING) {
return;
}
@ -496,7 +507,7 @@ import java.util.List;
handler.removeMessages(MSG_DO_SOME_WORK);
handler.removeMessages(MSG_INCREMENTAL_PREPARE);
rebuffering = false;
mediaClock.stop();
standaloneMediaClock.stop();
if (renderers == null) {
return;
}
@ -506,7 +517,8 @@ import java.util.List;
release(renderer);
}
renderers = null;
timeSourceTrackRenderer = null;
rendererMediaClock = null;
rendererMediaClockSource = null;
enabledRenderers.clear();
}
@ -555,18 +567,18 @@ import java.util.List;
}
}
private void setRendererEnabledInternal(int index, boolean enabled)
private void setRendererEnabledInternal(int rendererIndex, boolean enabled)
throws ExoPlaybackException {
if (rendererEnabledFlags[index] == enabled) {
if (rendererEnabledFlags[rendererIndex] == enabled) {
return;
}
rendererEnabledFlags[index] = enabled;
rendererEnabledFlags[rendererIndex] = enabled;
if (state == ExoPlayer.STATE_IDLE || state == ExoPlayer.STATE_PREPARING) {
return;
}
TrackRenderer renderer = renderers[index];
TrackRenderer renderer = renderers[rendererIndex];
int rendererState = renderer.getState();
if (rendererState != TrackRenderer.STATE_PREPARED &&
rendererState != TrackRenderer.STATE_ENABLED &&
@ -583,10 +595,10 @@ import java.util.List;
}
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
} else {
if (renderer == timeSourceTrackRenderer) {
if (renderer == rendererMediaClockSource) {
// We've been using timeSourceTrackRenderer to advance the current position, but it's
// being disabled. Sync mediaClock so that it can take over timing responsibilities.
mediaClock.setPositionUs(renderer.getCurrentPositionUs());
standaloneMediaClock.setPositionUs(rendererMediaClock.getPositionUs());
}
ensureStopped(renderer);
enabledRenderers.remove(renderer);

View File

@ -15,65 +15,14 @@
*/
package com.google.android.exoplayer;
import android.os.SystemClock;
/**
* A simple clock for tracking the progression of media time. The clock can be started, stopped and
* its time can be set and retrieved. When started, this clock is based on
* {@link SystemClock#elapsedRealtime()}.
* Tracks the progression of media time.
*/
/* package */ class MediaClock {
private boolean started;
public interface MediaClock {
/**
* The media time when the clock was last set or stopped.
* @return The current media position in microseconds.
*/
private long positionUs;
/**
* The difference between {@link SystemClock#elapsedRealtime()} and {@link #positionUs}
* when the clock was last set or started.
*/
private long deltaUs;
/**
* Starts the clock. Does nothing if the clock is already started.
*/
public void start() {
if (!started) {
started = true;
deltaUs = elapsedRealtimeMinus(positionUs);
}
}
/**
* Stops the clock. Does nothing if the clock is already stopped.
*/
public void stop() {
if (started) {
positionUs = elapsedRealtimeMinus(deltaUs);
started = false;
}
}
/**
* @param timeUs The position to set in microseconds.
*/
public void setPositionUs(long timeUs) {
this.positionUs = timeUs;
deltaUs = elapsedRealtimeMinus(timeUs);
}
/**
* @return The current position in microseconds.
*/
public long getPositionUs() {
return started ? elapsedRealtimeMinus(deltaUs) : positionUs;
}
private long elapsedRealtimeMinus(long toSubtractUs) {
return SystemClock.elapsedRealtime() * 1000 - toSubtractUs;
}
long getPositionUs();
}

View File

@ -31,7 +31,7 @@ import java.nio.ByteBuffer;
* Decodes and renders audio using {@link MediaCodec} and {@link android.media.AudioTrack}.
*/
@TargetApi(16)
public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implements MediaClock {
/**
* Interface definition for a callback to be notified of {@link MediaCodecAudioTrackRenderer}
@ -72,6 +72,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
private int audioSessionId;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
/**
* @param source The upstream source from which the renderer obtains samples.
@ -151,8 +152,8 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
}
@Override
protected boolean isTimeSource() {
return true;
protected MediaClock getMediaClock() {
return this;
}
@Override
@ -163,7 +164,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected void onEnabled(long positionUs, boolean joining) {
super.onEnabled(positionUs, joining);
currentPositionUs = Long.MIN_VALUE;
seekToInternal(positionUs);
}
@Override
@ -219,14 +220,12 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
}
@Override
protected long getCurrentPositionUs() {
long audioTrackCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (audioTrackCurrentPositionUs == AudioTrack.CURRENT_POSITION_NOT_SET) {
// Use the super class position before audio playback starts.
currentPositionUs = Math.max(currentPositionUs, super.getCurrentPositionUs());
} else {
// Make sure we don't ever report time moving backwards.
currentPositionUs = Math.max(currentPositionUs, audioTrackCurrentPositionUs);
public long getPositionUs() {
long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
: Math.max(currentPositionUs, newCurrentPositionUs);
allowPositionDiscontinuity = false;
}
return currentPositionUs;
}
@ -244,9 +243,14 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
super.seekTo(positionUs);
seekToInternal(positionUs);
}
private void seekToInternal(long positionUs) {
// TODO: Try and re-use the same AudioTrack instance once [Internal: b/7941810] is fixed.
audioTrack.reset();
currentPositionUs = Long.MIN_VALUE;
currentPositionUs = positionUs;
allowPositionDiscontinuity = true;
}
@Override
@ -290,7 +294,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
// If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
currentPositionUs = Long.MIN_VALUE;
allowPositionDiscontinuity = true;
}
// Release the buffer if it was consumed.

View File

@ -212,7 +212,6 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
private boolean outputStreamEnded;
private boolean waitingForKeys;
private boolean waitingForFirstSyncFrame;
private long currentPositionUs;
/**
* @param source The upstream source from which the renderer obtains samples.
@ -283,11 +282,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
@Override
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
sourceState = SOURCE_STATE_NOT_READY;
inputStreamEnded = false;
outputStreamEnded = false;
waitingForKeys = false;
currentPositionUs = positionUs;
seekToInternal();
}
/**
@ -457,11 +452,6 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
source.release();
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;
}
@Override
protected long getDurationUs() {
return source.getTrackInfo(trackIndex).durationUs;
@ -469,15 +459,16 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
@Override
protected long getBufferedPositionUs() {
long sourceBufferedPosition = source.getBufferedPositionUs();
return sourceBufferedPosition == UNKNOWN_TIME_US || sourceBufferedPosition == END_OF_TRACK_US
? sourceBufferedPosition : Math.max(sourceBufferedPosition, getCurrentPositionUs());
return source.getBufferedPositionUs();
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
currentPositionUs = positionUs;
source.seekToUs(positionUs);
seekToInternal();
}
private void seekToInternal() {
sourceState = SOURCE_STATE_NOT_READY;
inputStreamEnded = false;
outputStreamEnded = false;
@ -499,9 +490,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
sourceState = source.continueBuffering(positionUs)
? (sourceState == SOURCE_STATE_NOT_READY ? SOURCE_STATE_READY : sourceState)
: SOURCE_STATE_NOT_READY;
checkForDiscontinuity();
checkForDiscontinuity(positionUs);
if (format == null) {
readFormat();
readFormat(positionUs);
}
if (codec == null && shouldInitCodec()) {
maybeInitCodec();
@ -509,8 +500,8 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
if (codec != null) {
TraceUtil.beginSection("drainAndFeed");
while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {}
if (feedInputBuffer(true)) {
while (feedInputBuffer(false)) {}
if (feedInputBuffer(positionUs, true)) {
while (feedInputBuffer(positionUs, false)) {}
}
TraceUtil.endSection();
}
@ -520,18 +511,18 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
private void readFormat() throws IOException, ExoPlaybackException {
int result = source.readData(trackIndex, currentPositionUs, formatHolder, sampleHolder, false);
private void readFormat(long positionUs) throws IOException, ExoPlaybackException {
int result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
if (result == SampleSource.FORMAT_READ) {
onInputFormatChanged(formatHolder);
}
}
private void checkForDiscontinuity() throws IOException, ExoPlaybackException {
private void checkForDiscontinuity(long positionUs) throws IOException, ExoPlaybackException {
if (codec == null) {
return;
}
int result = source.readData(trackIndex, currentPositionUs, formatHolder, sampleHolder, true);
int result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, true);
if (result == SampleSource.DISCONTINUITY_READ) {
flushCodec();
}
@ -561,13 +552,16 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
/**
* @param positionUs The current media time in microseconds, measured at the start of the
* current iteration of the rendering loop.
* @param firstFeed True if this is the first call to this method from the current invocation of
* {@link #doSomeWork(long, long)}. False otherwise.
* @return True if it may be possible to feed more input data. False otherwise.
* @throws IOException If an error occurs reading data from the upstream source.
* @throws ExoPlaybackException If an error occurs feeding the input buffer.
*/
private boolean feedInputBuffer(boolean firstFeed) throws IOException, ExoPlaybackException {
private boolean feedInputBuffer(long positionUs, boolean firstFeed)
throws IOException, ExoPlaybackException {
if (inputStreamEnded
|| codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
// The input stream has ended, or we need to re-initialize the codec but are still waiting
@ -607,7 +601,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING;
}
result = source.readData(trackIndex, currentPositionUs, formatHolder, sampleHolder, false);
result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
if (firstFeed && sourceState == SOURCE_STATE_READY && result == SampleSource.NOTHING_READ) {
sourceState = SOURCE_STATE_READY_READ_MAY_FAIL;
}
@ -857,8 +851,6 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
if (decodeOnlyIndex != -1) {
decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
} else {
currentPositionUs = outputBufferInfo.presentationTimeUs;
}
outputIndex = -1;
return true;

View File

@ -0,0 +1,76 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import android.os.SystemClock;
/**
* A standalone {@link MediaClock}. The clock can be started, stopped and its time can be set and
* retrieved. When started, this clock is based on {@link SystemClock#elapsedRealtime()}.
*/
/* package */ class StandaloneMediaClock implements MediaClock {
private boolean started;
/**
* The media time when the clock was last set or stopped.
*/
private long positionUs;
/**
* The difference between {@link SystemClock#elapsedRealtime()} and {@link #positionUs}
* when the clock was last set or started.
*/
private long deltaUs;
/**
* Starts the clock. Does nothing if the clock is already started.
*/
public void start() {
if (!started) {
started = true;
deltaUs = elapsedRealtimeMinus(positionUs);
}
}
/**
* Stops the clock. Does nothing if the clock is already stopped.
*/
public void stop() {
if (started) {
positionUs = elapsedRealtimeMinus(deltaUs);
started = false;
}
}
/**
* @param timeUs The position to set in microseconds.
*/
public void setPositionUs(long timeUs) {
this.positionUs = timeUs;
deltaUs = elapsedRealtimeMinus(timeUs);
}
@Override
public long getPositionUs() {
return started ? elapsedRealtimeMinus(deltaUs) : positionUs;
}
private long elapsedRealtimeMinus(long toSubtractUs) {
return SystemClock.elapsedRealtime() * 1000 - toSubtractUs;
}
}

View File

@ -81,18 +81,15 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
private int state;
/**
* A time source renderer is a renderer that, when started, advances its own playback position.
* This means that {@link #getCurrentPositionUs()} will return increasing positions independently
* to increasing values being passed to {@link #doSomeWork(long, long)}. A player may have at most
* one time source renderer. If provided, the player will use such a renderer as its source of
* time during playback.
* <p>
* This method may be called when the renderer is in any state.
* If the renderer advances its own playback position then this method returns a corresponding
* {@link MediaClock}. If provided, the player will use the returned {@link MediaClock} as its
* source of time during playback. A player may have at most one renderer that returns a
* {@link MediaClock} from this method.
*
* @return True if the renderer should be considered a time source. False otherwise.
* @return The {@link MediaClock} tracking the playback position of the renderer, or null.
*/
protected boolean isTimeSource() {
return false;
protected MediaClock getMediaClock() {
return null;
}
/**
@ -312,16 +309,6 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
*/
protected abstract long getDurationUs();
/**
* Returns the current playback position.
* <p>
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}, {@link #STATE_STARTED}
*
* @return The current playback position in microseconds.
*/
protected abstract long getCurrentPositionUs();
/**
* Returns an estimate of the absolute position in microseconds up to which data is buffered.
* <p>

View File

@ -63,7 +63,6 @@ public class MetadataTrackRenderer<T> extends TrackRenderer implements Callback
private final SampleHolder sampleHolder;
private int trackIndex;
private long currentPositionUs;
private boolean inputStreamEnded;
private long pendingMetadataTimestamp;
@ -112,17 +111,16 @@ public class MetadataTrackRenderer<T> extends TrackRenderer implements Callback
@Override
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
seekToInternal(positionUs);
seekToInternal();
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
source.seekToUs(positionUs);
seekToInternal(positionUs);
seekToInternal();
}
private void seekToInternal(long positionUs) {
currentPositionUs = positionUs;
private void seekToInternal() {
pendingMetadata = null;
inputStreamEnded = false;
}
@ -130,7 +128,6 @@ public class MetadataTrackRenderer<T> extends TrackRenderer implements Callback
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
currentPositionUs = positionUs;
try {
source.continueBuffering(positionUs);
} catch (IOException e) {
@ -152,7 +149,7 @@ public class MetadataTrackRenderer<T> extends TrackRenderer implements Callback
}
}
if (pendingMetadata != null && pendingMetadataTimestamp <= currentPositionUs) {
if (pendingMetadata != null && pendingMetadataTimestamp <= positionUs) {
invokeRenderer(pendingMetadata);
pendingMetadata = null;
}
@ -169,11 +166,6 @@ public class MetadataTrackRenderer<T> extends TrackRenderer implements Callback
return source.getTrackInfo(trackIndex).durationUs;
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;
}
@Override
protected long getBufferedPositionUs() {
return TrackRenderer.END_OF_TRACK_US;

View File

@ -52,7 +52,6 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
private int parserIndex;
private int trackIndex;
private long currentPositionUs;
private boolean inputStreamEnded;
private Subtitle subtitle;
@ -110,18 +109,17 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
parserThread = new HandlerThread("textParser");
parserThread.start();
parserHelper = new SubtitleParserHelper(parserThread.getLooper(), subtitleParsers[parserIndex]);
seekToInternal(positionUs);
seekToInternal();
}
@Override
protected void seekTo(long positionUs) {
source.seekToUs(positionUs);
seekToInternal(positionUs);
seekToInternal();
}
private void seekToInternal(long positionUs) {
private void seekToInternal() {
inputStreamEnded = false;
currentPositionUs = positionUs;
subtitle = null;
nextSubtitle = null;
parserHelper.flush();
@ -130,7 +128,6 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
currentPositionUs = positionUs;
try {
source.continueBuffering(positionUs);
} catch (IOException e) {
@ -205,11 +202,6 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
source.release();
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;
}
@Override
protected long getDurationUs() {
return source.getTrackInfo(trackIndex).durationUs;

View File

@ -63,7 +63,6 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
private final TreeSet<ClosedCaptionList> pendingCaptionLists;
private int trackIndex;
private long currentPositionUs;
private boolean inputStreamEnded;
private int captionMode;
@ -114,17 +113,16 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
@Override
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
seekToInternal(positionUs);
seekToInternal();
}
@Override
protected void seekTo(long positionUs) throws ExoPlaybackException {
source.seekToUs(positionUs);
seekToInternal(positionUs);
seekToInternal();
}
private void seekToInternal(long positionUs) {
currentPositionUs = positionUs;
private void seekToInternal() {
inputStreamEnded = false;
pendingCaptionLists.clear();
clearPendingSample();
@ -134,9 +132,7 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
currentPositionUs = positionUs;
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
try {
source.continueBuffering(positionUs);
} catch (IOException e) {
@ -144,7 +140,7 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
}
if (isSamplePending()) {
maybeParsePendingSample();
maybeParsePendingSample(positionUs);
}
int result = inputStreamEnded ? SampleSource.END_OF_STREAM : SampleSource.SAMPLE_READ;
@ -152,7 +148,7 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
try {
result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
if (result == SampleSource.SAMPLE_READ) {
maybeParsePendingSample();
maybeParsePendingSample(positionUs);
} else if (result == SampleSource.END_OF_STREAM) {
inputStreamEnded = true;
}
@ -162,7 +158,7 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
}
while (!pendingCaptionLists.isEmpty()) {
if (pendingCaptionLists.first().timeUs > currentPositionUs) {
if (pendingCaptionLists.first().timeUs > positionUs) {
// We're too early to render any of the pending caption lists.
return;
}
@ -186,11 +182,6 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
return source.getTrackInfo(trackIndex).durationUs;
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;
}
@Override
protected long getBufferedPositionUs() {
return TrackRenderer.END_OF_TRACK_US;
@ -238,8 +229,8 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
}
}
private void maybeParsePendingSample() {
if (sampleHolder.timeUs > currentPositionUs + MAX_SAMPLE_READAHEAD_US) {
private void maybeParsePendingSample(long positionUs) {
if (sampleHolder.timeUs > positionUs + MAX_SAMPLE_READAHEAD_US) {
// We're too early to parse the sample.
return;
}