Poll the audio timestamp more frequently at the start of playback

Add logic to poll AudioTimestamp at different rates depending on when
playback starts and how the audio timestamp advances.

This fixes a pause about 500 ms after starting playback that occurs on some
devices and also makes our polling interval match the recommendations of the
AudioTrack documentation.

Issue: #3830
Issue: #3841

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=189765200
This commit is contained in:
andrewlewis 2018-03-20 10:37:21 -07:00 committed by Oliver Woodman
parent 95d591e0ce
commit c9d827fa2c
3 changed files with 363 additions and 130 deletions

View File

@ -24,10 +24,14 @@
* Allow adding and removing `MediaSourceEventListener`s to MediaSources after
they have been created. Listening to events is now supported for all
media sources including composite sources.
* Audio: Factor out `AudioTrack` position tracking from `DefaultAudioSink`.
* Audio:
* Factor out `AudioTrack` position tracking from `DefaultAudioSink`.
* Fix an issue where the playback position would pause just after playback
begins, and poll the audio timestamp less frequently once it starts
advancing ([#3841](https://github.com/google/ExoPlayer/issues/3841)).
* Caching:
* Add release method to Cache interface.
* Prevent multiple instances of SimpleCache in the same folder.
* Prevent multiple instances of SimpleCache in the same folder.
Previous instance must be released.
### 2.7.1 ###

View File

@ -0,0 +1,303 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import android.annotation.TargetApi;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Polls the {@link AudioTrack} timestamp, if the platform supports it, taking care of polling at
* the appropriate rate to detect when the timestamp starts to advance.
*
* <p>When the audio track isn't paused, call {@link #maybePollTimestamp(long)} regularly to check
* for timestamp updates. If it returns {@code true}, call {@link #getTimestampPositionFrames()} and
* {@link #getTimestampSystemTimeUs()} to access the updated timestamp, then call {@link
* #acceptTimestamp()} or {@link #rejectTimestamp()} to accept or reject it.
*
* <p>If {@link #hasTimestamp()} returns {@code true}, call {@link #getTimestampSystemTimeUs()} to
* get the system time at which the latest timestamp was sampled and {@link
* #getTimestampPositionFrames()} to get its position in frames. If {@link #isTimestampAdvancing()}
* returns {@code true}, the caller should assume that the timestamp has been increasing in real
* time since it was sampled. Otherwise, it may be stationary.
*
* <p>Call {@link #reset()} when pausing or resuming the track.
*/
/* package */ final class AudioTimestampPoller {
/** Timestamp polling states. */
@Retention(RetentionPolicy.SOURCE)
@IntDef({
STATE_INITIALIZING,
STATE_TIMESTAMP,
STATE_TIMESTAMP_ADVANCING,
STATE_NO_TIMESTAMP,
STATE_ERROR
})
private @interface State {}
/** State when first initializing. */
private static final int STATE_INITIALIZING = 0;
/** State when we have a timestamp and we don't know if it's advancing. */
private static final int STATE_TIMESTAMP = 1;
/** State when we have a timestamp and we know it is advancing. */
private static final int STATE_TIMESTAMP_ADVANCING = 2;
/** State when no timestamp is available. */
private static final int STATE_NO_TIMESTAMP = 3;
/** State when the last timestamp was rejected as invalid. */
private static final int STATE_ERROR = 4;
/** The polling interval for {@link #STATE_INITIALIZING} and {@link #STATE_TIMESTAMP}. */
private static final int FAST_POLL_INTERVAL_US = 5_000;
/**
* The polling interval for {@link #STATE_TIMESTAMP_ADVANCING} and {@link #STATE_NO_TIMESTAMP}.
*/
private static final int SLOW_POLL_INTERVAL_US = 10_000_000;
/** The polling interval for {@link #STATE_ERROR}. */
private static final int ERROR_POLL_INTERVAL_US = 500_000;
/**
* The minimum duration to remain in {@link #STATE_INITIALIZING} if no timestamps are being
* returned before transitioning to {@link #STATE_NO_TIMESTAMP}.
*/
private static final int INITIALIZING_DURATION_US = 500_000;
// Wrapper around the platform AudioTimestamp, or null if the device is below API 19.
private final @Nullable AudioTimestampV19 audioTimestamp;
// The current polling state, which determines the polling interval.
private @State int state;
// System time at which STATE_INITIALIZING was entered, in microseconds.
private long initializeSystemTimeUs;
// The polling interval for the current state, in microseconds.
private long sampleIntervalUs;
// System time of the last poll attempt, in microseconds. Zeroed to force an immediate poll.
private long lastTimestampSampleTimeUs;
// Frame position of the first timestamp received while initializing, used to detect advancing.
private long initialTimestampPositionFrames;
/**
* Creates a new audio timestamp poller.
*
* @param audioTrack The audio track that will provide timestamps, if the platform supports it.
*/
public AudioTimestampPoller(AudioTrack audioTrack) {
if (Util.SDK_INT >= 19) {
audioTimestamp = new AudioTimestampV19(audioTrack);
reset();
} else {
audioTimestamp = null;
updateState(STATE_NO_TIMESTAMP);
}
}
/**
* Polls the timestamp if required and returns whether it was updated. If {@code true}, the latest
* timestamp is available via {@link #getTimestampSystemTimeUs()} and {@link
* #getTimestampPositionFrames()}, and the caller should call {@link #acceptTimestamp()} if the
* timestamp was valid, or {@link #rejectTimestamp()} otherwise. The values returned by {@link
* #hasTimestamp()} and {@link #isTimestampAdvancing()} may be updated.
*
* @param systemTimeUs The current system time, in microseconds.
* @return Whether the timestamp was updated.
*/
public boolean maybePollTimestamp(long systemTimeUs) {
if (audioTimestamp == null || (systemTimeUs - lastTimestampSampleTimeUs) < sampleIntervalUs) {
return false;
}
lastTimestampSampleTimeUs = systemTimeUs;
boolean updatedTimestamp = audioTimestamp.maybeUpdateTimestamp();
switch (state) {
case STATE_INITIALIZING:
if (updatedTimestamp
&& audioTimestamp.getTimestampSystemTimeUs() >= initializeSystemTimeUs) {
// We have an initial timestamp, but don't know if it's advancing yet.
initialTimestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
updateState(STATE_TIMESTAMP);
} else if (systemTimeUs - initializeSystemTimeUs > INITIALIZING_DURATION_US) {
// We haven't received a timestamp for a while, so they probably aren't available for the
// current audio route. Poll infrequently in case the route changes later.
// TODO: Ideally we should listen for audio route changes in order to detect when a
// timestamp becomes available again.
updateState(STATE_NO_TIMESTAMP);
}
break;
case STATE_TIMESTAMP:
if (updatedTimestamp) {
long timestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
if (timestampPositionFrames > initialTimestampPositionFrames) {
updateState(STATE_TIMESTAMP_ADVANCING);
}
} else {
reset();
}
break;
case STATE_TIMESTAMP_ADVANCING:
if (!updatedTimestamp) {
// The audio route may have changed, so reset polling.
reset();
}
break;
case STATE_NO_TIMESTAMP:
if (updatedTimestamp) {
// The audio route may have changed, so reset polling.
reset();
}
break;
case STATE_ERROR:
// Do nothing. If the caller accepts any new timestamp we'll reset polling.
break;
default:
throw new IllegalStateException();
}
return updatedTimestamp;
}
/**
* Rejects the timestamp last polled in {@link #maybePollTimestamp(long)}. The instance will enter
* the error state and poll timestamps infrequently until the next call to {@link
* #acceptTimestamp()}.
*/
public void rejectTimestamp() {
updateState(STATE_ERROR);
}
/**
* Accepts the timestamp last polled in {@link #maybePollTimestamp(long)}. If the instance is in
* the error state, it will begin to poll timestamps frequently again.
*/
public void acceptTimestamp() {
if (state == STATE_ERROR) {
reset();
}
}
/**
* Returns whether this instance has a timestamp that can be used to calculate the audio track
* position. If {@code true}, call {@link #getTimestampSystemTimeUs()} and {@link
* #getTimestampPositionFrames()} to access the timestamp.
*/
public boolean hasTimestamp() {
return state == STATE_TIMESTAMP || state == STATE_TIMESTAMP_ADVANCING;
}
/**
* Returns whether the timestamp appears to be advancing. If {@code true}, call {@link
* #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()} to access the timestamp.
* A current position for the track can be extrapolated based on elapsed real time since the
* system time at which the timestamp was sampled.
*/
public boolean isTimestampAdvancing() {
return state == STATE_TIMESTAMP_ADVANCING;
}
/** Resets polling. Should be called whenever the audio track is paused or resumed. */
public void reset() {
if (audioTimestamp != null) {
updateState(STATE_INITIALIZING);
}
}
/**
* If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
* the system time at which the latest timestamp was sampled, in microseconds.
*/
public long getTimestampSystemTimeUs() {
return audioTimestamp != null ? audioTimestamp.getTimestampSystemTimeUs() : C.TIME_UNSET;
}
/**
* If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
* the latest timestamp's position in frames.
*/
public long getTimestampPositionFrames() {
return audioTimestamp != null ? audioTimestamp.getTimestampPositionFrames() : C.POSITION_UNSET;
}
// Transitions to the given state and configures the polling interval and bookkeeping for it.
private void updateState(@State int state) {
this.state = state;
switch (state) {
case STATE_INITIALIZING:
// Force polling a timestamp immediately, and poll quickly.
lastTimestampSampleTimeUs = 0;
initialTimestampPositionFrames = C.POSITION_UNSET;
initializeSystemTimeUs = System.nanoTime() / 1000;
sampleIntervalUs = FAST_POLL_INTERVAL_US;
break;
case STATE_TIMESTAMP:
sampleIntervalUs = FAST_POLL_INTERVAL_US;
break;
case STATE_TIMESTAMP_ADVANCING:
case STATE_NO_TIMESTAMP:
sampleIntervalUs = SLOW_POLL_INTERVAL_US;
break;
case STATE_ERROR:
sampleIntervalUs = ERROR_POLL_INTERVAL_US;
break;
default:
throw new IllegalStateException();
}
}
@TargetApi(19)
private static final class AudioTimestampV19 {
private final AudioTrack audioTrack;
private final AudioTimestamp audioTimestamp;
// Number of times the raw frame position has wrapped around its 32-bit range.
private long rawTimestampFramePositionWrapCount;
// The raw (unadjusted) frame position from the most recent timestamp.
private long lastTimestampRawPositionFrames;
// The wrap-adjusted frame position from the most recent timestamp.
private long lastTimestampPositionFrames;
/**
* Creates a new {@link AudioTimestamp} wrapper.
*
* @param audioTrack The audio track that will provide timestamps.
*/
public AudioTimestampV19(AudioTrack audioTrack) {
this.audioTrack = audioTrack;
audioTimestamp = new AudioTimestamp();
}
/**
* Attempts to update the audio track timestamp. Returns {@code true} if the timestamp was
* updated, in which case the updated timestamp system time and position can be accessed with
* {@link #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()}. Returns {@code
* false} if no timestamp is available, in which case those methods should not be called.
*/
public boolean maybeUpdateTimestamp() {
boolean updated = audioTrack.getTimestamp(audioTimestamp);
if (updated) {
long rawPositionFrames = audioTimestamp.framePosition;
if (lastTimestampRawPositionFrames > rawPositionFrames) {
// The value must have wrapped around.
rawTimestampFramePositionWrapCount++;
}
lastTimestampRawPositionFrames = rawPositionFrames;
lastTimestampPositionFrames =
rawPositionFrames + (rawTimestampFramePositionWrapCount << 32);
}
return updated;
}
/**
* Returns the {@link AudioTimestamp#nanoTime} obtained during the most recent call to {@link
* #maybeUpdateTimestamp()} that returned true, converted to microseconds.
*/
public long getTimestampSystemTimeUs() {
return audioTimestamp.nanoTime / 1000;
}
/**
* Returns the {@link AudioTimestamp#framePosition} obtained during the most recent call to
* {@link #maybeUpdateTimestamp()} that returned true, adjusted so that wrap around only occurs
* if the value exceeds {@link Long#MAX_VALUE} (which in practice will never happen).
*/
public long getTimestampPositionFrames() {
return lastTimestampPositionFrames;
}
}
}

View File

@ -15,12 +15,10 @@
*/
package com.google.android.exoplayer2.audio;
import android.annotation.TargetApi;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.SystemClock;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
@ -125,29 +123,26 @@ import java.lang.reflect.Method;
private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 500000;
private final Listener listener;
private final long[] playheadOffsets;
private AudioTrack audioTrack;
private int outputSampleRate;
private int outputPcmFrameSize;
private int bufferSize;
private AudioTimestampPoller audioTimestampPoller;
private int outputSampleRate;
private boolean needsPassthroughWorkarounds;
private long bufferSizeUs;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
private boolean audioTimestampSet;
private long lastTimestampSampleTimeUs;
private Method getLatencyMethod;
private int outputPcmFrameSize;
private long resumeSystemTimeUs;
private long latencyUs;
private boolean hasData;
private boolean needsPassthroughWorkarounds;
private @Nullable AudioTimestampV19 audioTimestamp;
private boolean isOutputPcm;
private long lastLatencySampleTimeUs;
private long lastRawPlaybackHeadPosition;
@ -193,15 +188,13 @@ import java.lang.reflect.Method;
int outputPcmFrameSize,
int bufferSize) {
this.audioTrack = audioTrack;
this.bufferSize = bufferSize;
this.outputPcmFrameSize = outputPcmFrameSize;
this.bufferSize = bufferSize;
audioTimestampPoller = new AudioTimestampPoller(audioTrack);
outputSampleRate = audioTrack.getSampleRate();
needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
isOutputPcm = Util.isEncodingPcm(outputEncoding);
bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
if (Util.SDK_INT >= 19) {
audioTimestamp = new AudioTimestampV19(audioTrack);
}
lastRawPlaybackHeadPosition = 0;
rawPlaybackHeadWrapCount = 0;
passthroughWorkaroundPauseOffset = 0;
@ -219,15 +212,17 @@ import java.lang.reflect.Method;
// If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
// Otherwise, derive a smoothed position by sampling the track's frame position.
long systemTimeUs = System.nanoTime() / 1000;
long positionUs;
if (audioTimestamp != null && audioTimestampSet) {
if (audioTimestampPoller.hasTimestamp()) {
// Calculate the speed-adjusted position using the timestamp (which may be in the future).
long elapsedSinceTimestampUs = systemTimeUs - audioTimestamp.getTimestampSystemTimeUs();
long elapsedSinceTimestampFrames = durationUsToFrames(elapsedSinceTimestampUs);
long elapsedFrames =
audioTimestamp.getTimestampPositionFrames() + elapsedSinceTimestampFrames;
positionUs = framesToDurationUs(elapsedFrames);
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
if (!audioTimestampPoller.isTimestampAdvancing()) {
return timestampPositionUs;
}
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
return timestampPositionUs + elapsedSinceTimestampUs;
} else {
long positionUs;
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
positionUs = getPlaybackHeadPositionUs();
@ -240,13 +235,13 @@ import java.lang.reflect.Method;
if (!sourceEnded) {
positionUs -= latencyUs;
}
return positionUs;
}
return positionUs;
}
/** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
public void start() {
resumeSystemTimeUs = System.nanoTime() / 1000;
audioTimestampPoller.reset();
}
/** Returns whether the audio track is in the playing state. */
@ -341,7 +336,14 @@ import java.lang.reflect.Method;
*/
public boolean pause() {
resetSyncParams();
return stopTimestampUs == C.TIME_UNSET;
if (stopTimestampUs == C.TIME_UNSET) {
// The audio track is going to be paused, so reset the timestamp poller to ensure it doesn't
// supply an advancing position.
audioTimestampPoller.reset();
return true;
}
// We've handled the end of the stream already, so there's no need to pause the track.
return false;
}
/**
@ -351,7 +353,7 @@ import java.lang.reflect.Method;
public void reset() {
resetSyncParams();
audioTrack = null;
audioTimestamp = null;
audioTimestampPoller = null;
}
private void maybeSampleSyncParams() {
@ -380,40 +382,36 @@ import java.lang.reflect.Method;
// platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
return;
}
maybeUpdateAudioTimestamp(systemTimeUs, playbackPositionUs);
maybePollAndCheckTimestamp(systemTimeUs, playbackPositionUs);
maybeUpdateLatency(systemTimeUs);
}
private void maybeUpdateAudioTimestamp(long systemTimeUs, long playbackPositionUs) {
if (audioTimestamp != null
&& systemTimeUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
audioTimestampSet = audioTimestamp.maybeUpdateTimestamp();
if (audioTimestampSet) {
// Perform sanity checks on the timestamp.
long audioTimestampSystemTimeUs = audioTimestamp.getTimestampSystemTimeUs();
long audioTimestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
if (audioTimestampSystemTimeUs < resumeSystemTimeUs) {
// The timestamp corresponds to a time before the track was most recently resumed.
audioTimestampSet = false;
} else if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onSystemTimeUsMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampSet = false;
} else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onPositionFramesMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampSet = false;
}
}
lastTimestampSampleTimeUs = systemTimeUs;
/**
* Polls the audio timestamp if the poller's interval has elapsed, and sanity-checks any new
* timestamp against the current system time and playback position. A timestamp whose system time
* or frame position deviates from expectations by more than {@code MAX_AUDIO_TIMESTAMP_OFFSET_US}
* is reported via the listener and rejected; otherwise it is accepted.
*
* @param systemTimeUs The current system time, in microseconds.
* @param playbackPositionUs The current playback head position, in microseconds.
*/
private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) {
if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) {
return;
}
// Perform sanity checks on the timestamp and accept/reject it.
long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onSystemTimeUsMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onPositionFramesMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else {
audioTimestampPoller.acceptTimestamp();
}
}
@ -445,17 +443,11 @@ import java.lang.reflect.Method;
return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
}
private long durationUsToFrames(long durationUs) {
return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND;
}
private void resetSyncParams() {
smoothedPlayheadOffsetUs = 0;
playheadOffsetCount = 0;
nextPlayheadOffsetIndex = 0;
lastPlayheadSampleTimeUs = 0;
audioTimestampSet = false;
lastTimestampSampleTimeUs = 0;
}
/**
@ -540,70 +532,4 @@ import java.lang.reflect.Method;
lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
}
@TargetApi(19)
private static final class AudioTimestampV19 {
private final AudioTrack audioTrack;
private final AudioTimestamp audioTimestamp;
// Number of times the raw frame position has wrapped around its 32-bit range.
private long rawTimestampFramePositionWrapCount;
// The raw (unadjusted) frame position from the most recent timestamp.
private long lastTimestampRawPositionFrames;
// The wrap-adjusted frame position from the most recent timestamp.
private long lastTimestampPositionFrames;
/**
* Creates a new {@link AudioTimestamp} wrapper.
*
* @param audioTrack The audio track that will provide timestamps.
*/
public AudioTimestampV19(AudioTrack audioTrack) {
this.audioTrack = audioTrack;
audioTimestamp = new AudioTimestamp();
}
/**
* Attempts to update the audio track timestamp. Returns {@code true} if the timestamp was
* updated, in which case the updated timestamp system time and position can be accessed with
* {@link #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()}. Returns {@code
* false} if no timestamp is available, in which case those methods should not be called.
*/
public boolean maybeUpdateTimestamp() {
boolean updated = audioTrack.getTimestamp(audioTimestamp);
if (updated) {
long rawPositionFrames = audioTimestamp.framePosition;
if (lastTimestampRawPositionFrames > rawPositionFrames) {
// The value must have wrapped around.
rawTimestampFramePositionWrapCount++;
}
lastTimestampRawPositionFrames = rawPositionFrames;
lastTimestampPositionFrames =
rawPositionFrames + (rawTimestampFramePositionWrapCount << 32);
}
return updated;
}
/**
* Returns the {@link android.media.AudioTimestamp#nanoTime} obtained during the most recent
* call to {@link #maybeUpdateTimestamp()} that returned true.
*
* @return The nanoTime obtained during the most recent call to {@link #maybeUpdateTimestamp()}
* that returned true, converted to microseconds.
*/
public long getTimestampSystemTimeUs() {
return audioTimestamp.nanoTime / 1000;
}
/**
* Returns the {@link android.media.AudioTimestamp#framePosition} obtained during the most
* recent call to {@link #maybeUpdateTimestamp()} that returned true. The value is adjusted so
* that wrap around only occurs if the value exceeds {@link Long#MAX_VALUE} (which in practice
* will never happen).
*
* @return The framePosition obtained during the most recent call to {@link
* #maybeUpdateTimestamp()} that returned true.
*/
public long getTimestampPositionFrames() {
return lastTimestampPositionFrames;
}
}
}