Consider audio output latency when source has ended

With a [previous change](f05e6a7d6e), we made `hasPendingData()` return `false` once we found that the `AudioTrack` had played out all the written frames, so that it would not stay `true` forever after the source has ended. However, this is too aggressive: the audio output device can still have latency in playing out those frames. So `hasPendingData()` should stay `true` a bit longer (for the duration of `latencyUs`) before finally turning `false`, and `getCurrentPositionUs()` should increment smoothly, without a jump, over that same `latencyUs` (an illustrative sketch follows the change summary below).

PiperOrigin-RevId: 738004292
(cherry picked from commit 6470c97af415d91ad46a1f21c7f2ab5b0716f39c)
tianyifeng 2025-03-18 07:54:13 -07:00 committed by tonihei
parent efb109dd88
commit bd104b1cc4
3 changed files with 44 additions and 46 deletions
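To make the intent concrete, here is a minimal, self-contained sketch of the position/latency relationship described above. It is not the real `AudioTrackPositionTracker` API: the class name, fields, and the use of microsecond durations instead of frame counts are made up for illustration (the actual implementation converts via `durationUsToSampleCount` and `sampleCountToDurationUs`).

```java
// Hypothetical model, not ExoPlayer code: shows why deriving hasPendingData()
// from the latency-adjusted position keeps it true for roughly latencyUs after
// the raw playback head has reached the end, while the reported position
// converges smoothly to the written duration instead of jumping.
public final class LatencyAwarePositionModel {

  private final long latencyUs; // reported output latency of the audio device
  private final long writtenDurationUs; // duration of all frames written to the track

  public LatencyAwarePositionModel(long latencyUs, long writtenDurationUs) {
    this.latencyUs = latencyUs;
    this.writtenDurationUs = writtenDurationUs;
  }

  /** Position reported to the player, given the raw playback head position in microseconds. */
  public long getCurrentPositionUs(long playbackHeadPositionUs) {
    // Subtract the device latency unconditionally (rather than only while the source is still
    // playing), then cap at the written duration so the position never overshoots the end.
    long positionUs = Math.max(0, playbackHeadPositionUs - latencyUs);
    return Math.min(positionUs, writtenDurationUs);
  }

  /** Pending data is derived from the same latency-adjusted position. */
  public boolean hasPendingData(long playbackHeadPositionUs) {
    return getCurrentPositionUs(playbackHeadPositionUs) < writtenDurationUs;
  }

  public static void main(String[] args) {
    LatencyAwarePositionModel model =
        new LatencyAwarePositionModel(/* latencyUs= */ 80_000, /* writtenDurationUs= */ 1_000_000);
    // The raw playback head has reached the written duration, but latency keeps pending data true.
    System.out.println(model.hasPendingData(1_000_000)); // true
    // Once a further latencyUs has played out, pending data finally turns false.
    System.out.println(model.hasPendingData(1_080_000)); // false
  }
}
```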

@ -284,7 +284,7 @@ import java.lang.reflect.Method;
resetSyncParams();
}
public long getCurrentPositionUs(boolean sourceEnded) {
public long getCurrentPositionUs() {
AudioTrack audioTrack = checkNotNull(this.audioTrack);
if (audioTrack.getPlayState() == PLAYSTATE_PLAYING) {
maybeSampleSyncParams();
@ -307,7 +307,11 @@ import java.lang.reflect.Method;
} else {
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
positionUs = getPlaybackHeadPositionUs();
positionUs =
stopTimestampUs != C.TIME_UNSET
? sampleCountToDurationUs(
getSimulatedPlaybackHeadPositionAfterStop(), outputSampleRate)
: getPlaybackHeadPositionUs();
} else {
// getPlaybackHeadPositionUs() only has a granularity of ~20 ms, so we base the position off
// the system clock (and a smoothed offset between it and the playhead position) so as to
@ -316,8 +320,11 @@ import java.lang.reflect.Method;
Util.getMediaDurationForPlayoutDuration(
systemTimeUs + smoothedPlayheadOffsetUs, audioTrackPlaybackSpeed);
}
if (!sourceEnded) {
positionUs = max(0, positionUs - latencyUs);
positionUs = max(0, positionUs - latencyUs);
if (stopTimestampUs != C.TIME_UNSET) {
positionUs =
min(sampleCountToDurationUs(endPlaybackHeadPosition, outputSampleRate), positionUs);
}
}
@ -450,13 +457,8 @@ import java.lang.reflect.Method;
* @return Whether the audio track has any pending data to play out.
*/
public boolean hasPendingData(long writtenFrames) {
if (stopTimestampUs != C.TIME_UNSET) {
return writtenFrames > getPlaybackHeadPosition() || forceHasPendingData();
} else {
long currentPositionUs = getCurrentPositionUs(/* sourceEnded= */ false);
return writtenFrames > durationUsToSampleCount(currentPositionUs, outputSampleRate)
|| forceHasPendingData();
}
return writtenFrames > durationUsToSampleCount(getCurrentPositionUs(), outputSampleRate)
|| forceHasPendingData();
}
/**
@ -633,19 +635,11 @@ import java.lang.reflect.Method;
* @return The playback head position, in frames.
*/
private long getPlaybackHeadPosition() {
long currentTimeMs = clock.elapsedRealtime();
if (stopTimestampUs != C.TIME_UNSET) {
if (checkNotNull(this.audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
// If AudioTrack is paused while stopping, then return cached playback head position.
return stopPlaybackHeadPosition;
}
// Simulate the playback head position up to the total number of frames submitted.
long elapsedTimeSinceStopUs = msToUs(currentTimeMs) - stopTimestampUs;
long mediaTimeSinceStopUs =
Util.getMediaDurationForPlayoutDuration(elapsedTimeSinceStopUs, audioTrackPlaybackSpeed);
long framesSinceStop = durationUsToSampleCount(mediaTimeSinceStopUs, outputSampleRate);
return min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
long simulatedPlaybackHeadPositionAfterStop = getSimulatedPlaybackHeadPositionAfterStop();
return min(endPlaybackHeadPosition, simulatedPlaybackHeadPositionAfterStop);
}
long currentTimeMs = clock.elapsedRealtime();
if (currentTimeMs - lastRawPlaybackHeadPositionSampleTimeMs
>= RAW_PLAYBACK_HEAD_POSITION_UPDATE_INTERVAL_MS) {
updateRawPlaybackHeadPosition(currentTimeMs);
@ -654,6 +648,19 @@ import java.lang.reflect.Method;
return rawPlaybackHeadPosition + sumRawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
}
private long getSimulatedPlaybackHeadPositionAfterStop() {
if (checkNotNull(this.audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
// If AudioTrack is paused while stopping, then return cached playback head position.
return stopPlaybackHeadPosition;
}
// Simulate the playback head position up to the total number of frames submitted.
long elapsedTimeSinceStopUs = msToUs(clock.elapsedRealtime()) - stopTimestampUs;
long mediaTimeSinceStopUs =
Util.getMediaDurationForPlayoutDuration(elapsedTimeSinceStopUs, audioTrackPlaybackSpeed);
long framesSinceStop = durationUsToSampleCount(mediaTimeSinceStopUs, outputSampleRate);
return stopPlaybackHeadPosition + framesSinceStop;
}
private void updateRawPlaybackHeadPosition(long currentTimeMs) {
AudioTrack audioTrack = checkNotNull(this.audioTrack);
int state = audioTrack.getPlayState();

@ -676,7 +676,7 @@ public final class DefaultAudioSink implements AudioSink {
if (!isAudioTrackInitialized() || startMediaTimeUsNeedsInit) {
return CURRENT_POSITION_NOT_SET;
}
long positionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded);
long positionUs = audioTrackPositionTracker.getCurrentPositionUs();
positionUs = min(positionUs, configuration.framesToDurationUs(getWrittenFrames()));
return applySkipping(applyMediaPositionParameters(positionUs));
}

@ -75,15 +75,14 @@ public class AudioTrackPositionTrackerTest {
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
audioTrackPositionTracker.getCurrentPositionUs();
}
// Reset audio track and write bytes to simulate position overflow.
audioTrack.flush();
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isGreaterThan(4294967296L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isGreaterThan(4294967296L);
}
@Test
@ -99,7 +98,7 @@ public class AudioTrackPositionTrackerTest {
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
audioTrackPositionTracker.getCurrentPositionUs();
}
// Reset audio track to simulate track reuse and transition.
@ -109,8 +108,7 @@ public class AudioTrackPositionTrackerTest {
writeBytesAndAdvanceTime(audioTrack);
// Expected position is msToUs(# of writes)*TIME_TO_ADVANCE_MS.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(3000000L);
}
@Test
@ -126,14 +124,13 @@ public class AudioTrackPositionTrackerTest {
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
audioTrackPositionTracker.getCurrentPositionUs();
}
// Reset audio track to simulate track transition and set tracker to expect playback head reset.
audioTrack.flush();
audioTrackPositionTracker.expectRawPlaybackHeadReset();
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(3000000L);
// Pause tracker, pause audio track, and advance time to test that position does not change
// during pause
@ -142,8 +139,7 @@ public class AudioTrackPositionTrackerTest {
clock.advanceTime(TIME_TO_ADVANCE_MS);
// Expected position is msToUs(# of writes)*TIME_TO_ADVANCE_MS.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(3000000L);
}
@Test
@ -161,15 +157,14 @@ public class AudioTrackPositionTrackerTest {
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack1);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
audioTrackPositionTracker.getCurrentPositionUs();
}
// Reset audio track and set tracker to expect playback head reset to simulate track transition.
audioTrack1.flush();
audioTrackPositionTracker.expectRawPlaybackHeadReset();
writeBytesAndAdvanceTime(audioTrack1);
// Test for correct setup with current position being accumulated position.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(3000000L);
// Set new audio track and reset position tracker to simulate transition to new AudioTrack.
audioTrackPositionTracker.reset();
@ -184,8 +179,7 @@ public class AudioTrackPositionTrackerTest {
writeBytesAndAdvanceTime(audioTrack2);
// Expected position is msToUs(1 write)*TIME_TO_ADVANCE_MS.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(1000000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(1000000L);
}
@Test
@ -204,14 +198,13 @@ public class AudioTrackPositionTrackerTest {
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
audioTrackPositionTracker.getCurrentPositionUs();
}
// Reset audio track and write bytes to simulate position overflow.
audioTrack.flush();
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isGreaterThan(4294967296L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isGreaterThan(4294967296L);
}
@Test
@ -234,8 +227,7 @@ public class AudioTrackPositionTrackerTest {
// Advance time during paused state.
clock.advanceTime(2_000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(2_000_000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(2_000_000L);
}
@Test
@ -261,8 +253,7 @@ public class AudioTrackPositionTrackerTest {
audioTrackPositionTracker.start();
audioTrack.play();
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(2_000_000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs()).isEqualTo(2_000_000L);
}
private void writeBytesAndAdvanceTime(AudioTrack audioTrack) {