Update cached playbackHeadPosition when pausing after AudioTrack.stop()

In some streaming scenarios, such as offload, the sink may finish writing its buffers slightly before playback reaches the end of the track. In that case a player may pause while the sink is in this 'stopping' state.

The AudioTrackPositionTracker needs to update the cached values it uses to calculate the position in the `PLAYSTATE_STOPPED`/`PLAYSTATE_STOPPING` states if pause or play is called during this period.
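
To make the mechanism concrete, here is a rough, self-contained sketch of the idea, not the actual AudioTrackPositionTracker code; the class, fields, and frame arithmetic are illustrative assumptions (and 1x playback speed is assumed throughout). Once end of stream has been handled, the playback head position is simulated from elapsed real time, so pause() must cache the current simulated position and start() must restart the simulation clock so that time spent paused is not counted.

import android.media.AudioTrack;

// Illustrative sketch only; not the Media3 AudioTrackPositionTracker implementation.
final class StoppingStatePositionSketch {
  private static final long TIME_UNSET = Long.MIN_VALUE;

  private final AudioTrack audioTrack;
  private final int sampleRate;

  private long stopTimestampUs = TIME_UNSET; // Set when end of stream is handled; refreshed on resume.
  private long stopPlaybackHeadPosition;     // Simulated head position frozen at stop/pause time.
  private long endPlaybackHeadPosition;      // Total number of frames submitted to the track.

  StoppingStatePositionSketch(AudioTrack audioTrack, int sampleRate) {
    this.audioTrack = audioTrack;
    this.sampleRate = sampleRate;
  }

  // All frames have been written and AudioTrack.stop() has been issued.
  void handleEndOfStream(long nowUs, long writtenFrames) {
    stopTimestampUs = nowUs;
    stopPlaybackHeadPosition = audioTrack.getPlaybackHeadPosition() & 0xFFFFFFFFL;
    endPlaybackHeadPosition = writtenFrames;
  }

  // Pausing while stopping: freeze the simulated position so it stops advancing.
  void pause(long nowUs) {
    if (stopTimestampUs != TIME_UNSET) {
      stopPlaybackHeadPosition = getPlaybackHeadPosition(nowUs);
    }
  }

  // Resuming while stopping: restart the simulation clock so paused time is not counted.
  void start(long nowUs) {
    if (stopTimestampUs != TIME_UNSET) {
      stopTimestampUs = nowUs;
    }
  }

  long getPlaybackHeadPosition(long nowUs) {
    if (stopTimestampUs == TIME_UNSET) {
      // Normal playback: read the track's unsigned 32-bit head position counter.
      return audioTrack.getPlaybackHeadPosition() & 0xFFFFFFFFL;
    }
    if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
      // Paused while stopping: report the cached position instead of a growing estimate.
      return stopPlaybackHeadPosition;
    }
    // Simulate the position from elapsed time (assuming 1x speed), capped at the frames submitted.
    long framesSinceStop = (nowUs - stopTimestampUs) * sampleRate / 1_000_000L;
    return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
  }
}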

PiperOrigin-RevId: 571345914
michaelkatz 2023-10-06 08:35:13 -07:00 committed by Copybara-Service
parent ab42d64d6d
commit a789db5b41
2 changed files with 74 additions and 13 deletions


@@ -362,6 +362,9 @@ import java.lang.reflect.Method;
/** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
public void start() {
if (stopTimestampUs != C.TIME_UNSET) {
stopTimestampUs = msToUs(clock.elapsedRealtime());
}
checkNotNull(audioTimestampPoller).reset();
}
@@ -464,6 +467,7 @@ import java.lang.reflect.Method;
checkNotNull(audioTimestampPoller).reset();
return true;
}
stopPlaybackHeadPosition = getPlaybackHeadPosition();
// We've handled the end of the stream already, so there's no need to pause the track.
return false;
}
@@ -623,6 +627,10 @@ import java.lang.reflect.Method;
private long getPlaybackHeadPosition() {
long currentTimeMs = clock.elapsedRealtime();
if (stopTimestampUs != C.TIME_UNSET) {
if (checkNotNull(this.audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
// If AudioTrack is paused while stopping, then return cached playback head position.
return stopPlaybackHeadPosition;
}
// Simulate the playback head position up to the total number of frames submitted.
long elapsedTimeSinceStopUs = msToUs(currentTimeMs) - stopTimestampUs;
long mediaTimeSinceStopUs =

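Before the test diff below, this is roughly the scenario the new tests exercise, expressed against the illustrative sketch class above rather than the real test code. It assumes an audioTrack variable that is already playing and that reports PLAYSTATE_PAUSED after audioTrack.pause(); the real tests drive this with a fake clock and their own AudioTrack setup.

// Hypothetical walkthrough using the sketch class above.
StoppingStatePositionSketch tracker =
    new StoppingStatePositionSketch(audioTrack, /* sampleRate= */ 44_100);
long nowUs = 2_000_000L;                       // Pretend 2s of audio have been written and played.
tracker.handleEndOfStream(nowUs, /* writtenFrames= */ 88_200L);
tracker.pause(nowUs);                          // Player pauses while the track is stopping.
audioTrack.pause();
nowUs += 2_000_000L;                           // Time passes while paused...
long whilePaused = tracker.getPlaybackHeadPosition(nowUs); // ...but the reported position stays frozen.
tracker.start(nowUs);                          // Resuming restarts the simulation from 'now'...
audioTrack.play();
long afterResume = tracker.getPlaybackHeadPosition(nowUs); // ...so it continues from the frozen value.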

@@ -74,13 +74,13 @@ public class AudioTrackPositionTrackerTest {
audioTrack.play();
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
}
// Reset audio track and write bytes to simulate position overflow.
audioTrack.flush();
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isGreaterThan(4294967296L);
@@ -98,7 +98,7 @@ public class AudioTrackPositionTrackerTest {
audioTrack.play();
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
}
@@ -106,7 +106,7 @@ public class AudioTrackPositionTrackerTest {
// Set tracker to expect playback head reset.
audioTrack.flush();
audioTrackPositionTracker.expectRawPlaybackHeadReset();
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
// Expected position is msToUs(# of writes)*TIME_TO_ADVANCE_MS.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
@@ -125,13 +125,13 @@ public class AudioTrackPositionTrackerTest {
audioTrack.play();
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
}
// Reset audio track to simulate track transition and set tracker to expect playback head reset.
audioTrack.flush();
audioTrackPositionTracker.expectRawPlaybackHeadReset();
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
@@ -160,13 +160,13 @@ public class AudioTrackPositionTrackerTest {
audioTrack1.play();
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
advanceTimeAndWriteBytes(audioTrack1);
writeBytesAndAdvanceTime(audioTrack1);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
}
// Reset audio track and set tracker to expect playback head reset to simulate track transition.
audioTrack1.flush();
audioTrackPositionTracker.expectRawPlaybackHeadReset();
advanceTimeAndWriteBytes(audioTrack1);
writeBytesAndAdvanceTime(audioTrack1);
// Test for correct setup with current position being accumulated position.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(3000000L);
@@ -179,8 +179,9 @@ public class AudioTrackPositionTrackerTest {
C.ENCODING_PCM_16BIT,
OUTPUT_PCM_FRAME_SIZE,
MIN_BUFFER_SIZE);
audioTrackPositionTracker.start();
audioTrack2.play();
advanceTimeAndWriteBytes(audioTrack2);
writeBytesAndAdvanceTime(audioTrack2);
// Expected position is msToUs(1 write)*TIME_TO_ADVANCE_MS.
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
@@ -197,26 +198,78 @@ public class AudioTrackPositionTrackerTest {
C.ENCODING_PCM_16BIT,
OUTPUT_PCM_FRAME_SIZE,
MIN_BUFFER_SIZE);
audioTrackPositionTracker.start();
audioTrack.play();
// Advance and write to audio track at least twice to move rawHeadPosition past wrap point.
for (int i = 0; i < 2; i++) {
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false);
}
// Reset audio track and write bytes to simulate position overflow.
audioTrack.flush();
advanceTimeAndWriteBytes(audioTrack);
writeBytesAndAdvanceTime(audioTrack);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isGreaterThan(4294967296L);
}
private void advanceTimeAndWriteBytes(AudioTrack audioTrack) {
clock.advanceTime(TIME_TO_ADVANCE_MS);
@Test
public void getCurrentPositionUs_afterHandleEndOfStreamWithPause_returnsCorrectPosition() {
audioTrackPositionTracker.setAudioTrack(
audioTrack,
/* isPassthrough= */ false,
C.ENCODING_PCM_16BIT,
OUTPUT_PCM_FRAME_SIZE,
MIN_BUFFER_SIZE);
audioTrackPositionTracker.start();
audioTrack.play();
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
}
audioTrackPositionTracker.handleEndOfStream(2_000_000L);
audioTrackPositionTracker.pause();
audioTrack.pause();
// Advance time during paused state.
clock.advanceTime(2_000L);
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(2_000_000L);
}
@Test
public void getCurrentPositionUs_afterHandleEndOfStreamWithPausePlay_returnsCorrectPosition() {
audioTrackPositionTracker.setAudioTrack(
audioTrack,
/* isPassthrough= */ false,
C.ENCODING_PCM_16BIT,
OUTPUT_PCM_FRAME_SIZE,
MIN_BUFFER_SIZE);
audioTrackPositionTracker.start();
audioTrack.play();
for (int i = 0; i < 2; i++) {
writeBytesAndAdvanceTime(audioTrack);
}
// Provide Long.MAX_VALUE so that tracker relies on estimation and not total duration.
audioTrackPositionTracker.handleEndOfStream(Long.MAX_VALUE);
audioTrackPositionTracker.pause();
audioTrack.pause();
// Advance time during paused state.
clock.advanceTime(2_000L);
audioTrackPositionTracker.start();
audioTrack.play();
assertThat(audioTrackPositionTracker.getCurrentPositionUs(/* sourceEnded= */ false))
.isEqualTo(2_000_000L);
}
private void writeBytesAndAdvanceTime(AudioTrack audioTrack) {
ByteBuffer byteBuffer = createDefaultSilenceBuffer();
int bytesRemaining = byteBuffer.remaining();
audioTrack.write(byteBuffer, bytesRemaining, AudioTrack.WRITE_NON_BLOCKING);
clock.advanceTime(TIME_TO_ADVANCE_MS);
}
/** Creates a one second silence buffer for 44.1 kHz stereo 16-bit audio. */