Set static interval as default for dynamic scheduling with audio

For applications providing custom `AudioSink` implementations that do not implement `AudioSink#getAudioTrackBufferSizeUs()`, dynamic scheduling for audio playback should fall back to the static interval.

PiperOrigin-RevId: 743082057
(cherry picked from commit 9e80d6d263d04021e24d4897f415898964a93a05)
Author: michaelkatz 2025-04-02 04:45:29 -07:00 (committed by tonihei)
Parent: 2bdf632369
Commit: 567ee030b3
6 changed files with 268 additions and 350 deletions
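
As an illustration only (not part of this commit): a custom sink that cannot report its AudioTrack buffer size returns `C.TIME_UNSET` from the new method, and the audio renderers then fall back to the static scheduling interval rather than a buffer-derived wake-up time. The class below is hypothetical and assumes the androidx.media3 package layout.

```java
import androidx.media3.common.C;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.ForwardingAudioSink;

/** Hypothetical custom sink that does not support buffer-size reporting. */
final class MyCustomAudioSink extends ForwardingAudioSink {
  MyCustomAudioSink(AudioSink delegate) {
    super(delegate);
  }

  @Override
  public long getAudioTrackBufferSizeUs() {
    // "Unsupported": with this change the renderers use the static scheduling
    // interval instead of a duration derived from the AudioTrack buffer.
    return C.TIME_UNSET;
  }
}
```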

ExoPlayer.java

@@ -485,6 +485,9 @@ public interface ExoPlayer extends Player {
* <p>If enabled, ExoPlayer's playback loop will run as rarely as possible by scheduling work
* for when {@link Renderer} progress can be made.
*
* <p>If a custom {@link AudioSink} is used then it must correctly implement {@link
* AudioSink#getAudioTrackBufferSizeUs()} to enable dynamic scheduling for audio playback.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param dynamicSchedulingEnabled Whether to enable dynamic scheduling.
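
For context, a hedged sketch of the opt-in on the player side. The builder setter name follows the experimental API this javadoc belongs to and may differ between releases; the helper class is hypothetical.

```java
import android.content.Context;
import androidx.media3.exoplayer.ExoPlayer;

/** Hypothetical helper showing the experimental dynamic scheduling opt-in. */
final class DynamicSchedulingPlayers {
  static ExoPlayer create(Context context) {
    // With the default AudioSink the audio renderers can query the AudioTrack
    // buffer size; a custom sink without getAudioTrackBufferSizeUs() support
    // leads to the static interval fallback described in this change.
    return new ExoPlayer.Builder(context)
        .experimentalSetDynamicSchedulingEnabled(true)
        .build();
  }
}
```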

AudioSink.java

@@ -593,7 +593,7 @@ public interface AudioSink {
/**
* Returns the size of the underlying {@link AudioTrack} buffer in microseconds. If unsupported or
* the {@link AudioTrack} is not initialized then return {@link C#TIME_UNSET};
* the {@link AudioTrack} is not initialized then return {@link C#TIME_UNSET}.
*
* <p>If the {@link AudioTrack} is configured with a compressed encoding, then the returned
* duration is an estimated minimum based on the encoding's maximum encoded byte rate.
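
A minimal sketch of how a custom sink might satisfy this contract for PCM output, assuming it already tracks its AudioTrack buffer size in bytes, the PCM frame size, and the output sample rate; all names below are hypothetical.

```java
import androidx.media3.common.C;

/** Hypothetical buffer-size-to-duration conversion for a custom PCM sink. */
final class AudioTrackBufferDurations {
  static long audioTrackBufferSizeUs(
      boolean audioTrackInitialized, int bufferSizeBytes, int pcmFrameSizeBytes, int sampleRateHz) {
    if (!audioTrackInitialized) {
      // Per the contract above: unsupported or not initialized -> TIME_UNSET.
      return C.TIME_UNSET;
    }
    long bufferSizeFrames = bufferSizeBytes / pcmFrameSizeBytes;
    return bufferSizeFrames * C.MICROS_PER_SECOND / sampleRateHz;
  }
}
```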

DecoderAudioRenderer.java

@@ -170,6 +170,7 @@ public abstract class DecoderAudioRenderer<
private long largestQueuedPresentationTimeUs;
private long lastBufferInStreamPresentationTimeUs;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
public DecoderAudioRenderer() {
this(/* eventHandler= */ null, /* eventListener= */ null);
@@ -247,9 +248,14 @@ public abstract class DecoderAudioRenderer<
if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
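
Pulling the calculation above into a standalone example with hypothetical numbers: with the playback position at 0, the next unwritten buffer at 150 ms, and a sink reporting a 100 ms AudioTrack buffer, the buffered duration is capped at 100 ms. The renderer derives its wake-up time from this value (further adjusted for playback speed); a sink returning `C.TIME_UNSET` now yields the default static interval instead, unless the renderer is draining to end of stream.

```java
import static java.lang.Math.min;

import androidx.media3.common.C;

/** Worked example of the buffered-duration calculation in the hunk above. */
final class BufferedDurationExample {
  static long bufferedDurationUs(
      long positionUs, long nextBufferToWritePresentationTimeUs, long audioTrackBufferDurationUs) {
    long writtenDurationUs = nextBufferToWritePresentationTimeUs - positionUs;
    return audioTrackBufferDurationUs != C.TIME_UNSET
        ? min(audioTrackBufferDurationUs, writtenDurationUs)
        : writtenDurationUs;
  }

  public static void main(String[] args) {
    // min(100_000, 150_000 - 0) = 100_000 us of written, yet-to-play audio.
    System.out.println(bufferedDurationUs(0, 150_000, 100_000));
  }
}
```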
@@ -312,6 +318,7 @@ public abstract class DecoderAudioRenderer<
try {
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@@ -593,6 +600,7 @@ public abstract class DecoderAudioRenderer<
outputStreamEnded = true;
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
}
private void flushDecoder() throws ExoPlaybackException {
@@ -668,6 +676,7 @@ public abstract class DecoderAudioRenderer<
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
@@ -697,6 +706,7 @@ public abstract class DecoderAudioRenderer<
setOutputStreamOffsetUs(C.TIME_UNSET);
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
setSourceDrmSession(null);
releaseDecoder();

MediaCodecAudioRenderer.java

@@ -126,6 +126,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private int rendererPriority;
private boolean isStarted;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
/**
* @param context A context.
@@ -523,9 +524,15 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
@@ -687,6 +694,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
}
@@ -711,6 +719,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSinkNeedsReset = true;
inputFormat = null;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
audioSink.flush();
} finally {
@@ -726,6 +735,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
protected void onReset() {
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
super.onReset();
} finally {
@@ -865,6 +875,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
}
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e,

DecoderAudioRendererTest.java

@@ -226,178 +226,6 @@ public class DecoderAudioRendererTest {
inOrderAudioSink.verify(mockAudioSink, times(2)).handleBuffer(any(), anyLong(), anyInt());
}
@Test
public void getDurationToProgressUs_usingWrittenDurationUs_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
audioRenderer.start();
while (latchDecode.getCount() != 0) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(75_000L);
}
@Test
public void
getDurationToProgressUs_usingWrittenDurationUsWithDoublePlaybackSpeed_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
PlaybackParameters playbackParametersWithDoubleSpeed =
new PlaybackParameters(/* speed= */ 2.0f);
when(mockAudioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
while (latchDecode.getCount() != 0) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(37_500L);
}
@Test
public void
getDurationToProgressUs_usingWrittenDurationUsWithPlaybackAdvancement_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, fakeClock);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
long rendererPositionElapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
while (latchDecode.getCount() != 0) {
audioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
}
audioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
// Simulate playback progressing between render() and getDurationToProgressUs call
fakeClock.advanceTime(/* timeDiffMs= */ 10);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
assertThat(durationToProgressUs).isEqualTo(65_000L);
}
@Test
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
throws Exception {
@@ -571,8 +399,64 @@ public class DecoderAudioRendererTest {
}
@Test
public void getDurationToProgressUs_afterReadToEndOfStream_returnsCalculatedDuration()
throws Exception {
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
audioRenderer.start();
while (latchDecode.getCount() != 0) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(10_000L);
}
@Test
public void
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
@@ -628,6 +512,65 @@ public class DecoderAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
ForwardingAudioSink forwardingAudioSink =
new ForwardingAudioSink(mockAudioSink) {
@Override
public void playToEndOfStream() throws WriteException {
super.playToEndOfStream();
hasCalledPlayToEndOfStream.set(true);
}
};
audioRenderer = createAudioRenderer(forwardingAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
audioRenderer.setCurrentStreamFinal();
while (!hasCalledPlayToEndOfStream.get()) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);

MediaCodecAudioRendererTest.java

@@ -733,177 +733,6 @@ public class MediaCodecAudioRendererTest {
verify(audioSink).setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
}
@Test
public void getDurationToProgressUs_usingWrittenDurationUs_returnsCalculatedDuration()
throws Exception {
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150_000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
mediaCodecAudioRenderer.start();
for (int i = 0; i < 10; i++) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(75_000L);
}
@Test
public void
getDurationToProgressUs_usingWrittenDurationUsWithDoublePlaybackSpeed_returnsCalculatedDuration()
throws Exception {
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
PlaybackParameters playbackParametersWithDoubleSpeed =
new PlaybackParameters(/* speed= */ 2.0f);
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150_000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
mediaCodecAudioRenderer.start();
for (int i = 0; i < 10; i++) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(37_500L);
}
@Test
public void
getDurationToProgressUs_usingWrittenDurationUsWithPlaybackAdvancement_returnsCalculatedDuration()
throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
new DefaultMediaCodecAdapterFactory(
ApplicationProvider.getApplicationContext(),
() -> {
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
return callbackThread;
},
() -> {
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
return queueingThread;
}),
mediaCodecSelector,
/* enableDecoderFallback= */ false,
/* eventHandler= */ new Handler(Looper.getMainLooper()),
audioRendererEventListener,
audioSink);
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, fakeClock);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150_000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
mediaCodecAudioRenderer.start();
for (int i = 0; i < 10; i++) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, fakeClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
// Simulate playback progressing between render() and getDurationToProgressUs call
long rendererPositionElapsedRealtimeUs = fakeClock.elapsedRealtime() * 1000;
fakeClock.advanceTime(/* timeDiffMs= */ 10);
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
assertThat(durationToProgressUs).isEqualTo(65_000L);
}
@Test
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
throws Exception {
@@ -1076,8 +905,58 @@ public class MediaCodecAudioRendererTest {
}
@Test
public void getDurationToProgressUs_afterRenderToEndOfStream_returnsCalculatedDuration()
throws Exception {
public void
getDurationToProgressUs_withAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
throws Exception {
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150_000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
mediaCodecAudioRenderer.start();
for (int i = 0; i < 10; i++) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(10_000L);
}
@Test
public void
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterRenderToEndOfStream_returnsCalculatedDuration()
throws Exception {
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
@@ -1146,6 +1025,78 @@ public class MediaCodecAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
throws Exception {
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
new DefaultMediaCodecAdapterFactory(
ApplicationProvider.getApplicationContext(),
() -> {
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
return callbackThread;
},
() -> {
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
return queueingThread;
}),
mediaCodecSelector,
/* enableDecoderFallback= */ false,
new Handler(Looper.getMainLooper()),
audioRendererEventListener,
audioSink) {
@Override
protected void renderToEndOfStream() throws ExoPlaybackException {
super.renderToEndOfStream();
hasCalledRenderToEndOfStream.set(true);
}
};
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
mediaCodecAudioRenderer.start();
mediaCodecAudioRenderer.setCurrentStreamFinal();
while (!hasCalledRenderToEndOfStream.get()) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);