Schedule ExoPlayer work to when MediaCodecAudioRenderer can progress

Currently, ExoPlayer schedules its main work loop on a 10 ms interval. When renderers cannot make any more progress (e.g. the hardware buffers are already fully written with audio data), ExoPlayer should be able to schedule the next work task more than 10 ms into the future.

Through `experimentalSetDynamicSchedulingEnabled` and these changes to `MediaCodecAudioRenderer`, ExoPlayer can use the data provided by the audio renderer to dynamically schedule its work tasks for the time at which progress is next expected to be possible.
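
For reference, opting in on the player side looks roughly like the following. This is a minimal sketch: `experimentalSetDynamicSchedulingEnabled` is the `ExoPlayer.Builder` opt-in named above, while the wrapper class and method are illustrative only.

import android.content.Context;
import androidx.media3.exoplayer.ExoPlayer;

final class DynamicSchedulingExample {
  // Minimal sketch: build a player that opts in to dynamic scheduling of the
  // main work loop. Without the flag, ExoPlayer keeps its fixed 10 ms interval.
  static ExoPlayer buildPlayer(Context context) {
    return new ExoPlayer.Builder(context)
        .experimentalSetDynamicSchedulingEnabled(true)
        .build();
  }
}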

PiperOrigin-RevId: 638677454
Authored by michaelkatz on 2024-05-30 09:20:47 -07:00, committed by Copybara-Service
parent e879c4ac43
commit 43f719fbb2
4 changed files with 213 additions and 0 deletions

RELEASENOTES.md

@@ -91,6 +91,9 @@
different audio formats (for example stereo to mono) can cause the
processor to throw an exception
([#1352](https://github.com/androidx/media/issues/1352)).
* Implement `MediaCodecAudioRenderer.getDurationToProgressUs` so that
ExoPlayer will dynamically schedule its main work loop to when the
MediaCodecAudioRenderer can make progress.
* Video:
  * Fix decoder fallback logic for Dolby Vision to use a compatible AV1
    decoder if needed

MediaCodecAudioRenderer.java

@@ -123,6 +123,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
@Nullable private WakeupListener wakeupListener;
private boolean hasPendingReportedSkippedSilence;
private int rendererPriority;
private boolean isStarted;
private long nextBufferToWritePresentationTimeUs;
/**
 * @param context A context.
@@ -263,6 +265,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
this.audioSink = audioSink;
rendererPriority = C.PRIORITY_PLAYBACK;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
audioSink.setListener(new AudioSinkListener());
}
@@ -476,6 +479,23 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
return this;
}
@Override
public long getDurationToProgressUs(long positionUs, long elapsedRealtimeUs) {
if (nextBufferToWritePresentationTimeUs != C.TIME_UNSET) {
long durationUs =
(long)
((nextBufferToWritePresentationTimeUs - positionUs)
/ (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
/ 2);
if (isStarted) {
// Account for the elapsed time since the start of this iteration of the rendering loop.
durationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
}
return max(DEFAULT_DURATION_TO_PROGRESS_US, durationUs);
}
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
@Override
protected float getCodecOperatingRateV23(
    float targetPlaybackSpeed, Format format, Format[] streamFormats) {
@@ -627,11 +647,13 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
protected void onStarted() {
super.onStarted();
audioSink.play();
isStarted = true;
}
@Override
protected void onStopped() {
updateCurrentPosition();
isStarted = false;
audioSink.pause();
super.onStopped();
}
@@ -725,6 +747,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
Format format)
throws ExoPlaybackException {
checkNotNull(buffer);
// Reset nextBufferToWritePresentationTimeUs to its default value C.TIME_UNSET in case
// the buffer is skipped, dropped, or written.
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
if (decryptOnlyCodecFormat != null
    && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
@@ -771,6 +796,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
}
decoderCounters.renderedOutputBufferCount += sampleCount;
return true;
} else {
// Downstream buffers are full, so set nextBufferToWritePresentationTimeUs to the
// presentation time of the current 'to be written' sample.
nextBufferToWritePresentationTimeUs = bufferPresentationTimeUs;
}
return false;
@@ -780,6 +809,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
protected void renderToEndOfStream() throws ExoPlaybackException {
try {
audioSink.playToEndOfStream();
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
}
} catch (AudioSink.WriteException e) {
throw createRendererException(
e,
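
To make the scheduling hint above concrete, here is the arithmetic under the same values the new tests use: the sink rejects the buffer with presentation time 150000 us while the playback position is 0 and the playback speed is 1x.

durationUs = (nextBufferToWritePresentationTimeUs - positionUs) / speed / 2
           = (150000 - 0) / 1.0f / 2
           = 75000

Halving the remaining duration appears to act as a safety margin so the work loop wakes well before the sink underruns; at 2x speed the same numbers give 37500 us. When the renderer is started, the time already elapsed in the current render-loop iteration is subtracted as well, so advancing the clock 10 ms before the call yields 75000 - 10000 = 65000 us. The result is floored at DEFAULT_DURATION_TO_PROGRESS_US.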

MediaCodecRenderer.java

@@ -1706,6 +1706,18 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
return 0;
}
/**
* Returns the presentation time of the last buffer in the stream.
*
* <p>If the last buffer has not yet been read off the sample queue then the return value will be
* {@link C#TIME_UNSET}.
*
* @return The presentation time of the last buffer in the stream.
*/
protected long getLastBufferInStreamPresentationTimeUs() {
return lastBufferInStreamPresentationTimeUs;
}
/**
* Called when an output buffer is successfully processed.
*
@@ -2375,12 +2387,23 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
onInputFormatChanged(formatHolder);
return;
case C.RESULT_NOTHING_READ:
if (hasReadStreamToEnd()) {
// Notify output queue of the last buffer's timestamp.
lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
}
return;
case C.RESULT_BUFFER_READ:
if (bypassSampleBuffer.isEndOfStream()) {
inputStreamEnded = true;
lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
return;
}
largestQueuedPresentationTimeUs =
max(largestQueuedPresentationTimeUs, bypassSampleBuffer.timeUs);
if (hasReadStreamToEnd() || buffer.isLastSample()) {
// Notify output queue of the last buffer's timestamp.
lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
}
if (waitingForFirstSampleInFormat) {
// This is the first buffer in a new format, the output format must be updated.
outputFormat = checkNotNull(inputFormat);
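
Taken together with the renderer change above: the bypass path now records the largest queued presentation time and, once the stream has been read to its end, publishes it through `getLastBufferInStreamPresentationTimeUs()`. That lets `MediaCodecAudioRenderer.renderToEndOfStream` keep reporting a meaningful `nextBufferToWritePresentationTimeUs` while the sink drains, so dynamic scheduling stays effective through end of stream.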

MediaCodecAudioRendererTest.java

@@ -24,6 +24,7 @@ import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.longThat;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.verify;
@@ -39,6 +40,7 @@ import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.PlaybackParameters;
import androidx.media3.common.util.Clock;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.RendererCapabilities;
@@ -51,6 +53,7 @@ import androidx.media3.exoplayer.mediacodec.MediaCodecInfo;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.upstream.DefaultAllocator;
import androidx.media3.test.utils.FakeClock;
import androidx.media3.test.utils.FakeSampleStream;
import androidx.media3.test.utils.TestUtil;
import androidx.test.core.app.ApplicationProvider;
@@ -717,6 +720,158 @@ public class MediaCodecAudioRendererTest {
verify(audioSink).setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
}
@Test
public void getDurationToProgressUs_withAudioSinkBuffersFull_returnsCalculatedDuration()
throws Exception {
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
mediaCodecAudioRenderer.start();
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(75_000L);
}
@Test
public void
getDurationToProgressUs_withAudioSinkBuffersFullAndDoublePlaybackSpeed_returnsCalculatedDuration()
throws Exception {
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
PlaybackParameters playbackParametersWithDoubleSpeed =
new PlaybackParameters(/* speed= */ 2.0f);
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
mediaCodecAudioRenderer.start();
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(37_500L);
}
@Test
public void
getDurationToProgressUs_withAudioSinkBuffersFullAndPlaybackAdvancement_returnsCalculatedDuration()
throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
mediaCodecSelector,
/* enableDecoderFallback= */ false,
/* eventHandler= */ new Handler(Looper.getMainLooper()),
audioRendererEventListener,
audioSink);
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, fakeClock);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
mediaCodecAudioRenderer.start();
long rendererPositionElapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
mediaCodecAudioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
mediaCodecAudioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
// Simulate playback progressing between render() and getDurationToProgressUs call
fakeClock.advanceTime(/* timeDiffMs= */ 10);
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
assertThat(durationToProgressUs).isEqualTo(65_000L);
}
private static Format getAudioSinkFormat(Format inputFormat) {
return new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_RAW)