Add AudioSink#setOffloadDelayPadding for gapless Opus playback in offload

Created a new method, `AudioSink#setOffloadDelayPadding`, that sets delay and padding data on the underlying `AudioTrack` via `AudioTrack#setOffloadDelayPadding`. This adds support for offloaded, gapless Opus playback, which requires the ability to set padding data after `AudioSink#configure` because the padding is carried in sample supplemental data rather than in the initial format.

PiperOrigin-RevId: 562518193
michaelkatz 2023-09-04 04:00:01 -07:00 committed by Copybara-Service
parent 2bdda35731
commit 9d58788d86
8 changed files with 139 additions and 11 deletions


@@ -20,7 +20,7 @@
* Track Selection:
* Extractors:
* Audio:
* Audio Offload:
* Add support for Opus gapless metadata during offload playback.
* Video:
* Text:
* Metadata:


@@ -523,11 +523,18 @@ public interface AudioSink {
/**
* Sets audio offload mode, if possible. Enabling offload is only possible if the sink is based on
* a platform {@link AudioTrack}, and requires platform API version 29 onwards.
*
* @throws IllegalStateException Thrown if enabling offload on platform API version < 29.
*/
@RequiresApi(29)
default void setOffloadMode(@OffloadMode int offloadMode) {}
/**
* Sets offload delay padding on the {@link AudioTrack}, if possible. Setting the offload delay
* padding is only possible if the sink is based on a platform {@link AudioTrack} in offload mode.
* Also requires platform API version 29 onwards.
*/
@RequiresApi(29)
default void setOffloadDelayPadding(int delayInFrames, int paddingInFrames) {}
/**
* Sets the playback volume.
*

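For illustration, a minimal caller-side sketch of the new interface method (not part of this change): `sink` is an `AudioSink` configured for offload, and `preSkipFrames`/`paddingFrames` are assumed to have been derived from the Opus metadata.

// Hypothetical caller: forward the Opus pre-skip (delay) and trailing padding, in frames,
// to the sink once they are known. The interface default is a no-op, and the method is
// @RequiresApi(29), so the call is guarded the same way the renderer guards it.
if (Util.SDK_INT >= 29) {
  sink.setOffloadDelayPadding(/* delayInFrames= */ preSkipFrames, /* paddingInFrames= */ paddingFrames);
}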

@@ -1356,12 +1356,24 @@ public final class DefaultAudioSink implements AudioSink {
}
}
@RequiresApi(29)
@Override
public void setOffloadMode(@OffloadMode int offloadMode) {
Assertions.checkState(Util.SDK_INT >= 29);
this.offloadMode = offloadMode;
}
@RequiresApi(29)
@Override
public void setOffloadDelayPadding(int delayInFrames, int paddingInFrames) {
if (audioTrack != null
&& isOffloadedPlayback(audioTrack)
&& configuration != null
&& configuration.enableOffloadGapless) {
audioTrack.setOffloadDelayPadding(delayInFrames, paddingInFrames);
}
}
@Override
public void setVolume(float volume) {
if (this.volume != volume) {

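For context, `DefaultAudioSink` only applies the values when an offloaded `AudioTrack` is active and gapless offload was enabled at configure time; the platform call it forwards to is `AudioTrack#setOffloadDelayPadding` (API 29+). A minimal sketch of the platform-level equivalent, with placeholder frame counts:

// Tell an offloaded platform AudioTrack how many frames to trim at the start (delay)
// and at the end (padding) of the stream. The frame counts here are placeholders.
if (Util.SDK_INT >= 29 && audioTrack.isOffloadedPlayback()) {
  audioTrack.setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
}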

@@ -173,10 +173,17 @@ public class ForwardingAudioSink implements AudioSink {
}
@Override
@RequiresApi(29)
public void setOffloadMode(@OffloadMode int offloadMode) {
sink.setOffloadMode(offloadMode);
}
@Override
@RequiresApi(29)
public void setOffloadDelayPadding(int delayInFrames, int paddingInFrames) {
sink.setOffloadDelayPadding(delayInFrames, paddingInFrames);
}
@Override
public void setVolume(float volume) {
sink.setVolume(volume);


@@ -45,6 +45,7 @@ import androidx.media3.common.util.Log;
import androidx.media3.common.util.MediaFormatUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.DecoderReuseEvaluation;
import androidx.media3.exoplayer.DecoderReuseEvaluation.DecoderDiscardReasons;
import androidx.media3.exoplayer.ExoPlaybackException;
@@ -64,7 +65,9 @@ import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil.DecoderQueryException;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.Objects;
/**
* Decodes and renders audio using {@link MediaCodec} and an {@link AudioSink}.
@@ -820,6 +823,22 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
}
@Override
protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) {
if (Util.SDK_INT >= 29
&& buffer.format != null
&& Objects.equals(buffer.format.sampleMimeType, MimeTypes.AUDIO_OPUS)
&& isBypassEnabled()) {
ByteBuffer data = checkNotNull(buffer.supplementalData);
int preSkip = checkNotNull(buffer.format).encoderDelay;
if (data.remaining() == 8) {
int discardSamples =
(int) ((data.order(ByteOrder.LITTLE_ENDIAN).getLong() * 48_000L) / C.NANOS_PER_SECOND);
audioSink.setOffloadDelayPadding(preSkip, discardSamples);
}
}
}
/**
* Returns a maximum input size suitable for configuring a codec for {@code format} in a way that
* will allow possible adaptation to other compatible formats in {@code streamFormats}.

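The arithmetic above converts the 8-byte little-endian supplemental payload (a duration in nanoseconds) into a frame count at Opus's canonical 48 kHz output rate. A worked sketch using the duration encoded in the unit test below:

// 2_763_786 ns of trailing audio to discard, converted at 48 000 frames per second:
long discardDurationNs = 2_763_786L;  // little-endian value of the test sample's supplemental bytes
int paddingInFrames = (int) ((discardDurationNs * 48_000L) / C.NANOS_PER_SECOND);  // = 132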

@@ -77,6 +77,7 @@ import androidx.media3.exoplayer.source.MediaPeriod;
import androidx.media3.exoplayer.source.SampleStream;
import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
import androidx.media3.exoplayer.source.SampleStream.ReadFlags;
import androidx.media3.extractor.OpusUtil;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -85,6 +86,7 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
import java.util.List;
import java.util.Objects;
/** An abstract renderer that uses {@link MediaCodec} to decode samples for rendering. */
//
@@ -2334,16 +2336,28 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
if (waitingForFirstSampleInFormat) {
// This is the first buffer in a new format, the output format must be updated.
outputFormat = checkNotNull(inputFormat);
if (Objects.equals(outputFormat.sampleMimeType, MimeTypes.AUDIO_OPUS)
&& !outputFormat.initializationData.isEmpty()) {
// Format mimetype is Opus so format should be updated with preSkip data.
// TODO(b/298634018): Adjust encoderDelay value based on starting position.
int numberPreSkipSamples =
OpusUtil.getPreSkipSamples(outputFormat.initializationData.get(0));
outputFormat = outputFormat.buildUpon().setEncoderDelay(numberPreSkipSamples).build();
}
onOutputFormatChanged(outputFormat, /* mediaFormat= */ null);
waitingForFirstSampleInFormat = false;
}
// Try to append the buffer to the batch buffer.
bypassSampleBuffer.flip();
if (inputFormat != null
&& inputFormat.sampleMimeType != null
&& inputFormat.sampleMimeType.equals(MimeTypes.AUDIO_OPUS)) {
oggOpusAudioPacketizer.packetize(bypassSampleBuffer, inputFormat.initializationData);
if (outputFormat != null
&& Objects.equals(outputFormat.sampleMimeType, MimeTypes.AUDIO_OPUS)) {
if (bypassSampleBuffer.hasSupplementalData()) {
// Set format on sample buffer so that it contains the MIME type and encoderDelay.
bypassSampleBuffer.format = outputFormat;
handleInputBufferSupplementalData(bypassSampleBuffer);
}
oggOpusAudioPacketizer.packetize(bypassSampleBuffer, outputFormat.initializationData);
}
if (!haveBypassBatchBufferAndNewSampleSameDecodeOnlyState()
|| !bypassBatchBuffer.append(bypassSampleBuffer)) {


@@ -18,6 +18,7 @@ package androidx.media3.exoplayer.audio;
import static androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem.END_OF_STREAM_ITEM;
import static androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem.format;
import static androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem.oneByteSample;
import static androidx.media3.test.utils.FakeSampleStream.FakeSampleStreamItem.sample;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
@@ -50,6 +51,7 @@ import androidx.media3.exoplayer.mediacodec.MediaCodecInfo;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.upstream.DefaultAllocator;
import androidx.media3.test.utils.FakeSampleStream;
import androidx.media3.test.utils.TestUtil;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
@@ -63,12 +65,21 @@ import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import org.robolectric.annotation.Config;
/** Unit tests for {@link MediaCodecAudioRenderer} */
@RunWith(AndroidJUnit4.class)
public class MediaCodecAudioRendererTest {
@Rule public final MockitoRule mockito = MockitoJUnit.rule();
/** Payload for Ogg ID Header Page in accordance with RFC 7845. */
private static final byte[] OGG_OPUS_ID_HEADER_PAYLOAD =
TestUtil.createByteArray(
0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64, 0x1, 0x2, 0x38, 0x1, 0x80, 0xBB, 0x0, 0x0,
0x0, 0x0, 0x0);
private static final byte[] SAMPLE_ONE_BYTE_WITH_SUPPLEMENTAL_DATA =
TestUtil.createByteArray(0x0, 0x0, 0x0, 0x1, 0x64, 0xA, 0x2C, 0x2A, 0x0, 0x0, 0x0, 0x0, 0x0);
private static final Format AUDIO_AAC =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
@@ -78,11 +89,23 @@ public class MediaCodecAudioRendererTest {
.setEncoderDelay(100)
.setEncoderPadding(150)
.build();
private static final Format AUDIO_OPUS =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_OPUS)
.setChannelCount(2)
.setSampleRate(48000)
.setInitializationData(ImmutableList.of(OGG_OPUS_ID_HEADER_PAYLOAD))
.build();
private static final AudioOffloadSupport AUDIO_OFFLOAD_SUPPORTED_GAPLESS_NOT_SUPPORTED =
new AudioOffloadSupport.Builder()
.setIsFormatSupported(true)
.setIsGaplessSupported(false)
.build();
private static final AudioOffloadSupport AUDIO_OFFLOAD_SUPPORTED_GAPLESS_SUPPORTED =
new AudioOffloadSupport.Builder()
.setIsFormatSupported(true)
.setIsGaplessSupported(true)
.build();
private static final RendererConfiguration
RENDERER_CONFIGURATION_OFFLOAD_ENABLED_GAPLESS_REQUIRED =
new RendererConfiguration(
@@ -639,6 +662,46 @@ public class MediaCodecAudioRendererTest {
verify(audioSink).setOffloadMode(AudioSink.OFFLOAD_MODE_DISABLED);
}
@Test
@Config(minSdk = 30)
public void render_offloadedOpusWithPadding_callsSetOffloadDelayPadding() throws Exception {
when(audioSink.getFormatOffloadSupport(any()))
.thenReturn(AUDIO_OFFLOAD_SUPPORTED_GAPLESS_SUPPORTED);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_OPUS,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100, C.BUFFER_FLAG_KEY_FRAME),
sample(
/* timeUs= */ 150,
C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA,
SAMPLE_ONE_BYTE_WITH_SUPPLEMENTAL_DATA),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RENDERER_CONFIGURATION_OFFLOAD_ENABLED_GAPLESS_REQUIRED,
new Format[] {AUDIO_OPUS},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0);
mediaCodecAudioRenderer.setCurrentStreamFinal();
while (!mediaCodecAudioRenderer.isEnded()) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, /* elapsedRealtimeUs= */ 0);
}
verify(audioSink).setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
}
private static Format getAudioSinkFormat(Format inputFormat) {
return new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_RAW)

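For reference, the delay of 312 frames expected in the final verify comes straight from the pre-skip field of OGG_OPUS_ID_HEADER_PAYLOAD (bytes 10 and 11, little-endian), and the padding of 132 frames follows from the nanosecond conversion sketched after the renderer change. A quick check of the delay value:

int expectedDelayInFrames = (0x01 << 8) | 0x38;  // pre-skip bytes of the ID header constant = 312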

@@ -139,6 +139,16 @@ public class OpusUtil {
return getPacketDurationUs(buffer[0], buffer.length > 1 ? buffer[1] : 0);
}
/**
* Returns the number of pre-skip samples specified by the given Opus codec initialization data.
*
* @param header The Opus Identification header.
* @return The number of pre-skip samples.
*/
public static int getPreSkipSamples(byte[] header) {
return ((header[11] & 0xFF) << 8) | (header[10] & 0xFF);
}
private static long getPacketDurationUs(byte packetByte0, byte packetByte1) {
// See RFC6716, Sections 3.1 and 3.2.
int toc = packetByte0 & 0xFF;
@@ -171,10 +181,6 @@
return (long) frames * frameDurationUs;
}
private static int getPreSkipSamples(byte[] header) {
return ((header[11] & 0xFF) << 8) | (header[10] & 0xFF);
}
private static byte[] buildNativeOrderByteArray(long value) {
return ByteBuffer.allocate(8).order(ByteOrder.nativeOrder()).putLong(value).array();
}
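For illustration, a small sketch of how the now-public helper is used; `opusFormat` is an assumed `Format` whose first initialization-data entry is the RFC 7845 identification header:

// The identification header starts with the 8-byte "OpusHead" magic, followed by the
// version and channel count; the pre-skip sample count is a little-endian uint16 at
// bytes 10-11, which is exactly what getPreSkipSamples reads.
byte[] idHeader = opusFormat.initializationData.get(0);
int preSkipFrames = OpusUtil.getPreSkipSamples(idHeader);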