Allow writing multiple frames in a buffer

Currently only one access unit can be written per
buffer write. This has been found to be power
inefficient in an offload situation.

#exo-offload

PiperOrigin-RevId: 294886188
This commit is contained in:
krocard 2020-02-13 12:35:41 +00:00 committed by Oliver Woodman
parent 5949cbecc3
commit ebce903aaf
9 changed files with 122 additions and 26 deletions

View File

@ -27,6 +27,9 @@
* Add `DataSpec.Builder` and deprecate most `DataSpec` constructors.
* Add `DataSpec.customData` to allow applications to pass custom data through
`DataSource` chains.
* Move the handling of encoded buffers in audio passthrough from
`AudioSink.handleBuffer` to `AudioSink.handleEncodedBuffer` to allow
passing multiple encoded frames in one buffer.
* Text:
* Parse `<ruby>` and `<rt>` tags in WebVTT subtitles (rendering is coming
later).

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer2.audio;
import android.media.AudioTrack;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
@ -222,7 +223,7 @@ public interface AudioSink {
void handleDiscontinuity();
/**
* Attempts to process data from a {@link ByteBuffer}, starting from its current position and
* Attempts to process PCM data from a {@link ByteBuffer}, starting from its current position and
* ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
* number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
* {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
@ -232,6 +233,9 @@ public interface AudioSink {
* except in the case of an intervening call to {@link #flush()} (or to {@link #configure(int,
* int, int, int, int[], int, int)} that causes the sink to be flushed).
*
* <p>For encoded data (e.g., in passthrough), use {@link #handleEncodedBuffer(ByteBuffer, long,
* int)}.
*
* @param buffer The buffer containing audio data.
* @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
* @return Whether the buffer was handled fully.
@ -241,6 +245,27 @@ public interface AudioSink {
boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException;
/**
* Attempts to process data from a {@link ByteBuffer}, starting from its current position and
* ending at its limit (exclusive).
*
* <p>This method is the same as {@link #handleBuffer(ByteBuffer, long)} for encoded (non-PCM)
* audio. The only difference is that it requires the number of encoded audio access
* units (frames) in the buffer to be passed. This is used in passthrough mode.
*
* @param buffer The buffer containing audio data.
* @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
* @param accessUnitCount The number of encoded access units in the buffer.
* @return Whether the buffer was handled fully.
* @throws InitializationException If an error occurs initializing the sink.
* @throws WriteException If an error occurs writing the audio data.
*/
default boolean handleEncodedBuffer(
ByteBuffer buffer, long presentationTimeUs, @IntRange(from = 1) int accessUnitCount)
throws InitializationException, WriteException {
throw new UnsupportedOperationException();
}
/**
* Processes any remaining data. {@link #isEnded()} will return {@code true} when no data remains.
*

View File

@ -260,6 +260,7 @@ public final class DefaultAudioSink implements AudioSink {
private ByteBuffer[] outputBuffers;
@Nullable private ByteBuffer inputBuffer;
@Nullable private ByteBuffer outputBuffer;
private int outputBufferEncodedAccessUnitCount;
private byte[] preV21OutputBuffer;
private int preV21OutputBufferOffset;
private int drainingAudioProcessorIndex;
@ -559,13 +560,28 @@ public final class DefaultAudioSink implements AudioSink {
}
@Override
@SuppressWarnings("ReferenceEquality")
public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException {
Assertions.checkArgument(configuration.isInputPcm);
return handleBufferInternal(buffer, presentationTimeUs, /* encodedAccessUnitCount= */ 0);
}
@Override
public boolean handleEncodedBuffer(
ByteBuffer buffer, long presentationTimeUs, int accessUnitCount)
throws InitializationException, WriteException {
Assertions.checkArgument(!configuration.isInputPcm);
return handleBufferInternal(buffer, presentationTimeUs, accessUnitCount);
}
@SuppressWarnings("ReferenceEquality")
private boolean handleBufferInternal(
ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount)
throws InitializationException, WriteException {
Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
if (pendingConfiguration != null) {
if (!drainAudioProcessorsToEndOfStream()) {
if (!drainToEndOfStream()) {
// There's still pending data in audio processors to write to the track.
return false;
} else if (!pendingConfiguration.canReuseAudioTrack(configuration)) {
@ -615,7 +631,7 @@ public final class DefaultAudioSink implements AudioSink {
}
if (afterDrainPlaybackParameters != null) {
if (!drainAudioProcessorsToEndOfStream()) {
if (!drainToEndOfStream()) {
// Don't process any more input until draining completes.
return false;
}
@ -641,7 +657,7 @@ public final class DefaultAudioSink implements AudioSink {
startMediaTimeUsNeedsSync = true;
}
if (startMediaTimeUsNeedsSync) {
if (!drainAudioProcessorsToEndOfStream()) {
if (!drainToEndOfStream()) {
// Don't update timing until pending AudioProcessor buffers are completely drained.
return false;
}
@ -660,16 +676,16 @@ public final class DefaultAudioSink implements AudioSink {
if (configuration.isInputPcm) {
submittedPcmBytes += buffer.remaining();
} else {
submittedEncodedFrames += framesPerEncodedSample;
submittedEncodedFrames += framesPerEncodedSample * encodedAccessUnitCount;
}
inputBuffer = buffer;
}
if (configuration.processingEnabled) {
processBuffers(presentationTimeUs);
processBuffers(presentationTimeUs, encodedAccessUnitCount);
} else {
writeBuffer(inputBuffer, presentationTimeUs);
writeBuffer(inputBuffer, presentationTimeUs, encodedAccessUnitCount);
}
if (!inputBuffer.hasRemaining()) {
@ -686,14 +702,15 @@ public final class DefaultAudioSink implements AudioSink {
return false;
}
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
private void processBuffers(long avSyncPresentationTimeUs, int encodedAccessUnitCount)
throws WriteException {
int count = activeAudioProcessors.length;
int index = count;
while (index >= 0) {
ByteBuffer input = index > 0 ? outputBuffers[index - 1]
: (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
if (index == count) {
writeBuffer(input, avSyncPresentationTimeUs);
writeBuffer(input, avSyncPresentationTimeUs, encodedAccessUnitCount);
} else {
AudioProcessor audioProcessor = activeAudioProcessors[index];
audioProcessor.queueInput(input);
@ -717,7 +734,9 @@ public final class DefaultAudioSink implements AudioSink {
}
@SuppressWarnings("ReferenceEquality")
private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
private void writeBuffer(
ByteBuffer buffer, long avSyncPresentationTimeUs, int encodedAccessUnitCount)
throws WriteException {
if (!buffer.hasRemaining()) {
return;
}
@ -725,6 +744,7 @@ public final class DefaultAudioSink implements AudioSink {
Assertions.checkArgument(outputBuffer == buffer);
} else {
outputBuffer = buffer;
outputBufferEncodedAccessUnitCount = encodedAccessUnitCount;
if (Util.SDK_INT < 21) {
int bytesRemaining = buffer.remaining();
if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
@ -768,21 +788,22 @@ public final class DefaultAudioSink implements AudioSink {
}
if (bytesWritten == bytesRemaining) {
if (!configuration.isInputPcm) {
writtenEncodedFrames += framesPerEncodedSample;
writtenEncodedFrames += framesPerEncodedSample * encodedAccessUnitCount;
}
outputBuffer = null;
outputBufferEncodedAccessUnitCount = 0;
}
}
@Override
public void playToEndOfStream() throws WriteException {
if (!handledEndOfStream && isInitialized() && drainAudioProcessorsToEndOfStream()) {
if (!handledEndOfStream && isInitialized() && drainToEndOfStream()) {
playPendingData();
handledEndOfStream = true;
}
}
private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
private boolean drainToEndOfStream() throws WriteException {
boolean audioProcessorNeedsEndOfStream = false;
if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
drainingAudioProcessorIndex =
@ -794,7 +815,8 @@ public final class DefaultAudioSink implements AudioSink {
if (audioProcessorNeedsEndOfStream) {
audioProcessor.queueEndOfStream();
}
processBuffers(C.TIME_UNSET);
// Audio processors always operate on PCM, so there are no encoded access units to count.
processBuffers(C.TIME_UNSET, /* encodedAccessUnitCount= */ 0);
if (!audioProcessor.isEnded()) {
return false;
}
@ -804,7 +826,7 @@ public final class DefaultAudioSink implements AudioSink {
// Finish writing any remaining output to the track.
if (outputBuffer != null) {
writeBuffer(outputBuffer, C.TIME_UNSET);
writeBuffer(outputBuffer, C.TIME_UNSET, outputBufferEncodedAccessUnitCount);
if (outputBuffer != null) {
return false;
}
@ -957,6 +979,7 @@ public final class DefaultAudioSink implements AudioSink {
flushAudioProcessors();
inputBuffer = null;
outputBuffer = null;
outputBufferEncodedAccessUnitCount = 0;
stoppedAudioTrack = false;
handledEndOfStream = false;
drainingAudioProcessorIndex = C.INDEX_UNSET;

View File

@ -79,6 +79,13 @@ public class ForwardingAudioSink implements AudioSink {
return sink.handleBuffer(buffer, presentationTimeUs);
}
@Override
public boolean handleEncodedBuffer(
ByteBuffer buffer, long presentationTimeUs, int accessUnitCount)
throws InitializationException, WriteException {
return sink.handleEncodedBuffer(buffer, presentationTimeUs, accessUnitCount);
}
@Override
public void playToEndOfStream() throws WriteException {
sink.playToEndOfStream();

View File

@ -90,6 +90,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
@C.Encoding private int audioSinkEncoding;
/**
* @param context A context.
* @param mediaCodecSelector A decoder selector.
@ -588,6 +590,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
// TODO(internal: b/145658993) Use outputFormat instead.
throw createRendererException(e, inputFormat);
}
audioSinkEncoding = encoding;
}
/**
@ -751,6 +754,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
ByteBuffer buffer,
int bufferIndex,
int bufferFlags,
int sampleCount,
long bufferPresentationTimeUs,
boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
@ -776,16 +780,25 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return true;
}
boolean fullyConsumed;
try {
if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
if (Util.isEncodingLinearPcm(audioSinkEncoding)) {
fullyConsumed = audioSink.handleBuffer(buffer, bufferPresentationTimeUs);
} else {
fullyConsumed =
audioSink.handleEncodedBuffer(buffer, bufferPresentationTimeUs, sampleCount);
}
} catch (AudioSink.InitializationException | AudioSink.WriteException e) {
// TODO(internal: b/145658993) Use outputFormat instead.
throw createRendererException(e, inputFormat);
}
if (fullyConsumed) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
return false;
}

View File

@ -1738,6 +1738,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
outputBuffer,
outputIndex,
outputBufferInfo.flags,
/* sampleCount= */ 1,
outputBufferInfo.presentationTimeUs,
isDecodeOnlyOutputBuffer,
isLastOutputBuffer,
@ -1759,6 +1760,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
outputBuffer,
outputIndex,
outputBufferInfo.flags,
/* sampleCount= */ 1,
outputBufferInfo.presentationTimeUs,
isDecodeOnlyOutputBuffer,
isLastOutputBuffer,
@ -1826,6 +1828,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
* @param buffer The output buffer to process.
* @param bufferIndex The index of the output buffer.
* @param bufferFlags The flags attached to the output buffer.
* @param sampleCount The number of samples extracted from the sample queue in the buffer. This
* allows handling multiple samples as a batch for efficiency.
* @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds.
* @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY}
* by the source.
@ -1841,6 +1845,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
ByteBuffer buffer,
int bufferIndex,
int bufferFlags,
int sampleCount,
long bufferPresentationTimeUs,
boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
@ -1897,7 +1902,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
/**
* Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link
* #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, boolean,
* #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, int, long, boolean, boolean,
* Format)} to get the playback position with respect to the media.
*/
protected final long getOutputStreamOffsetUs() {

View File

@ -790,6 +790,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
ByteBuffer buffer,
int bufferIndex,
int bufferFlags,
int sampleCount,
long bufferPresentationTimeUs,
boolean isDecodeOnlyBuffer,
boolean isLastBuffer,

View File

@ -155,10 +155,11 @@ import java.util.ArrayList;
protected boolean processOutputBuffer(
long positionUs,
long elapsedRealtimeUs,
MediaCodec codec,
@Nullable MediaCodec codec,
ByteBuffer buffer,
int bufferIndex,
int bufferFlags,
int sampleCount,
long bufferPresentationTimeUs,
boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
@ -177,6 +178,7 @@ import java.util.ArrayList;
buffer,
bufferIndex,
bufferFlags,
sampleCount,
bufferPresentationTimeUs,
isDecodeOnlyBuffer,
isLastBuffer,

View File

@ -80,9 +80,26 @@ public final class CapturingAudioSink extends ForwardingAudioSink implements Dum
}
@Override
@SuppressWarnings("ReferenceEquality")
public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException {
interceptBuffer(buffer, presentationTimeUs);
boolean fullyConsumed = super.handleBuffer(buffer, presentationTimeUs);
updateCurrentBuffer(fullyConsumed);
return fullyConsumed;
}
@Override
public boolean handleEncodedBuffer(
ByteBuffer buffer, long presentationTimeUs, int accessUnitCount)
throws InitializationException, WriteException {
interceptBuffer(buffer, presentationTimeUs);
boolean fullyConsumed = super.handleEncodedBuffer(buffer, presentationTimeUs, accessUnitCount);
updateCurrentBuffer(fullyConsumed);
return fullyConsumed;
}
@SuppressWarnings("ReferenceEquality")
private void interceptBuffer(ByteBuffer buffer, long presentationTimeUs) {
// handleBuffer/handleEncodedBuffer are called repeatedly with the same buffer until it's been
// fully consumed by the sink. We only want to dump each buffer once, and we need to do so
// before the sink being forwarded to has a chance to modify its position.
@ -90,13 +107,13 @@ public final class CapturingAudioSink extends ForwardingAudioSink implements Dum
interceptedData.add(new DumpableBuffer(buffer, presentationTimeUs));
currentBuffer = buffer;
}
boolean fullyConsumed = super.handleBuffer(buffer, presentationTimeUs);
}
private void updateCurrentBuffer(boolean fullyConsumed) {
if (fullyConsumed) {
currentBuffer = null;
}
return fullyConsumed;
}
@Override
public void flush() {
currentBuffer = null;