Separate TransformerAudioRenderer and new AudioSamplePipeline.

`TransformerAudioRenderer` reads input and passes `DecoderInputBuffer`s
to the `AudioSamplePipeline`. The `AudioSamplePipeline` handles all
steps from decoding to encoding. `TransformerAudioRenderer` receives
`DecoderInputBuffer`s from the `AudioSamplePipeline` and passes their
data to the muxer.

`AudioSamplePipeline` implements a new interface `SamplePipeline`.
A pass-through pipeline will be added in a future CL.
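
In code, the new division of responsibility boils down to the render loop below. This is copied from `TransformerAudioRenderer.render()` in this diff and annotated; the renderer only moves buffers, while all decoding, optional Sonic processing, and encoding happen inside the `SamplePipeline` implementation (here, `AudioSamplePipeline`).

```java
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
  if (!isRendererStarted || isEnded() || !ensureRendererConfigured()) {
    return;
  }
  while (feedMuxerFromPipeline()        // drain pipeline output buffers to the muxer
      || samplePipeline.processData()   // decode -> (optional Sonic) -> encode
      || feedPipelineFromInput()) {}    // read source samples into the pipeline's input buffer
}
```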

PiperOrigin-RevId: 407555102
Authored by hschlueter on 2021-11-04 12:06:10 +00:00; committed by Ian Baker
parent 49271ec74f
commit b87f26490b
5 changed files with 612 additions and 373 deletions

View File

@@ -285,30 +285,6 @@ sample:
size = 230
isKeyFrame = true
presentationTimeUs = 67627
sample:
trackIndex = 1
dataHashCode = -1830836678
size = 1051
isKeyFrame = false
presentationTimeUs = 500500
sample:
trackIndex = 1
dataHashCode = 1767407540
size = 874
isKeyFrame = false
presentationTimeUs = 467133
sample:
trackIndex = 1
dataHashCode = 918440283
size = 781
isKeyFrame = false
presentationTimeUs = 533866
sample:
trackIndex = 1
dataHashCode = -1408463661
size = 4725
isKeyFrame = false
presentationTimeUs = 700700
sample:
trackIndex = 0
dataHashCode = -997198863
@@ -399,6 +375,30 @@ sample:
size = 6
isKeyFrame = true
presentationTimeUs = 107644
sample:
trackIndex = 1
dataHashCode = -1830836678
size = 1051
isKeyFrame = false
presentationTimeUs = 500500
sample:
trackIndex = 1
dataHashCode = 1767407540
size = 874
isKeyFrame = false
presentationTimeUs = 467133
sample:
trackIndex = 1
dataHashCode = 918440283
size = 781
isKeyFrame = false
presentationTimeUs = 533866
sample:
trackIndex = 1
dataHashCode = -1408463661
size = 4725
isKeyFrame = false
presentationTimeUs = 700700
sample:
trackIndex = 1
dataHashCode = 1569455924

View File

@@ -132,64 +132,148 @@ sample:
presentationTimeUs = 0
sample:
trackIndex = 0
dataHashCode = -833872563
size = 1732
dataHashCode = 1000136444
size = 140
isKeyFrame = true
presentationTimeUs = 416
sample:
trackIndex = 0
dataHashCode = -135901925
size = 380
dataHashCode = 217961709
size = 172
isKeyFrame = true
presentationTimeUs = 36499
presentationTimeUs = 3332
sample:
trackIndex = 0
dataHashCode = -879376936
size = 176
isKeyFrame = true
presentationTimeUs = 6915
sample:
trackIndex = 0
dataHashCode = 1259979587
size = 192
isKeyFrame = true
presentationTimeUs = 10581
sample:
trackIndex = 0
dataHashCode = 907407225
size = 188
isKeyFrame = true
presentationTimeUs = 14581
sample:
trackIndex = 0
dataHashCode = -904354707
size = 176
isKeyFrame = true
presentationTimeUs = 18497
sample:
trackIndex = 0
dataHashCode = 1001385853
size = 172
isKeyFrame = true
presentationTimeUs = 22163
sample:
trackIndex = 0
dataHashCode = 1545716086
size = 196
isKeyFrame = true
presentationTimeUs = 25746
sample:
trackIndex = 0
dataHashCode = 358710839
size = 180
isKeyFrame = true
presentationTimeUs = 29829
sample:
trackIndex = 0
dataHashCode = -671124798
size = 140
isKeyFrame = true
presentationTimeUs = 33579
sample:
trackIndex = 0
dataHashCode = -945404910
size = 120
isKeyFrame = true
presentationTimeUs = 36495
sample:
trackIndex = 0
dataHashCode = 1881048379
size = 88
isKeyFrame = true
presentationTimeUs = 38995
sample:
trackIndex = 0
dataHashCode = 1059579897
size = 88
isKeyFrame = true
presentationTimeUs = 40828
sample:
trackIndex = 0
dataHashCode = 1496098648
size = 84
isKeyFrame = true
presentationTimeUs = 42661
sample:
trackIndex = 0
dataHashCode = 250093960
size = 751
isKeyFrame = true
presentationTimeUs = 44415
presentationTimeUs = 44411
sample:
trackIndex = 0
dataHashCode = 1895536226
size = 1045
isKeyFrame = true
presentationTimeUs = 59998
presentationTimeUs = 59994
sample:
trackIndex = 0
dataHashCode = 1723596464
size = 947
isKeyFrame = true
presentationTimeUs = 81748
presentationTimeUs = 81744
sample:
trackIndex = 0
dataHashCode = -978803114
size = 946
isKeyFrame = true
presentationTimeUs = 101414
presentationTimeUs = 101410
sample:
trackIndex = 0
dataHashCode = 387377078
size = 946
isKeyFrame = true
presentationTimeUs = 121080
presentationTimeUs = 121076
sample:
trackIndex = 0
dataHashCode = -132658698
size = 901
isKeyFrame = true
presentationTimeUs = 140746
presentationTimeUs = 140742
sample:
trackIndex = 0
dataHashCode = 1495036471
size = 899
isKeyFrame = true
presentationTimeUs = 159496
presentationTimeUs = 159492
sample:
trackIndex = 0
dataHashCode = 304440590
size = 878
isKeyFrame = true
presentationTimeUs = 178162
presentationTimeUs = 178158
sample:
trackIndex = 0
dataHashCode = -1955900344
size = 112
isKeyFrame = true
presentationTimeUs = 196408
sample:
trackIndex = 0
dataHashCode = 88896626
size = 116
isKeyFrame = true
presentationTimeUs = 198741
sample:
trackIndex = 1
dataHashCode = 2139021989
@@ -214,12 +298,6 @@ sample:
size = 1193
isKeyFrame = false
presentationTimeUs = 734083
sample:
trackIndex = 0
dataHashCode = -752661703
size = 228
isKeyFrame = true
presentationTimeUs = 196412
sample:
trackIndex = 1
dataHashCode = -1554795381

View File

@@ -0,0 +1,373 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.min;
import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.audio.AudioProcessor;
import androidx.media3.exoplayer.audio.AudioProcessor.AudioFormat;
import androidx.media3.exoplayer.audio.SonicAudioProcessor;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* Pipeline to decode audio samples, apply transformations on the raw samples, and re-encode them.
*/
@RequiresApi(18)
/* package */ final class AudioSamplePipeline implements SamplePipeline {
private static final String TAG = "AudioSamplePipeline";
private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
private final MediaCodecAdapterWrapper decoder;
private final Format decoderInputFormat;
private final DecoderInputBuffer decoderInputBuffer;
private final SonicAudioProcessor sonicAudioProcessor;
private final SpeedProvider speedProvider;
private final DecoderInputBuffer encoderInputBuffer;
private final DecoderInputBuffer encoderOutputBuffer;
private final Transformation transformation;
private final int rendererIndex;
private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
private @MonotonicNonNull MediaCodecAdapterWrapper encoder;
private long nextEncoderInputBufferTimeUs;
private ByteBuffer sonicOutputBuffer;
private boolean drainingSonicForSpeedChange;
private float currentSpeed;
public AudioSamplePipeline(
Format decoderInputFormat, Transformation transformation, int rendererIndex)
throws ExoPlaybackException {
this.decoderInputFormat = decoderInputFormat;
this.transformation = transformation;
this.rendererIndex = rendererIndex;
decoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
encoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
encoderOutputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
sonicAudioProcessor = new SonicAudioProcessor();
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
speedProvider = new SegmentSpeedProvider(decoderInputFormat);
currentSpeed = speedProvider.getSpeed(0);
try {
this.decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
} catch (IOException e) {
// TODO (internal b/184262323): Assign an adequate error code.
throw ExoPlaybackException.createForRenderer(
e,
TAG,
rendererIndex,
decoderInputFormat,
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false,
PlaybackException.ERROR_CODE_UNSPECIFIED);
}
}
@Override
public void release() {
sonicAudioProcessor.reset();
decoder.release();
if (encoder != null) {
encoder.release();
}
}
@Override
public boolean processData() throws ExoPlaybackException {
if (!ensureEncoderAndAudioProcessingConfigured()) {
return false;
}
if (sonicAudioProcessor.isActive()) {
return feedEncoderFromSonic() || feedSonicFromDecoder();
} else {
return feedEncoderFromDecoder();
}
}
@Override
@Nullable
public DecoderInputBuffer dequeueInputBuffer() {
return decoder.maybeDequeueInputBuffer(decoderInputBuffer) ? decoderInputBuffer : null;
}
@Override
public void queueInputBuffer() {
decoder.queueInputBuffer(decoderInputBuffer);
}
@Override
@Nullable
public Format getOutputFormat() {
return encoder != null ? encoder.getOutputFormat() : null;
}
@Override
public boolean isEnded() {
return encoder != null && encoder.isEnded();
}
@Override
@Nullable
public DecoderInputBuffer getOutputBuffer() {
if (encoder != null) {
encoderOutputBuffer.data = encoder.getOutputBuffer();
if (encoderOutputBuffer.data != null) {
encoderOutputBuffer.timeUs = checkNotNull(encoder.getOutputBufferInfo()).presentationTimeUs;
return encoderOutputBuffer;
}
}
return null;
}
@Override
public void releaseOutputBuffer() {
if (encoder != null) {
encoder.releaseOutputBuffer();
}
}
/**
* Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
* pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat", "encoder"})
private boolean feedEncoderFromDecoder() {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (decoder.isEnded()) {
queueEndOfStreamToEncoder();
return false;
}
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
flushSonicAndSetSpeed(currentSpeed);
return false;
}
feedEncoder(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Attempts to pass audio processor output data to the encoder, and returns whether it may be
* possible to pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat", "encoder"})
private boolean feedEncoderFromSonic() {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (!sonicOutputBuffer.hasRemaining()) {
sonicOutputBuffer = sonicAudioProcessor.getOutput();
if (!sonicOutputBuffer.hasRemaining()) {
if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
queueEndOfStreamToEncoder();
}
return false;
}
}
feedEncoder(sonicOutputBuffer);
return true;
}
/**
* Attempts to process decoder output data, and returns whether it may be possible to process more
* data immediately by calling this method again.
*/
private boolean feedSonicFromDecoder() {
if (drainingSonicForSpeedChange) {
if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
flushSonicAndSetSpeed(currentSpeed);
drainingSonicForSpeedChange = false;
}
return false;
}
// Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
// there is output still to be processed.
if (sonicOutputBuffer.hasRemaining()) {
return false;
}
if (decoder.isEnded()) {
sonicAudioProcessor.queueEndOfStream();
return false;
}
checkState(!sonicAudioProcessor.isEnded());
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
sonicAudioProcessor.queueEndOfStream();
drainingSonicForSpeedChange = true;
return false;
}
sonicAudioProcessor.queueInput(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Feeds as much data as possible between the current position and limit of the specified {@link
* ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
*/
@RequiresNonNull({"encoder", "encoderInputAudioFormat"})
private void feedEncoder(ByteBuffer inputBuffer) {
ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
int bufferLimit = inputBuffer.limit();
inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
encoderInputBufferData.put(inputBuffer);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
nextEncoderInputBufferTimeUs +=
getBufferDurationUs(
/* bytesWritten= */ encoderInputBufferData.position(),
encoderInputAudioFormat.bytesPerFrame,
encoderInputAudioFormat.sampleRate);
encoderInputBuffer.setFlags(0);
encoderInputBuffer.flip();
inputBuffer.limit(bufferLimit);
encoder.queueInputBuffer(encoderInputBuffer);
}
@RequiresNonNull("encoder")
private void queueEndOfStreamToEncoder() {
checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
encoderInputBuffer.flip();
// Queuing EOS should only occur with an empty buffer.
encoder.queueInputBuffer(encoderInputBuffer);
}
/**
* Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
* configured yet, and returns whether they have been configured.
*/
@EnsuresNonNullIf(
expression = {"encoder", "encoderInputAudioFormat"},
result = true)
private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
if (encoder != null && encoderInputAudioFormat != null) {
return true;
}
@Nullable Format decoderOutputFormat = decoder.getOutputFormat();
if (decoderOutputFormat == null) {
return false;
}
AudioFormat outputAudioFormat =
new AudioFormat(
decoderOutputFormat.sampleRate,
decoderOutputFormat.channelCount,
decoderOutputFormat.pcmEncoding);
if (transformation.flattenForSlowMotion) {
try {
outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
flushSonicAndSetSpeed(currentSpeed);
} catch (AudioProcessor.UnhandledAudioFormatException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
}
String audioMimeType =
transformation.audioMimeType == null
? decoderInputFormat.sampleMimeType
: transformation.audioMimeType;
try {
encoder =
MediaCodecAdapterWrapper.createForAudioEncoding(
new Format.Builder()
.setSampleMimeType(audioMimeType)
.setSampleRate(outputAudioFormat.sampleRate)
.setChannelCount(outputAudioFormat.channelCount)
.setAverageBitrate(DEFAULT_ENCODER_BITRATE)
.build());
} catch (IOException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
encoderInputAudioFormat = outputAudioFormat;
return true;
}
private boolean isSpeedChanging(BufferInfo bufferInfo) {
if (!transformation.flattenForSlowMotion) {
return false;
}
float newSpeed = speedProvider.getSpeed(bufferInfo.presentationTimeUs);
boolean speedChanging = newSpeed != currentSpeed;
currentSpeed = newSpeed;
return speedChanging;
}
private void flushSonicAndSetSpeed(float speed) {
sonicAudioProcessor.setSpeed(speed);
sonicAudioProcessor.setPitch(speed);
sonicAudioProcessor.flush();
}
private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
return ExoPlaybackException.createForRenderer(
cause,
TAG,
rendererIndex,
decoderInputFormat,
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false,
errorCode);
}
private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
long framesWritten = bytesWritten / bytesPerFrame;
return framesWritten * C.MICROS_PER_SECOND / sampleRate;
}
}

View File

@@ -0,0 +1,69 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.ExoPlaybackException;
/**
* Pipeline for processing {@link DecoderInputBuffer DecoderInputBuffers}.
*
* <p>This pipeline can be used to implement transformations of audio or video samples.
*/
/* package */ interface SamplePipeline {
/** Returns a buffer if the pipeline is ready to accept input, and {@code null} otherwise. */
@Nullable
DecoderInputBuffer dequeueInputBuffer();
/**
* Informs the pipeline that its input buffer contains new input.
*
* <p>Should be called after filling the input buffer from {@link #dequeueInputBuffer()} with new
* input.
*/
void queueInputBuffer();
/**
* Processes the input data and returns whether more data can be processed by calling this method
* again.
*/
boolean processData() throws ExoPlaybackException;
/** Returns the output format of the pipeline if available, and {@code null} otherwise. */
@Nullable
Format getOutputFormat();
/** Returns an output buffer if the pipeline has produced output, and {@code null} otherwise. */
@Nullable
DecoderInputBuffer getOutputBuffer();
/**
* Releases the pipeline's output buffer.
*
* <p>Should be called when the output buffer from {@link #getOutputBuffer()} is no longer needed.
*/
void releaseOutputBuffer();
/** Returns whether the pipeline has ended. */
boolean isEnded();
/** Releases all resources held by the pipeline. */
void release();
}
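
For orientation, a renderer is expected to drive this interface roughly as follows. This is a condensed sketch based on `feedPipelineFromInput` and `feedMuxerFromPipeline` in `TransformerAudioRenderer` below; it is shown in data-flow order, whereas the real renderer interleaves these calls in a single loop, and reading from the source and writing to the muxer are reduced to comments.

```java
// Feed: dequeue the pipeline's input buffer, fill it, then queue it back.
@Nullable DecoderInputBuffer inputBuffer = samplePipeline.dequeueInputBuffer();
if (inputBuffer != null) {
  // ... fill inputBuffer.data and inputBuffer.timeUs from the upstream SampleStream ...
  samplePipeline.queueInputBuffer();
}

// Process: let the pipeline transform queued input into output.
while (samplePipeline.processData()) {}

// Drain: consume any produced output, then release the buffer back to the pipeline.
@Nullable DecoderInputBuffer outputBuffer = samplePipeline.getOutputBuffer();
if (outputBuffer != null) {
  // ... write outputBuffer.data at outputBuffer.timeUs to the muxer ...
  samplePipeline.releaseOutputBuffer();
}
```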

View File

@@ -17,25 +17,16 @@
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static java.lang.Math.min;
import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.FormatHolder;
import androidx.media3.exoplayer.audio.AudioProcessor;
import androidx.media3.exoplayer.audio.AudioProcessor.AudioFormat;
import androidx.media3.exoplayer.audio.SonicAudioProcessor;
import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@@ -44,37 +35,18 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/* package */ final class TransformerAudioRenderer extends TransformerBaseRenderer {
private static final String TAG = "TransformerAudioRenderer";
private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
private static final float SPEED_UNSET = -1f;
private final DecoderInputBuffer decoderInputBuffer;
private final DecoderInputBuffer encoderInputBuffer;
private final SonicAudioProcessor sonicAudioProcessor;
@Nullable private MediaCodecAdapterWrapper decoder;
@Nullable private MediaCodecAdapterWrapper encoder;
@Nullable private SpeedProvider speedProvider;
private @MonotonicNonNull Format decoderInputFormat;
private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
private ByteBuffer sonicOutputBuffer;
private long nextEncoderInputBufferTimeUs;
private float currentSpeed;
private @MonotonicNonNull SamplePipeline samplePipeline;
private boolean muxerWrapperTrackAdded;
private boolean muxerWrapperTrackEnded;
private boolean hasEncoderOutputFormat;
private boolean drainingSonicForSpeedChange;
public TransformerAudioRenderer(
MuxerWrapper muxerWrapper, TransformerMediaClock mediaClock, Transformation transformation) {
super(C.TRACK_TYPE_AUDIO, muxerWrapper, mediaClock, transformation);
decoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
encoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
sonicAudioProcessor = new SonicAudioProcessor();
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
}
@Override
@@ -89,201 +61,94 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override
protected void onReset() {
decoderInputBuffer.clear();
decoderInputBuffer.data = null;
encoderInputBuffer.clear();
encoderInputBuffer.data = null;
sonicAudioProcessor.reset();
if (decoder != null) {
decoder.release();
decoder = null;
if (samplePipeline != null) {
samplePipeline.release();
}
if (encoder != null) {
encoder.release();
encoder = null;
}
speedProvider = null;
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
muxerWrapperTrackAdded = false;
muxerWrapperTrackEnded = false;
hasEncoderOutputFormat = false;
drainingSonicForSpeedChange = false;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (!isRendererStarted || isEnded()) {
if (!isRendererStarted || isEnded() || !ensureRendererConfigured()) {
return;
}
if (ensureDecoderConfigured()) {
MediaCodecAdapterWrapper decoder = this.decoder;
if (ensureEncoderAndAudioProcessingConfigured()) {
MediaCodecAdapterWrapper encoder = this.encoder;
while (feedMuxerFromEncoder(encoder)) {}
if (sonicAudioProcessor.isActive()) {
while (feedEncoderFromSonic(decoder, encoder)) {}
while (feedSonicFromDecoder(decoder)) {}
} else {
while (feedEncoderFromDecoder(decoder, encoder)) {}
while (feedMuxerFromPipeline() || samplePipeline.processData() || feedPipelineFromInput()) {}
}
/** Attempts to read the input format and to initialize the sample pipeline. */
@EnsuresNonNullIf(expression = "samplePipeline", result = true)
private boolean ensureRendererConfigured() throws ExoPlaybackException {
if (samplePipeline != null) {
return true;
}
while (feedDecoderFromInput(decoder)) {}
FormatHolder formatHolder = getFormatHolder();
@ReadDataResult
int result = readSource(formatHolder, decoderInputBuffer, /* readFlags= */ FLAG_REQUIRE_FORMAT);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
samplePipeline =
new AudioSamplePipeline(checkNotNull(formatHolder.format), transformation, getIndex());
return true;
}
/**
* Attempts to write encoder output data to the muxer, and returns whether it may be possible to
* write more data immediately by calling this method again.
* Attempts to write sample pipeline output data to the muxer, and returns whether it may be
* possible to write more data immediately by calling this method again.
*/
private boolean feedMuxerFromEncoder(MediaCodecAdapterWrapper encoder) {
if (!hasEncoderOutputFormat) {
@Nullable Format encoderOutputFormat = encoder.getOutputFormat();
if (encoderOutputFormat == null) {
@RequiresNonNull("samplePipeline")
private boolean feedMuxerFromPipeline() {
if (!muxerWrapperTrackAdded) {
@Nullable Format samplePipelineOutputFormat = samplePipeline.getOutputFormat();
if (samplePipelineOutputFormat == null) {
return false;
}
hasEncoderOutputFormat = true;
muxerWrapper.addTrackFormat(encoderOutputFormat);
muxerWrapperTrackAdded = true;
muxerWrapper.addTrackFormat(samplePipelineOutputFormat);
}
if (encoder.isEnded()) {
if (samplePipeline.isEnded()) {
muxerWrapper.endTrack(getTrackType());
muxerWrapperTrackEnded = true;
return false;
}
@Nullable ByteBuffer encoderOutputBuffer = encoder.getOutputBuffer();
if (encoderOutputBuffer == null) {
@Nullable DecoderInputBuffer samplePipelineOutputBuffer = samplePipeline.getOutputBuffer();
if (samplePipelineOutputBuffer == null) {
return false;
}
BufferInfo encoderOutputBufferInfo = checkNotNull(encoder.getOutputBufferInfo());
if (!muxerWrapper.writeSample(
getTrackType(),
encoderOutputBuffer,
samplePipelineOutputBuffer.data,
/* isKeyFrame= */ true,
encoderOutputBufferInfo.presentationTimeUs)) {
samplePipelineOutputBuffer.timeUs)) {
return false;
}
encoder.releaseOutputBuffer();
samplePipeline.releaseOutputBuffer();
return true;
}
/**
* Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
* Attempts to pass input data to the sample pipeline, and returns whether it may be possible to
* pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private boolean feedEncoderFromDecoder(
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
@RequiresNonNull("samplePipeline")
private boolean feedPipelineFromInput() {
@Nullable DecoderInputBuffer samplePipelineInputBuffer = samplePipeline.dequeueInputBuffer();
if (samplePipelineInputBuffer == null) {
return false;
}
if (decoder.isEnded()) {
queueEndOfStreamToEncoder(encoder);
return false;
}
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
flushSonicAndSetSpeed(currentSpeed);
return false;
}
feedEncoder(encoder, decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Attempts to pass audio processor output data to the encoder, and returns whether it may be
* possible to pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private boolean feedEncoderFromSonic(
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (!sonicOutputBuffer.hasRemaining()) {
sonicOutputBuffer = sonicAudioProcessor.getOutput();
if (!sonicOutputBuffer.hasRemaining()) {
if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
queueEndOfStreamToEncoder(encoder);
}
return false;
}
}
feedEncoder(encoder, sonicOutputBuffer);
return true;
}
/**
* Attempts to process decoder output data, and returns whether it may be possible to process more
* data immediately by calling this method again.
*/
private boolean feedSonicFromDecoder(MediaCodecAdapterWrapper decoder) {
if (drainingSonicForSpeedChange) {
if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
flushSonicAndSetSpeed(currentSpeed);
drainingSonicForSpeedChange = false;
}
return false;
}
// Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
// there is output still to be processed.
if (sonicOutputBuffer.hasRemaining()) {
return false;
}
if (decoder.isEnded()) {
sonicAudioProcessor.queueEndOfStream();
return false;
}
checkState(!sonicAudioProcessor.isEnded());
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
sonicAudioProcessor.queueEndOfStream();
drainingSonicForSpeedChange = true;
return false;
}
sonicAudioProcessor.queueInput(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Attempts to pass input data to the decoder, and returns whether it may be possible to pass more
* data immediately by calling this method again.
*/
private boolean feedDecoderFromInput(MediaCodecAdapterWrapper decoder) {
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false;
}
decoderInputBuffer.clear();
@ReadDataResult
int result = readSource(getFormatHolder(), decoderInputBuffer, /* readFlags= */ 0);
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
switch (result) {
case C.RESULT_BUFFER_READ:
mediaClock.updateTimeForTrackType(getTrackType(), decoderInputBuffer.timeUs);
decoderInputBuffer.timeUs -= streamOffsetUs;
decoderInputBuffer.flip();
decoder.queueInputBuffer(decoderInputBuffer);
return !decoderInputBuffer.isEndOfStream();
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
samplePipelineInputBuffer.timeUs -= streamOffsetUs;
samplePipelineInputBuffer.flip();
samplePipeline.queueInputBuffer();
return !samplePipelineInputBuffer.isEndOfStream();
case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ:
@@ -291,150 +156,4 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return false;
}
}
/**
* Feeds as much data as possible between the current position and limit of the specified {@link
* ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private void feedEncoder(MediaCodecAdapterWrapper encoder, ByteBuffer inputBuffer) {
ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
int bufferLimit = inputBuffer.limit();
inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
encoderInputBufferData.put(inputBuffer);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
nextEncoderInputBufferTimeUs +=
getBufferDurationUs(
/* bytesWritten= */ encoderInputBufferData.position(),
encoderInputAudioFormat.bytesPerFrame,
encoderInputAudioFormat.sampleRate);
encoderInputBuffer.setFlags(0);
encoderInputBuffer.flip();
inputBuffer.limit(bufferLimit);
encoder.queueInputBuffer(encoderInputBuffer);
}
private void queueEndOfStreamToEncoder(MediaCodecAdapterWrapper encoder) {
checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
encoderInputBuffer.flip();
// Queuing EOS should only occur with an empty buffer.
encoder.queueInputBuffer(encoderInputBuffer);
}
/**
* Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
* configured yet, and returns whether they have been configured.
*/
@RequiresNonNull({"decoder", "decoderInputFormat"})
@EnsuresNonNullIf(
expression = {"encoder", "encoderInputAudioFormat"},
result = true)
private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
if (encoder != null && encoderInputAudioFormat != null) {
return true;
}
MediaCodecAdapterWrapper decoder = this.decoder;
@Nullable Format decoderOutputFormat = decoder.getOutputFormat();
if (decoderOutputFormat == null) {
return false;
}
AudioFormat outputAudioFormat =
new AudioFormat(
decoderOutputFormat.sampleRate,
decoderOutputFormat.channelCount,
decoderOutputFormat.pcmEncoding);
if (transformation.flattenForSlowMotion) {
try {
outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
flushSonicAndSetSpeed(currentSpeed);
} catch (AudioProcessor.UnhandledAudioFormatException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
}
String audioMimeType =
transformation.audioMimeType == null
? decoderInputFormat.sampleMimeType
: transformation.audioMimeType;
try {
encoder =
MediaCodecAdapterWrapper.createForAudioEncoding(
new Format.Builder()
.setSampleMimeType(audioMimeType)
.setSampleRate(outputAudioFormat.sampleRate)
.setChannelCount(outputAudioFormat.channelCount)
.setAverageBitrate(DEFAULT_ENCODER_BITRATE)
.build());
} catch (IOException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
encoderInputAudioFormat = outputAudioFormat;
return true;
}
/**
* Attempts to configure the {@link #decoder} if it has not been configured yet, and returns
* whether the decoder has been configured.
*/
@EnsuresNonNullIf(
expression = {"decoderInputFormat", "decoder"},
result = true)
private boolean ensureDecoderConfigured() throws ExoPlaybackException {
if (decoder != null && decoderInputFormat != null) {
return true;
}
FormatHolder formatHolder = getFormatHolder();
@ReadDataResult int result = readSource(formatHolder, decoderInputBuffer, FLAG_REQUIRE_FORMAT);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
decoderInputFormat = checkNotNull(formatHolder.format);
MediaCodecAdapterWrapper decoder;
try {
decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
} catch (IOException e) {
// TODO (internal b/184262323): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
speedProvider = new SegmentSpeedProvider(decoderInputFormat);
currentSpeed = speedProvider.getSpeed(0);
this.decoder = decoder;
return true;
}
private boolean isSpeedChanging(BufferInfo bufferInfo) {
if (!transformation.flattenForSlowMotion) {
return false;
}
float newSpeed = checkNotNull(speedProvider).getSpeed(bufferInfo.presentationTimeUs);
boolean speedChanging = newSpeed != currentSpeed;
currentSpeed = newSpeed;
return speedChanging;
}
private void flushSonicAndSetSpeed(float speed) {
sonicAudioProcessor.setSpeed(speed);
sonicAudioProcessor.setPitch(speed);
sonicAudioProcessor.flush();
}
private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
return ExoPlaybackException.createForRenderer(
cause,
TAG,
getIndex(),
decoderInputFormat,
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false,
errorCode);
}
private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
long framesWritten = bytesWritten / bytesPerFrame;
return framesWritten * C.MICROS_PER_SECOND / sampleRate;
}
}