Separate TransformerAudioRenderer and new AudioSamplePipeline.

`TransformerAudioRenderer` reads input and passes `DecoderInputBuffer`s to the `AudioSamplePipeline`. The `AudioSamplePipeline` handles all steps from decoding to encoding. `TransformerAudioRenderer` then receives `DecoderInputBuffer`s back from the `AudioSamplePipeline` and passes their data to the muxer. `AudioSamplePipeline` implements a new interface, `SamplePipeline`. A pass-through pipeline will be added in a future CL.

PiperOrigin-RevId: 407555102
This commit is contained in:
parent 49271ec74f
commit b87f26490b
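In outline, the renderer now only moves buffers between the source, the pipeline, and the muxer, while the pipeline owns decoding, audio processing, and re-encoding. The following is a simplified, illustrative sketch of that hand-off written against the new SamplePipeline interface; it is not the renderer code itself (that appears in the diff below), and readInput/writeToMuxer are hypothetical placeholders for the renderer's readSource and MuxerWrapper plumbing.

// Simplified sketch of the renderer <-> SamplePipeline hand-off described in the commit message.
// readInput(...) and writeToMuxer(...) are hypothetical placeholders, not media3 APIs.
void renderLoop(SamplePipeline pipeline) throws ExoPlaybackException {
  boolean progress = true;
  while (progress) {
    progress = false;
    // 1. Drain pipeline output (encoded samples) to the muxer.
    DecoderInputBuffer output = pipeline.getOutputBuffer();
    if (output != null) {
      writeToMuxer(output.data, output.timeUs);
      pipeline.releaseOutputBuffer();
      progress = true;
    }
    // 2. Let the pipeline do one unit of decode/process/encode work.
    progress |= pipeline.processData();
    // 3. Feed new input from the source into the pipeline.
    DecoderInputBuffer input = pipeline.dequeueInputBuffer();
    if (input != null && readInput(input)) {
      pipeline.queueInputBuffer();
      progress = true;
    }
  }
}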
@@ -285,30 +285,6 @@ sample:
 size = 230
 isKeyFrame = true
 presentationTimeUs = 67627
-sample:
-trackIndex = 1
-dataHashCode = -1830836678
-size = 1051
-isKeyFrame = false
-presentationTimeUs = 500500
-sample:
-trackIndex = 1
-dataHashCode = 1767407540
-size = 874
-isKeyFrame = false
-presentationTimeUs = 467133
-sample:
-trackIndex = 1
-dataHashCode = 918440283
-size = 781
-isKeyFrame = false
-presentationTimeUs = 533866
-sample:
-trackIndex = 1
-dataHashCode = -1408463661
-size = 4725
-isKeyFrame = false
-presentationTimeUs = 700700
 sample:
 trackIndex = 0
 dataHashCode = -997198863
@@ -399,6 +375,30 @@ sample:
 size = 6
 isKeyFrame = true
 presentationTimeUs = 107644
+sample:
+trackIndex = 1
+dataHashCode = -1830836678
+size = 1051
+isKeyFrame = false
+presentationTimeUs = 500500
+sample:
+trackIndex = 1
+dataHashCode = 1767407540
+size = 874
+isKeyFrame = false
+presentationTimeUs = 467133
+sample:
+trackIndex = 1
+dataHashCode = 918440283
+size = 781
+isKeyFrame = false
+presentationTimeUs = 533866
+sample:
+trackIndex = 1
+dataHashCode = -1408463661
+size = 4725
+isKeyFrame = false
+presentationTimeUs = 700700
 sample:
 trackIndex = 1
 dataHashCode = 1569455924
@@ -132,64 +132,148 @@ sample:
 presentationTimeUs = 0
 sample:
 trackIndex = 0
-dataHashCode = -833872563
-size = 1732
+dataHashCode = 1000136444
+size = 140
 isKeyFrame = true
 presentationTimeUs = 416
 sample:
 trackIndex = 0
-dataHashCode = -135901925
-size = 380
+dataHashCode = 217961709
+size = 172
 isKeyFrame = true
-presentationTimeUs = 36499
+presentationTimeUs = 3332
+sample:
+trackIndex = 0
+dataHashCode = -879376936
+size = 176
+isKeyFrame = true
+presentationTimeUs = 6915
+sample:
+trackIndex = 0
+dataHashCode = 1259979587
+size = 192
+isKeyFrame = true
+presentationTimeUs = 10581
+sample:
+trackIndex = 0
+dataHashCode = 907407225
+size = 188
+isKeyFrame = true
+presentationTimeUs = 14581
+sample:
+trackIndex = 0
+dataHashCode = -904354707
+size = 176
+isKeyFrame = true
+presentationTimeUs = 18497
+sample:
+trackIndex = 0
+dataHashCode = 1001385853
+size = 172
+isKeyFrame = true
+presentationTimeUs = 22163
+sample:
+trackIndex = 0
+dataHashCode = 1545716086
+size = 196
+isKeyFrame = true
+presentationTimeUs = 25746
+sample:
+trackIndex = 0
+dataHashCode = 358710839
+size = 180
+isKeyFrame = true
+presentationTimeUs = 29829
+sample:
+trackIndex = 0
+dataHashCode = -671124798
+size = 140
+isKeyFrame = true
+presentationTimeUs = 33579
+sample:
+trackIndex = 0
+dataHashCode = -945404910
+size = 120
+isKeyFrame = true
+presentationTimeUs = 36495
+sample:
+trackIndex = 0
+dataHashCode = 1881048379
+size = 88
+isKeyFrame = true
+presentationTimeUs = 38995
+sample:
+trackIndex = 0
+dataHashCode = 1059579897
+size = 88
+isKeyFrame = true
+presentationTimeUs = 40828
+sample:
+trackIndex = 0
+dataHashCode = 1496098648
+size = 84
+isKeyFrame = true
+presentationTimeUs = 42661
 sample:
 trackIndex = 0
 dataHashCode = 250093960
 size = 751
 isKeyFrame = true
-presentationTimeUs = 44415
+presentationTimeUs = 44411
 sample:
 trackIndex = 0
 dataHashCode = 1895536226
 size = 1045
 isKeyFrame = true
-presentationTimeUs = 59998
+presentationTimeUs = 59994
 sample:
 trackIndex = 0
 dataHashCode = 1723596464
 size = 947
 isKeyFrame = true
-presentationTimeUs = 81748
+presentationTimeUs = 81744
 sample:
 trackIndex = 0
 dataHashCode = -978803114
 size = 946
 isKeyFrame = true
-presentationTimeUs = 101414
+presentationTimeUs = 101410
 sample:
 trackIndex = 0
 dataHashCode = 387377078
 size = 946
 isKeyFrame = true
-presentationTimeUs = 121080
+presentationTimeUs = 121076
 sample:
 trackIndex = 0
 dataHashCode = -132658698
 size = 901
 isKeyFrame = true
-presentationTimeUs = 140746
+presentationTimeUs = 140742
 sample:
 trackIndex = 0
 dataHashCode = 1495036471
 size = 899
 isKeyFrame = true
-presentationTimeUs = 159496
+presentationTimeUs = 159492
 sample:
 trackIndex = 0
 dataHashCode = 304440590
 size = 878
 isKeyFrame = true
-presentationTimeUs = 178162
+presentationTimeUs = 178158
+sample:
+trackIndex = 0
+dataHashCode = -1955900344
+size = 112
+isKeyFrame = true
+presentationTimeUs = 196408
+sample:
+trackIndex = 0
+dataHashCode = 88896626
+size = 116
+isKeyFrame = true
+presentationTimeUs = 198741
 sample:
 trackIndex = 1
 dataHashCode = 2139021989
@@ -214,12 +298,6 @@ sample:
 size = 1193
 isKeyFrame = false
 presentationTimeUs = 734083
-sample:
-trackIndex = 0
-dataHashCode = -752661703
-size = 228
-isKeyFrame = true
-presentationTimeUs = 196412
 sample:
 trackIndex = 1
 dataHashCode = -1554795381
@@ -0,0 +1,373 @@
/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.transformer;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.min;

import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.audio.AudioProcessor;
import androidx.media3.exoplayer.audio.AudioProcessor.AudioFormat;
import androidx.media3.exoplayer.audio.SonicAudioProcessor;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;

/**
 * Pipeline to decode audio samples, apply transformations on the raw samples, and re-encode them.
 */
@RequiresApi(18)
/* package */ final class AudioSamplePipeline implements SamplePipeline {

  private static final String TAG = "AudioSamplePipeline";
  private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;

  private final MediaCodecAdapterWrapper decoder;
  private final Format decoderInputFormat;
  private final DecoderInputBuffer decoderInputBuffer;

  private final SonicAudioProcessor sonicAudioProcessor;
  private final SpeedProvider speedProvider;

  private final DecoderInputBuffer encoderInputBuffer;
  private final DecoderInputBuffer encoderOutputBuffer;

  private final Transformation transformation;
  private final int rendererIndex;

  private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
  private @MonotonicNonNull MediaCodecAdapterWrapper encoder;
  private long nextEncoderInputBufferTimeUs;

  private ByteBuffer sonicOutputBuffer;
  private boolean drainingSonicForSpeedChange;
  private float currentSpeed;

  public AudioSamplePipeline(
      Format decoderInputFormat, Transformation transformation, int rendererIndex)
      throws ExoPlaybackException {
    this.decoderInputFormat = decoderInputFormat;
    this.transformation = transformation;
    this.rendererIndex = rendererIndex;
    decoderInputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    encoderInputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    encoderOutputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    sonicAudioProcessor = new SonicAudioProcessor();
    sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
    nextEncoderInputBufferTimeUs = 0;
    speedProvider = new SegmentSpeedProvider(decoderInputFormat);
    currentSpeed = speedProvider.getSpeed(0);
    try {
      this.decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
    } catch (IOException e) {
      // TODO (internal b/184262323): Assign an adequate error code.
      throw ExoPlaybackException.createForRenderer(
          e,
          TAG,
          rendererIndex,
          decoderInputFormat,
          /* rendererFormatSupport= */ C.FORMAT_HANDLED,
          /* isRecoverable= */ false,
          PlaybackException.ERROR_CODE_UNSPECIFIED);
    }
  }

  @Override
  public void release() {
    sonicAudioProcessor.reset();
    decoder.release();
    if (encoder != null) {
      encoder.release();
    }
  }

  @Override
  public boolean processData() throws ExoPlaybackException {
    if (!ensureEncoderAndAudioProcessingConfigured()) {
      return false;
    }
    if (sonicAudioProcessor.isActive()) {
      return feedEncoderFromSonic() || feedSonicFromDecoder();
    } else {
      return feedEncoderFromDecoder();
    }
  }

  @Override
  @Nullable
  public DecoderInputBuffer dequeueInputBuffer() {
    return decoder.maybeDequeueInputBuffer(decoderInputBuffer) ? decoderInputBuffer : null;
  }

  @Override
  public void queueInputBuffer() {
    decoder.queueInputBuffer(decoderInputBuffer);
  }

  @Override
  @Nullable
  public Format getOutputFormat() {
    return encoder != null ? encoder.getOutputFormat() : null;
  }

  @Override
  public boolean isEnded() {
    return encoder != null && encoder.isEnded();
  }

  @Override
  @Nullable
  public DecoderInputBuffer getOutputBuffer() {
    if (encoder != null) {
      encoderOutputBuffer.data = encoder.getOutputBuffer();
      if (encoderOutputBuffer.data != null) {
        encoderOutputBuffer.timeUs = checkNotNull(encoder.getOutputBufferInfo()).presentationTimeUs;
        return encoderOutputBuffer;
      }
    }
    return null;
  }

  @Override
  public void releaseOutputBuffer() {
    if (encoder != null) {
      encoder.releaseOutputBuffer();
    }
  }

  /**
   * Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
   * pass more data immediately by calling this method again.
   */
  @RequiresNonNull({"encoderInputAudioFormat", "encoder"})
  private boolean feedEncoderFromDecoder() {
    if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
      return false;
    }

    if (decoder.isEnded()) {
      queueEndOfStreamToEncoder();
      return false;
    }

    @Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
    if (decoderOutputBuffer == null) {
      return false;
    }
    if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
      flushSonicAndSetSpeed(currentSpeed);
      return false;
    }
    feedEncoder(decoderOutputBuffer);
    if (!decoderOutputBuffer.hasRemaining()) {
      decoder.releaseOutputBuffer();
    }
    return true;
  }

  /**
   * Attempts to pass audio processor output data to the encoder, and returns whether it may be
   * possible to pass more data immediately by calling this method again.
   */
  @RequiresNonNull({"encoderInputAudioFormat", "encoder"})
  private boolean feedEncoderFromSonic() {
    if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
      return false;
    }

    if (!sonicOutputBuffer.hasRemaining()) {
      sonicOutputBuffer = sonicAudioProcessor.getOutput();
      if (!sonicOutputBuffer.hasRemaining()) {
        if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
          queueEndOfStreamToEncoder();
        }
        return false;
      }
    }

    feedEncoder(sonicOutputBuffer);
    return true;
  }

  /**
   * Attempts to process decoder output data, and returns whether it may be possible to process more
   * data immediately by calling this method again.
   */
  private boolean feedSonicFromDecoder() {
    if (drainingSonicForSpeedChange) {
      if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
        flushSonicAndSetSpeed(currentSpeed);
        drainingSonicForSpeedChange = false;
      }
      return false;
    }

    // Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
    // there is output still to be processed.
    if (sonicOutputBuffer.hasRemaining()) {
      return false;
    }

    if (decoder.isEnded()) {
      sonicAudioProcessor.queueEndOfStream();
      return false;
    }
    checkState(!sonicAudioProcessor.isEnded());

    @Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
    if (decoderOutputBuffer == null) {
      return false;
    }
    if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
      sonicAudioProcessor.queueEndOfStream();
      drainingSonicForSpeedChange = true;
      return false;
    }
    sonicAudioProcessor.queueInput(decoderOutputBuffer);
    if (!decoderOutputBuffer.hasRemaining()) {
      decoder.releaseOutputBuffer();
    }
    return true;
  }

  /**
   * Feeds as much data as possible between the current position and limit of the specified {@link
   * ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
   */
  @RequiresNonNull({"encoder", "encoderInputAudioFormat"})
  private void feedEncoder(ByteBuffer inputBuffer) {
    ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
    int bufferLimit = inputBuffer.limit();
    inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
    encoderInputBufferData.put(inputBuffer);
    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
    nextEncoderInputBufferTimeUs +=
        getBufferDurationUs(
            /* bytesWritten= */ encoderInputBufferData.position(),
            encoderInputAudioFormat.bytesPerFrame,
            encoderInputAudioFormat.sampleRate);
    encoderInputBuffer.setFlags(0);
    encoderInputBuffer.flip();
    inputBuffer.limit(bufferLimit);
    encoder.queueInputBuffer(encoderInputBuffer);
  }

  @RequiresNonNull("encoder")
  private void queueEndOfStreamToEncoder() {
    checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
    encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
    encoderInputBuffer.flip();
    // Queuing EOS should only occur with an empty buffer.
    encoder.queueInputBuffer(encoderInputBuffer);
  }

  /**
   * Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
   * configured yet, and returns whether they have been configured.
   */
  @EnsuresNonNullIf(
      expression = {"encoder", "encoderInputAudioFormat"},
      result = true)
  private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
    if (encoder != null && encoderInputAudioFormat != null) {
      return true;
    }
    @Nullable Format decoderOutputFormat = decoder.getOutputFormat();
    if (decoderOutputFormat == null) {
      return false;
    }
    AudioFormat outputAudioFormat =
        new AudioFormat(
            decoderOutputFormat.sampleRate,
            decoderOutputFormat.channelCount,
            decoderOutputFormat.pcmEncoding);
    if (transformation.flattenForSlowMotion) {
      try {
        outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
        flushSonicAndSetSpeed(currentSpeed);
      } catch (AudioProcessor.UnhandledAudioFormatException e) {
        // TODO(internal b/192864511): Assign an adequate error code.
        throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
      }
    }
    String audioMimeType =
        transformation.audioMimeType == null
            ? decoderInputFormat.sampleMimeType
            : transformation.audioMimeType;
    try {
      encoder =
          MediaCodecAdapterWrapper.createForAudioEncoding(
              new Format.Builder()
                  .setSampleMimeType(audioMimeType)
                  .setSampleRate(outputAudioFormat.sampleRate)
                  .setChannelCount(outputAudioFormat.channelCount)
                  .setAverageBitrate(DEFAULT_ENCODER_BITRATE)
                  .build());
    } catch (IOException e) {
      // TODO(internal b/192864511): Assign an adequate error code.
      throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
    }
    encoderInputAudioFormat = outputAudioFormat;
    return true;
  }

  private boolean isSpeedChanging(BufferInfo bufferInfo) {
    if (!transformation.flattenForSlowMotion) {
      return false;
    }
    float newSpeed = speedProvider.getSpeed(bufferInfo.presentationTimeUs);
    boolean speedChanging = newSpeed != currentSpeed;
    currentSpeed = newSpeed;
    return speedChanging;
  }

  private void flushSonicAndSetSpeed(float speed) {
    sonicAudioProcessor.setSpeed(speed);
    sonicAudioProcessor.setPitch(speed);
    sonicAudioProcessor.flush();
  }

  private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
    return ExoPlaybackException.createForRenderer(
        cause,
        TAG,
        rendererIndex,
        decoderInputFormat,
        /* rendererFormatSupport= */ C.FORMAT_HANDLED,
        /* isRecoverable= */ false,
        errorCode);
  }

  private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
    long framesWritten = bytesWritten / bytesPerFrame;
    return framesWritten * C.MICROS_PER_SECOND / sampleRate;
  }
}
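For reference, the getBufferDurationUs arithmetic above works out as follows for a typical PCM configuration (illustrative values, not numbers taken from this change):

// Assumed example: 16-bit stereo PCM at 44100 Hz has bytesPerFrame = 4, so a
// 4096-byte encoder input buffer advances nextEncoderInputBufferTimeUs by
// (4096 / 4) * 1_000_000 / 44100 = 23219 microseconds (integer division).
long framesWritten = 4096 / 4;                        // 1024 frames
long durationUs = framesWritten * 1_000_000L / 44100; // 23219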
@@ -0,0 +1,69 @@
/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.transformer;

import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.ExoPlaybackException;

/**
 * Pipeline for processing {@link DecoderInputBuffer DecoderInputBuffers}.
 *
 * <p>This pipeline can be used to implement transformations of audio or video samples.
 */
/* package */ interface SamplePipeline {

  /** Returns a buffer if the pipeline is ready to accept input, and {@code null} otherwise. */
  @Nullable
  DecoderInputBuffer dequeueInputBuffer();

  /**
   * Informs the pipeline that its input buffer contains new input.
   *
   * <p>Should be called after filling the input buffer from {@link #dequeueInputBuffer()} with new
   * input.
   */
  void queueInputBuffer();

  /**
   * Processes the input data and returns whether more data can be processed by calling this method
   * again.
   */
  boolean processData() throws ExoPlaybackException;

  /** Returns the output format of the pipeline if available, and {@code null} otherwise. */
  @Nullable
  Format getOutputFormat();

  /** Returns an output buffer if the pipeline has produced output, and {@code null} otherwise. */
  @Nullable
  DecoderInputBuffer getOutputBuffer();

  /**
   * Releases the pipeline's output buffer.
   *
   * <p>Should be called when the output buffer from {@link #getOutputBuffer()} is no longer needed.
   */
  void releaseOutputBuffer();

  /** Returns whether the pipeline has ended. */
  boolean isEnded();

  /** Releases all resources held by the pipeline. */
  void release();
}
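The commit message says a pass-through pipeline will be added in a future CL. Purely as an illustration of the interface contract above, and not the code of that future change, a pass-through implementation could look roughly like the sketch below (assuming the same package and imports as SamplePipeline.java; the class name and field handling are hypothetical and simplified).

// Hypothetical sketch of a pass-through SamplePipeline: it hands input buffers straight
// back out without decoding or re-encoding. Not the implementation from the follow-up CL.
/* package */ final class PassthroughSamplePipelineSketch implements SamplePipeline {

  private final DecoderInputBuffer buffer =
      new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
  private final Format format;
  private boolean hasPendingBuffer;
  private boolean ended;

  public PassthroughSamplePipelineSketch(Format format) {
    this.format = format;
  }

  @Override
  @Nullable
  public DecoderInputBuffer dequeueInputBuffer() {
    // Only hand out the buffer when it is not already holding queued input.
    return hasPendingBuffer ? null : buffer;
  }

  @Override
  public void queueInputBuffer() {
    hasPendingBuffer = true;
  }

  @Override
  public boolean processData() {
    // Nothing to decode or encode; samples are passed through unchanged.
    return false;
  }

  @Override
  @Nullable
  public Format getOutputFormat() {
    return format;
  }

  @Override
  @Nullable
  public DecoderInputBuffer getOutputBuffer() {
    return hasPendingBuffer ? buffer : null;
  }

  @Override
  public void releaseOutputBuffer() {
    if (buffer.isEndOfStream()) {
      ended = true;
    }
    buffer.clear();
    hasPendingBuffer = false;
  }

  @Override
  public boolean isEnded() {
    return ended;
  }

  @Override
  public void release() {}
}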
@@ -17,25 +17,16 @@
 package androidx.media3.transformer;
 
 import static androidx.media3.common.util.Assertions.checkNotNull;
-import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
-import static java.lang.Math.min;
 
-import android.media.MediaCodec.BufferInfo;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 import androidx.media3.common.C;
 import androidx.media3.common.Format;
-import androidx.media3.common.PlaybackException;
 import androidx.media3.decoder.DecoderInputBuffer;
 import androidx.media3.exoplayer.ExoPlaybackException;
 import androidx.media3.exoplayer.FormatHolder;
-import androidx.media3.exoplayer.audio.AudioProcessor;
-import androidx.media3.exoplayer.audio.AudioProcessor.AudioFormat;
-import androidx.media3.exoplayer.audio.SonicAudioProcessor;
 import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
-import java.io.IOException;
-import java.nio.ByteBuffer;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@@ -44,37 +35,18 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
 /* package */ final class TransformerAudioRenderer extends TransformerBaseRenderer {
 
   private static final String TAG = "TransformerAudioRenderer";
-  private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
-  private static final float SPEED_UNSET = -1f;
 
   private final DecoderInputBuffer decoderInputBuffer;
-  private final DecoderInputBuffer encoderInputBuffer;
-  private final SonicAudioProcessor sonicAudioProcessor;
 
-  @Nullable private MediaCodecAdapterWrapper decoder;
-  @Nullable private MediaCodecAdapterWrapper encoder;
-  @Nullable private SpeedProvider speedProvider;
-  private @MonotonicNonNull Format decoderInputFormat;
-  private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
-
-  private ByteBuffer sonicOutputBuffer;
-  private long nextEncoderInputBufferTimeUs;
-  private float currentSpeed;
+  private @MonotonicNonNull SamplePipeline samplePipeline;
+  private boolean muxerWrapperTrackAdded;
   private boolean muxerWrapperTrackEnded;
-  private boolean hasEncoderOutputFormat;
-  private boolean drainingSonicForSpeedChange;
 
   public TransformerAudioRenderer(
       MuxerWrapper muxerWrapper, TransformerMediaClock mediaClock, Transformation transformation) {
     super(C.TRACK_TYPE_AUDIO, muxerWrapper, mediaClock, transformation);
     decoderInputBuffer =
         new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
-    encoderInputBuffer =
-        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
-    sonicAudioProcessor = new SonicAudioProcessor();
-    sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
-    nextEncoderInputBufferTimeUs = 0;
-    currentSpeed = SPEED_UNSET;
   }
 
   @Override
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void onReset() {
|
protected void onReset() {
|
||||||
decoderInputBuffer.clear();
|
if (samplePipeline != null) {
|
||||||
decoderInputBuffer.data = null;
|
samplePipeline.release();
|
||||||
encoderInputBuffer.clear();
|
|
||||||
encoderInputBuffer.data = null;
|
|
||||||
sonicAudioProcessor.reset();
|
|
||||||
if (decoder != null) {
|
|
||||||
decoder.release();
|
|
||||||
decoder = null;
|
|
||||||
}
|
}
|
||||||
if (encoder != null) {
|
muxerWrapperTrackAdded = false;
|
||||||
encoder.release();
|
|
||||||
encoder = null;
|
|
||||||
}
|
|
||||||
speedProvider = null;
|
|
||||||
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
|
|
||||||
nextEncoderInputBufferTimeUs = 0;
|
|
||||||
currentSpeed = SPEED_UNSET;
|
|
||||||
muxerWrapperTrackEnded = false;
|
muxerWrapperTrackEnded = false;
|
||||||
hasEncoderOutputFormat = false;
|
|
||||||
drainingSonicForSpeedChange = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
|
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
|
||||||
if (!isRendererStarted || isEnded()) {
|
if (!isRendererStarted || isEnded() || !ensureRendererConfigured()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ensureDecoderConfigured()) {
|
while (feedMuxerFromPipeline() || samplePipeline.processData() || feedPipelineFromInput()) {}
|
||||||
MediaCodecAdapterWrapper decoder = this.decoder;
|
}
|
||||||
if (ensureEncoderAndAudioProcessingConfigured()) {
|
|
||||||
MediaCodecAdapterWrapper encoder = this.encoder;
|
/** Attempts to read the input format and to initialize the sample pipeline. */
|
||||||
while (feedMuxerFromEncoder(encoder)) {}
|
@EnsuresNonNullIf(expression = "samplePipeline", result = true)
|
||||||
if (sonicAudioProcessor.isActive()) {
|
private boolean ensureRendererConfigured() throws ExoPlaybackException {
|
||||||
while (feedEncoderFromSonic(decoder, encoder)) {}
|
if (samplePipeline != null) {
|
||||||
while (feedSonicFromDecoder(decoder)) {}
|
return true;
|
||||||
} else {
|
|
||||||
while (feedEncoderFromDecoder(decoder, encoder)) {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
while (feedDecoderFromInput(decoder)) {}
|
|
||||||
}
|
}
|
||||||
|
FormatHolder formatHolder = getFormatHolder();
|
||||||
|
@ReadDataResult
|
||||||
|
int result = readSource(formatHolder, decoderInputBuffer, /* readFlags= */ FLAG_REQUIRE_FORMAT);
|
||||||
|
if (result != C.RESULT_FORMAT_READ) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
samplePipeline =
|
||||||
|
new AudioSamplePipeline(checkNotNull(formatHolder.format), transformation, getIndex());
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Attempts to write encoder output data to the muxer, and returns whether it may be possible to
|
* Attempts to write sample pipeline output data to the muxer, and returns whether it may be
|
||||||
* write more data immediately by calling this method again.
|
* possible to write more data immediately by calling this method again.
|
||||||
*/
|
*/
|
||||||
private boolean feedMuxerFromEncoder(MediaCodecAdapterWrapper encoder) {
|
@RequiresNonNull("samplePipeline")
|
||||||
if (!hasEncoderOutputFormat) {
|
private boolean feedMuxerFromPipeline() {
|
||||||
@Nullable Format encoderOutputFormat = encoder.getOutputFormat();
|
if (!muxerWrapperTrackAdded) {
|
||||||
if (encoderOutputFormat == null) {
|
@Nullable Format samplePipelineOutputFormat = samplePipeline.getOutputFormat();
|
||||||
|
if (samplePipelineOutputFormat == null) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
hasEncoderOutputFormat = true;
|
muxerWrapperTrackAdded = true;
|
||||||
muxerWrapper.addTrackFormat(encoderOutputFormat);
|
muxerWrapper.addTrackFormat(samplePipelineOutputFormat);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (encoder.isEnded()) {
|
if (samplePipeline.isEnded()) {
|
||||||
muxerWrapper.endTrack(getTrackType());
|
muxerWrapper.endTrack(getTrackType());
|
||||||
muxerWrapperTrackEnded = true;
|
muxerWrapperTrackEnded = true;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@Nullable ByteBuffer encoderOutputBuffer = encoder.getOutputBuffer();
|
@Nullable DecoderInputBuffer samplePipelineOutputBuffer = samplePipeline.getOutputBuffer();
|
||||||
if (encoderOutputBuffer == null) {
|
if (samplePipelineOutputBuffer == null) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
BufferInfo encoderOutputBufferInfo = checkNotNull(encoder.getOutputBufferInfo());
|
|
||||||
if (!muxerWrapper.writeSample(
|
if (!muxerWrapper.writeSample(
|
||||||
getTrackType(),
|
getTrackType(),
|
||||||
encoderOutputBuffer,
|
samplePipelineOutputBuffer.data,
|
||||||
/* isKeyFrame= */ true,
|
/* isKeyFrame= */ true,
|
||||||
encoderOutputBufferInfo.presentationTimeUs)) {
|
samplePipelineOutputBuffer.timeUs)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
encoder.releaseOutputBuffer();
|
samplePipeline.releaseOutputBuffer();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
|
* Attempts to pass input data to the sample pipeline, and returns whether it may be possible to
|
||||||
* pass more data immediately by calling this method again.
|
* pass more data immediately by calling this method again.
|
||||||
*/
|
*/
|
||||||
@RequiresNonNull({"encoderInputAudioFormat"})
|
@RequiresNonNull("samplePipeline")
|
||||||
private boolean feedEncoderFromDecoder(
|
private boolean feedPipelineFromInput() {
|
||||||
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
|
@Nullable DecoderInputBuffer samplePipelineInputBuffer = samplePipeline.dequeueInputBuffer();
|
||||||
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
|
if (samplePipelineInputBuffer == null) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (decoder.isEnded()) {
|
|
||||||
queueEndOfStreamToEncoder(encoder);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
|
|
||||||
if (decoderOutputBuffer == null) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
|
|
||||||
flushSonicAndSetSpeed(currentSpeed);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
feedEncoder(encoder, decoderOutputBuffer);
|
|
||||||
if (!decoderOutputBuffer.hasRemaining()) {
|
|
||||||
decoder.releaseOutputBuffer();
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Attempts to pass audio processor output data to the encoder, and returns whether it may be
|
|
||||||
* possible to pass more data immediately by calling this method again.
|
|
||||||
*/
|
|
||||||
@RequiresNonNull({"encoderInputAudioFormat"})
|
|
||||||
private boolean feedEncoderFromSonic(
|
|
||||||
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
|
|
||||||
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sonicOutputBuffer.hasRemaining()) {
|
|
||||||
sonicOutputBuffer = sonicAudioProcessor.getOutput();
|
|
||||||
if (!sonicOutputBuffer.hasRemaining()) {
|
|
||||||
if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
|
|
||||||
queueEndOfStreamToEncoder(encoder);
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
feedEncoder(encoder, sonicOutputBuffer);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Attempts to process decoder output data, and returns whether it may be possible to process more
|
|
||||||
* data immediately by calling this method again.
|
|
||||||
*/
|
|
||||||
private boolean feedSonicFromDecoder(MediaCodecAdapterWrapper decoder) {
|
|
||||||
if (drainingSonicForSpeedChange) {
|
|
||||||
if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
|
|
||||||
flushSonicAndSetSpeed(currentSpeed);
|
|
||||||
drainingSonicForSpeedChange = false;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
|
|
||||||
// there is output still to be processed.
|
|
||||||
if (sonicOutputBuffer.hasRemaining()) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (decoder.isEnded()) {
|
|
||||||
sonicAudioProcessor.queueEndOfStream();
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
checkState(!sonicAudioProcessor.isEnded());
|
|
||||||
|
|
||||||
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
|
|
||||||
if (decoderOutputBuffer == null) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
|
|
||||||
sonicAudioProcessor.queueEndOfStream();
|
|
||||||
drainingSonicForSpeedChange = true;
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
sonicAudioProcessor.queueInput(decoderOutputBuffer);
|
|
||||||
if (!decoderOutputBuffer.hasRemaining()) {
|
|
||||||
decoder.releaseOutputBuffer();
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Attempts to pass input data to the decoder, and returns whether it may be possible to pass more
|
|
||||||
* data immediately by calling this method again.
|
|
||||||
*/
|
|
||||||
private boolean feedDecoderFromInput(MediaCodecAdapterWrapper decoder) {
|
|
||||||
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
decoderInputBuffer.clear();
|
|
||||||
@ReadDataResult
|
@ReadDataResult
|
||||||
int result = readSource(getFormatHolder(), decoderInputBuffer, /* readFlags= */ 0);
|
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
|
||||||
switch (result) {
|
switch (result) {
|
||||||
case C.RESULT_BUFFER_READ:
|
case C.RESULT_BUFFER_READ:
|
||||||
mediaClock.updateTimeForTrackType(getTrackType(), decoderInputBuffer.timeUs);
|
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
|
||||||
decoderInputBuffer.timeUs -= streamOffsetUs;
|
samplePipelineInputBuffer.timeUs -= streamOffsetUs;
|
||||||
decoderInputBuffer.flip();
|
samplePipelineInputBuffer.flip();
|
||||||
decoder.queueInputBuffer(decoderInputBuffer);
|
samplePipeline.queueInputBuffer();
|
||||||
return !decoderInputBuffer.isEndOfStream();
|
return !samplePipelineInputBuffer.isEndOfStream();
|
||||||
case C.RESULT_FORMAT_READ:
|
case C.RESULT_FORMAT_READ:
|
||||||
throw new IllegalStateException("Format changes are not supported.");
|
throw new IllegalStateException("Format changes are not supported.");
|
||||||
case C.RESULT_NOTHING_READ:
|
case C.RESULT_NOTHING_READ:
|
||||||
@@ -291,150 +156,4 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
         return false;
     }
   }
-
-  /**
-   * Feeds as much data as possible between the current position and limit of the specified {@link
-   * ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
-   */
-  @RequiresNonNull({"encoderInputAudioFormat"})
-  private void feedEncoder(MediaCodecAdapterWrapper encoder, ByteBuffer inputBuffer) {
-    ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
-    int bufferLimit = inputBuffer.limit();
-    inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
-    encoderInputBufferData.put(inputBuffer);
-    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
-    nextEncoderInputBufferTimeUs +=
-        getBufferDurationUs(
-            /* bytesWritten= */ encoderInputBufferData.position(),
-            encoderInputAudioFormat.bytesPerFrame,
-            encoderInputAudioFormat.sampleRate);
-    encoderInputBuffer.setFlags(0);
-    encoderInputBuffer.flip();
-    inputBuffer.limit(bufferLimit);
-    encoder.queueInputBuffer(encoderInputBuffer);
-  }
-
-  private void queueEndOfStreamToEncoder(MediaCodecAdapterWrapper encoder) {
-    checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
-    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
-    encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
-    encoderInputBuffer.flip();
-    // Queuing EOS should only occur with an empty buffer.
-    encoder.queueInputBuffer(encoderInputBuffer);
-  }
-
-  /**
-   * Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
-   * configured yet, and returns whether they have been configured.
-   */
-  @RequiresNonNull({"decoder", "decoderInputFormat"})
-  @EnsuresNonNullIf(
-      expression = {"encoder", "encoderInputAudioFormat"},
-      result = true)
-  private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
-    if (encoder != null && encoderInputAudioFormat != null) {
-      return true;
-    }
-    MediaCodecAdapterWrapper decoder = this.decoder;
-    @Nullable Format decoderOutputFormat = decoder.getOutputFormat();
-    if (decoderOutputFormat == null) {
-      return false;
-    }
-    AudioFormat outputAudioFormat =
-        new AudioFormat(
-            decoderOutputFormat.sampleRate,
-            decoderOutputFormat.channelCount,
-            decoderOutputFormat.pcmEncoding);
-    if (transformation.flattenForSlowMotion) {
-      try {
-        outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
-        flushSonicAndSetSpeed(currentSpeed);
-      } catch (AudioProcessor.UnhandledAudioFormatException e) {
-        // TODO(internal b/192864511): Assign an adequate error code.
-        throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
-      }
-    }
-    String audioMimeType =
-        transformation.audioMimeType == null
-            ? decoderInputFormat.sampleMimeType
-            : transformation.audioMimeType;
-    try {
-      encoder =
-          MediaCodecAdapterWrapper.createForAudioEncoding(
-              new Format.Builder()
-                  .setSampleMimeType(audioMimeType)
-                  .setSampleRate(outputAudioFormat.sampleRate)
-                  .setChannelCount(outputAudioFormat.channelCount)
-                  .setAverageBitrate(DEFAULT_ENCODER_BITRATE)
-                  .build());
-    } catch (IOException e) {
-      // TODO(internal b/192864511): Assign an adequate error code.
-      throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
-    }
-    encoderInputAudioFormat = outputAudioFormat;
-    return true;
-  }
-
-  /**
-   * Attempts to configure the {@link #decoder} if it has not been configured yet, and returns
-   * whether the decoder has been configured.
-   */
-  @EnsuresNonNullIf(
-      expression = {"decoderInputFormat", "decoder"},
-      result = true)
-  private boolean ensureDecoderConfigured() throws ExoPlaybackException {
-    if (decoder != null && decoderInputFormat != null) {
-      return true;
-    }
-
-    FormatHolder formatHolder = getFormatHolder();
-    @ReadDataResult int result = readSource(formatHolder, decoderInputBuffer, FLAG_REQUIRE_FORMAT);
-    if (result != C.RESULT_FORMAT_READ) {
-      return false;
-    }
-    decoderInputFormat = checkNotNull(formatHolder.format);
-    MediaCodecAdapterWrapper decoder;
-    try {
-      decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
-    } catch (IOException e) {
-      // TODO (internal b/184262323): Assign an adequate error code.
-      throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
-    }
-    speedProvider = new SegmentSpeedProvider(decoderInputFormat);
-    currentSpeed = speedProvider.getSpeed(0);
-    this.decoder = decoder;
-    return true;
-  }
-
-  private boolean isSpeedChanging(BufferInfo bufferInfo) {
-    if (!transformation.flattenForSlowMotion) {
-      return false;
-    }
-    float newSpeed = checkNotNull(speedProvider).getSpeed(bufferInfo.presentationTimeUs);
-    boolean speedChanging = newSpeed != currentSpeed;
-    currentSpeed = newSpeed;
-    return speedChanging;
-  }
-
-  private void flushSonicAndSetSpeed(float speed) {
-    sonicAudioProcessor.setSpeed(speed);
-    sonicAudioProcessor.setPitch(speed);
-    sonicAudioProcessor.flush();
-  }
-
-  private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
-    return ExoPlaybackException.createForRenderer(
-        cause,
-        TAG,
-        getIndex(),
-        decoderInputFormat,
-        /* rendererFormatSupport= */ C.FORMAT_HANDLED,
-        /* isRecoverable= */ false,
-        errorCode);
-  }
-
-  private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
-    long framesWritten = bytesWritten / bytesPerFrame;
-    return framesWritten * C.MICROS_PER_SECOND / sampleRate;
-  }
 }