Add float PCM support to TrimmingAudioProcessor

This was requested in Issue: androidx/media#2191.

PiperOrigin-RevId: 735375746
(cherry picked from commit 06163f3dfaf9eb13d84790273179157e762122ed)
This commit is contained in:
ivanbuper 2025-03-10 07:58:14 -07:00 committed by tonihei
parent b215670445
commit 97606094ca
4 changed files with 72 additions and 36 deletions

View File

@@ -4,6 +4,7 @@
* Audio:
* Add support for float PCM to `ChannelMappingAudioProcessor`.
* Add support for float PCM to `TrimmingAudioProcessor`.
* Session:
* Fix bug where a stale notification stays visible when the playlist is
cleared ([#2211](https://github.com/androidx/media/issues/2211)).

View File

@@ -602,7 +602,8 @@ public final class DefaultAudioSink implements AudioSink {
ImmutableList.of(
new ToInt16PcmAudioProcessor(), channelMappingAudioProcessor, trimmingAudioProcessor);
toFloatPcmAvailableAudioProcessors =
ImmutableList.of(new ToFloatPcmAudioProcessor(), channelMappingAudioProcessor);
ImmutableList.of(
new ToFloatPcmAudioProcessor(), channelMappingAudioProcessor, trimmingAudioProcessor);
volume = 1f;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
auxEffectInfo = new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, 0f);

View File

@@ -26,8 +26,6 @@ import java.nio.ByteBuffer;
/** Audio processor for trimming samples from the start/end of data. */
/* package */ final class TrimmingAudioProcessor extends BaseAudioProcessor {
private static final @C.PcmEncoding int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT;
private int trimStartFrames;
private int trimEndFrames;
private boolean reconfigurationPending;
@@ -80,7 +78,8 @@ import java.nio.ByteBuffer;
@Override
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != OUTPUT_ENCODING) {
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT
&& inputAudioFormat.encoding != C.ENCODING_PCM_FLOAT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
reconfigurationPending = true;

View File

@@ -15,14 +15,16 @@
*/
package androidx.media3.exoplayer.audio;
import static androidx.media3.test.utils.TestUtil.createByteBuffer;
import static androidx.media3.test.utils.TestUtil.createFloatArray;
import static androidx.media3.test.utils.TestUtil.createShortArray;
import static com.google.common.truth.Truth.assertThat;
import androidx.media3.common.C;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.nio.ByteBuffer;
import org.junit.After;
import org.junit.Before;
import java.nio.ByteOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -30,8 +32,10 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public final class TrimmingAudioProcessorTest {
private static final AudioFormat AUDIO_FORMAT =
private static final AudioFormat STEREO_PCM16_FORMAT =
new AudioFormat(/* sampleRate= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);
private static final AudioFormat STEREO_PCM_FLOAT_FORMAT =
new AudioFormat(/* sampleRate= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_FLOAT);
private static final int TRACK_ONE_UNTRIMMED_FRAME_COUNT = 1024;
private static final int TRACK_ONE_TRIM_START_FRAME_COUNT = 64;
private static final int TRACK_ONE_TRIM_END_FRAME_COUNT = 32;
@@ -39,44 +43,21 @@ public final class TrimmingAudioProcessorTest {
private static final int TRACK_TWO_TRIM_END_FRAME_COUNT = 16;
private static final int TRACK_ONE_BUFFER_SIZE_BYTES =
AUDIO_FORMAT.bytesPerFrame * TRACK_ONE_UNTRIMMED_FRAME_COUNT;
STEREO_PCM16_FORMAT.bytesPerFrame * TRACK_ONE_UNTRIMMED_FRAME_COUNT;
private static final int TRACK_ONE_TRIMMED_BUFFER_SIZE_BYTES =
TRACK_ONE_BUFFER_SIZE_BYTES
- AUDIO_FORMAT.bytesPerFrame
- STEREO_PCM16_FORMAT.bytesPerFrame
* (TRACK_ONE_TRIM_START_FRAME_COUNT + TRACK_ONE_TRIM_END_FRAME_COUNT);
private TrimmingAudioProcessor trimmingAudioProcessor;
@Before
public void setUp() {
trimmingAudioProcessor = new TrimmingAudioProcessor();
}
@After
public void tearDown() {
trimmingAudioProcessor.reset();
}
@Test
public void flushTwice_trimsStartAndEnd() throws Exception {
TrimmingAudioProcessor trimmingAudioProcessor = new TrimmingAudioProcessor();
trimmingAudioProcessor.setTrimFrameCount(
TRACK_ONE_TRIM_START_FRAME_COUNT, TRACK_ONE_TRIM_END_FRAME_COUNT);
trimmingAudioProcessor.configure(AUDIO_FORMAT);
trimmingAudioProcessor.configure(STEREO_PCM16_FORMAT);
trimmingAudioProcessor.flush();
trimmingAudioProcessor.flush();
int outputSizeBytes = feedAndDrainAudioProcessorToEndOfTrackOne();
assertThat(trimmingAudioProcessor.getTrimmedFrameCount())
.isEqualTo(TRACK_ONE_TRIM_START_FRAME_COUNT + TRACK_ONE_TRIM_END_FRAME_COUNT);
assertThat(outputSizeBytes).isEqualTo(TRACK_ONE_TRIMMED_BUFFER_SIZE_BYTES);
}
/**
* Feeds and drains the audio processor up to the end of track one, returning the total output
* size in bytes.
*/
private int feedAndDrainAudioProcessorToEndOfTrackOne() throws Exception {
// Feed and drain the processor, simulating a gapless transition to another track.
ByteBuffer inputBuffer = ByteBuffer.allocate(TRACK_ONE_BUFFER_SIZE_BYTES);
int outputSize = 0;
@@ -87,7 +68,7 @@ public final class TrimmingAudioProcessorTest {
// Reconfigure for a next track then begin draining.
trimmingAudioProcessor.setTrimFrameCount(
TRACK_TWO_TRIM_START_FRAME_COUNT, TRACK_TWO_TRIM_END_FRAME_COUNT);
trimmingAudioProcessor.configure(AUDIO_FORMAT);
trimmingAudioProcessor.configure(STEREO_PCM16_FORMAT);
trimmingAudioProcessor.queueEndOfStream();
}
}
@@ -96,6 +77,60 @@ public final class TrimmingAudioProcessorTest {
outputBuffer.clear();
}
trimmingAudioProcessor.reset();
return outputSize;
assertThat(trimmingAudioProcessor.getTrimmedFrameCount())
.isEqualTo(TRACK_ONE_TRIM_START_FRAME_COUNT + TRACK_ONE_TRIM_END_FRAME_COUNT);
assertThat(outputSize).isEqualTo(TRACK_ONE_TRIMMED_BUFFER_SIZE_BYTES);
}
// Verifies that, for a 16-bit PCM stereo stream, the configured numbers of frames are
// trimmed from the start and end of the stream.
@Test
public void trim_withPcm16Samples_removesExpectedSamples() throws Exception {
TrimmingAudioProcessor trimmingAudioProcessor = new TrimmingAudioProcessor();
// Sized for the surviving output: 4 shorts * 2 bytes = 8 bytes (2 stereo frames).
ByteBuffer resultBuffer = ByteBuffer.allocateDirect(8).order(ByteOrder.nativeOrder());
// Trim 1 frame from the start and 2 frames from the end.
trimmingAudioProcessor.setTrimFrameCount(1, 2);
trimmingAudioProcessor.configure(STEREO_PCM16_FORMAT);
trimmingAudioProcessor.flush();
// 10 shorts = 5 stereo frames of input.
ByteBuffer inputBuffer = createByteBuffer(new short[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
while (!trimmingAudioProcessor.isEnded()) {
if (inputBuffer.hasRemaining()) {
trimmingAudioProcessor.queueInput(inputBuffer);
if (!inputBuffer.hasRemaining()) {
// All input queued: reconfigure and signal end of stream so the processor
// releases any frames it was holding back for end trimming.
trimmingAudioProcessor.configure(STEREO_PCM16_FORMAT);
trimmingAudioProcessor.queueEndOfStream();
}
}
resultBuffer.put(trimmingAudioProcessor.getOutput());
}
resultBuffer.flip();
// 1 start frame + 2 end frames = 3 trimmed frames in total.
assertThat(trimmingAudioProcessor.getTrimmedFrameCount()).isEqualTo(3);
// Frame {1,2} (start) and frames {7,8},{9,10} (end) are removed; {3,4},{5,6} remain.
assertThat(createShortArray(resultBuffer)).isEqualTo(new short[] {3, 4, 5, 6});
}
// Verifies that, for a float PCM stereo stream, the configured numbers of frames are
// trimmed from the start and end of the stream (the new capability added by this change).
@Test
public void trim_withPcmFloatSamples_removesExpectedSamples() throws Exception {
TrimmingAudioProcessor trimmingAudioProcessor = new TrimmingAudioProcessor();
// Sized for the surviving output: 2 floats * 4 bytes = 8 bytes (1 stereo frame).
ByteBuffer resultBuffer = ByteBuffer.allocateDirect(8).order(ByteOrder.nativeOrder());
// Trim 2 frames from the start and 2 frames from the end.
trimmingAudioProcessor.setTrimFrameCount(2, 2);
trimmingAudioProcessor.configure(STEREO_PCM_FLOAT_FORMAT);
trimmingAudioProcessor.flush();
// 10 floats = 5 stereo frames of input.
ByteBuffer inputBuffer =
createByteBuffer(new float[] {1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f});
while (!trimmingAudioProcessor.isEnded()) {
if (inputBuffer.hasRemaining()) {
trimmingAudioProcessor.queueInput(inputBuffer);
if (!inputBuffer.hasRemaining()) {
// All input queued: reconfigure and signal end of stream so the processor
// releases any frames it was holding back for end trimming.
trimmingAudioProcessor.configure(STEREO_PCM_FLOAT_FORMAT);
trimmingAudioProcessor.queueEndOfStream();
}
}
resultBuffer.put(trimmingAudioProcessor.getOutput());
}
resultBuffer.flip();
// 2 start frames + 2 end frames = 4 trimmed frames in total.
assertThat(trimmingAudioProcessor.getTrimmedFrameCount()).isEqualTo(4);
// Frames {1,2},{3,4} (start) and {7,8},{9,10} (end) are removed; {5,6} remains.
assertThat(createFloatArray(resultBuffer)).isEqualTo(new float[] {5f, 6f});
}
}