Clean up AudioProcessor implementations

- Ensure that audio processors that are always inactive use no memory, by only
  allocating in flush() if active. If data was already allocated but a processor
  becomes inactive, we assume the allocation may be needed again in the future,
  so we do not remove it (e.g., in the case of ResamplingAudioProcessor).
- Make SilenceSkippingAudioProcessor set up its buffers in flush(), and clarify
  that it is always necessary to call flush() if configure() returns true (see
  the sketch after this list).
- Make reset() reset all state for all processors.
- Use @Nullable state or empty arrays for inactive audio processor buffers.
- Miscellaneous style/consistency cleanup.
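
A minimal sketch of the calling convention these changes rely on (illustrative only, not
part of the commit; the format values are placeholders):

  void enableSilenceSkipping() throws AudioProcessor.UnhandledFormatException {
    SilenceSkippingAudioProcessor processor = new SilenceSkippingAudioProcessor();
    processor.setEnabled(true); // Takes effect after the next flush().
    boolean needsFlush =
        processor.configure(/* sampleRateHz= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);
    if (needsFlush) {
      processor.flush(); // Silence/padding buffers are sized here, and only while active.
    }
    if (processor.isActive()) {
      // The processor may now receive input via queueInput(ByteBuffer).
    }
  }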

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=190895783
andrewlewis 2018-03-29 01:30:11 -07:00 committed by Oliver Woodman
parent acca4f238b
commit 78ff4af6a7
11 changed files with 112 additions and 126 deletions

View File

@ -26,6 +26,7 @@ android {
dependencies {
implementation project(modulePrefix + 'library-core')
implementation 'com.android.support:support-annotations:' + supportLibraryVersion
implementation 'com.google.vr:sdk-audio:1.80.0'
}

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.ext.gvr;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.Format;
@ -39,7 +40,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
private int sampleRateHz;
private int channelCount;
private GvrAudioSurround gvrAudioSurround;
@Nullable private GvrAudioSurround gvrAudioSurround;
private ByteBuffer buffer;
private boolean inputEnded;
@ -48,14 +49,13 @@ public final class GvrAudioProcessor implements AudioProcessor {
private float y;
private float z;
/**
* Creates a new GVR audio processor.
*/
/** Creates a new GVR audio processor. */
public GvrAudioProcessor() {
// Use the identity for the initial orientation.
w = 1f;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
}
/**
@ -77,9 +77,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
}
}
@SuppressWarnings("ReferenceEquality")
@Override
public synchronized boolean configure(int sampleRateHz, int channelCount,
@C.Encoding int encoding) throws UnhandledFormatException {
public synchronized boolean configure(
int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_16BIT) {
maybeReleaseGvrAudioSurround();
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
@ -116,7 +118,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
gvrAudioSurround = new GvrAudioSurround(surroundFormat, sampleRateHz, channelCount,
FRAMES_PER_OUTPUT_BUFFER);
gvrAudioSurround.updateNativeOrientation(w, x, y, z);
if (buffer == null) {
if (buffer == EMPTY_BUFFER) {
buffer = ByteBuffer.allocateDirect(FRAMES_PER_OUTPUT_BUFFER * OUTPUT_FRAME_SIZE)
.order(ByteOrder.nativeOrder());
}
@ -179,10 +181,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
@Override
public synchronized void reset() {
maybeReleaseGvrAudioSurround();
updateOrientation(/* w= */ 1f, /* x= */ 0f, /* y= */ 0f, /* z= */ 0f);
inputEnded = false;
buffer = null;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
}
private void maybeReleaseGvrAudioSurround() {

View File

@ -22,19 +22,17 @@ import java.nio.ByteOrder;
/**
* Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate.
* <p>
* Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active.
* {@link #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()},
* {@link #isEnded()}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and
* {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call
* {@link #reset()} to reset the processor to its unconfigured state.
*
* <p>Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active. {@link
* #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()}, {@link #isEnded()},
* {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and {@link
* #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link #reset()} to
* reset the processor to its unconfigured state and release any resources.
*/
public interface AudioProcessor {
/**
* Exception thrown when a processor can't be configured for a given input audio format.
*/
/** Exception thrown when a processor can't be configured for a given input audio format. */
final class UnhandledFormatException extends Exception {
public UnhandledFormatException(int sampleRateHz, int channelCount, @C.Encoding int encoding) {
@ -44,33 +42,26 @@ public interface AudioProcessor {
}
/**
* An empty, direct {@link ByteBuffer}.
*/
/** An empty, direct {@link ByteBuffer}. */
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/**
* Configures the processor to process input audio with the specified format. After calling this
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
* result of the call. If it's active, {@link #getOutputSampleRateHz()},
* {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return the processor's output
* format.
* Configures the processor to process input audio with the specified format and returns whether
* to {@link #flush()} it. After calling this method, {@link #isActive()} returns whether the
* processor needs to handle buffers; if not, the processor will not accept any buffers until it
* is reconfigured. If the processor is active, {@link #getOutputSampleRateHz()}, {@link
* #getOutputChannelCount()} and {@link #getOutputEncoding()} return its output format.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio.
* @return {@code true} if the processor must be flushed or the value returned by
* {@link #isActive()} has changed as a result of the call.
* @return Whether to {@link #flush()} the processor.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
*/
boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException;
/**
* Returns whether the processor is configured and active.
*/
/** Returns whether the processor is configured and active. */
boolean isActive();
/**
@ -130,14 +121,9 @@ public interface AudioProcessor {
*/
boolean isEnded();
/**
* Clears any state in preparation for receiving a new stream of input buffers.
*/
/** Clears any state in preparation for receiving a new stream of input buffers. */
void flush();
/**
* Resets the processor to its unconfigured state.
*/
/** Resets the processor to its unconfigured state. */
void reset();
}
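
An illustrative sketch (not part of this change) of one pass through the lifecycle described
in the Javadoc above; the input buffer and format values are placeholders:

  void runThrough(AudioProcessor processor, ByteBuffer input)
      throws AudioProcessor.UnhandledFormatException {
    if (processor.configure(/* sampleRateHz= */ 48000, /* channelCount= */ 2, C.ENCODING_PCM_16BIT)) {
      processor.flush();
    }
    if (!processor.isActive()) {
      return; // Inactive processors must not be queued any buffers.
    }
    processor.queueInput(input); // May be called repeatedly as more input arrives.
    ByteBuffer output = processor.getOutput(); // Empty buffer if no output is available yet.
    processor.queueEndOfStream();
    while (!processor.isEnded()) {
      output = processor.getOutput(); // Drain any remaining output after end of stream.
    }
    processor.reset(); // Back to the unconfigured state; releases any resources.
  }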

View File

@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer2.audio;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Assertions;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
@ -30,17 +32,15 @@ import java.util.Arrays;
private int channelCount;
private int sampleRateHz;
private int[] pendingOutputChannels;
private @Nullable int[] pendingOutputChannels;
private boolean active;
private int[] outputChannels;
private @Nullable int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
/**
* Creates a new processor that applies a channel mapping.
*/
/** Creates a new processor that applies a channel mapping. */
public ChannelMappingAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
@ -52,9 +52,11 @@ import java.util.Arrays;
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
* to start using the new channel map.
*
* @param outputChannels The mapping from input to output channel indices, or {@code null} to
* leave the input unchanged.
* @see AudioSink#configure(int, int, int, int, int[], int, int)
*/
public void setChannelMap(int[] outputChannels) {
public void setChannelMap(@Nullable int[] outputChannels) {
pendingOutputChannels = outputChannels;
}
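
Illustrative usage (an assumption, since this processor is not public API and is normally
driven by DefaultAudioSink via AudioSink#configure): a pending map only takes effect once
configure() is called again:

  void swapStereoChannels() throws AudioProcessor.UnhandledFormatException {
    ChannelMappingAudioProcessor mapper = new ChannelMappingAudioProcessor();
    mapper.setChannelMap(new int[] {1, 0}); // Swap the two channels; null leaves input unchanged.
    if (mapper.configure(/* sampleRateHz= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT)) {
      mapper.flush();
    }
  }
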
@ -110,6 +112,7 @@ import java.util.Arrays;
@Override
public void queueInput(ByteBuffer inputBuffer) {
Assertions.checkState(outputChannels != null);
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int frameCount = (limit - position) / (2 * channelCount);
@ -161,6 +164,7 @@ import java.util.Arrays;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputChannels = null;
pendingOutputChannels = null;
active = false;
}

View File

@ -147,10 +147,9 @@ public final class DefaultAudioSink implements AudioSink {
private final ArrayDeque<PlaybackParametersCheckpoint> playbackParametersCheckpoints;
@Nullable private Listener listener;
/**
* Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
*/
private AudioTrack keepSessionIdAudioTrack;
/** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */
@Nullable private AudioTrack keepSessionIdAudioTrack;
private AudioTrack audioTrack;
private boolean isInputPcm;
private boolean shouldConvertHighResIntPcmToFloat;
@ -163,12 +162,12 @@ public final class DefaultAudioSink implements AudioSink {
private boolean canApplyPlaybackParameters;
private int bufferSize;
private PlaybackParameters drainingPlaybackParameters;
@Nullable private PlaybackParameters drainingPlaybackParameters;
private PlaybackParameters playbackParameters;
private long playbackParametersOffsetUs;
private long playbackParametersPositionUs;
private ByteBuffer avSyncHeader;
@Nullable private ByteBuffer avSyncHeader;
private int bytesUntilNextAvSync;
private int pcmFrameSize;
@ -184,8 +183,8 @@ public final class DefaultAudioSink implements AudioSink {
private AudioProcessor[] audioProcessors;
private ByteBuffer[] outputBuffers;
private ByteBuffer inputBuffer;
private ByteBuffer outputBuffer;
@Nullable private ByteBuffer inputBuffer;
@Nullable private ByteBuffer outputBuffer;
private byte[] preV21OutputBuffer;
private int preV21OutputBufferOffset;
private int drainingAudioProcessorIndex;
@ -408,7 +407,7 @@ public final class DefaultAudioSink implements AudioSink {
}
}
private void resetAudioProcessors() {
private void setupAudioProcessors() {
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : getAvailableAudioProcessors()) {
if (audioProcessor.isActive()) {
@ -420,7 +419,11 @@ public final class DefaultAudioSink implements AudioSink {
int count = newAudioProcessors.size();
audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count];
for (int i = 0; i < count; i++) {
flushAudioProcessors();
}
private void flushAudioProcessors() {
for (int i = 0; i < audioProcessors.length; i++) {
AudioProcessor audioProcessor = audioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
@ -436,13 +439,6 @@ public final class DefaultAudioSink implements AudioSink {
releasingConditionVariable.block();
audioTrack = initializeAudioTrack();
// The old playback parameters may no longer be applicable so try to reset them now.
setPlaybackParameters(playbackParameters);
// Flush and reset active audio processors.
resetAudioProcessors();
int audioSessionId = audioTrack.getAudioSessionId();
if (enablePreV21AudioSessionWorkaround) {
if (Util.SDK_INT < 21) {
@ -464,6 +460,10 @@ public final class DefaultAudioSink implements AudioSink {
}
}
// The old playback parameters may no longer be applicable so try to reset them now.
setPlaybackParameters(playbackParameters);
setupAudioProcessors();
audioTrackPositionTracker.setAudioTrack(
audioTrack, outputEncoding, outputPcmFrameSize, bufferSize);
setVolumeInternal();
@ -533,7 +533,7 @@ public final class DefaultAudioSink implements AudioSink {
drainingPlaybackParameters = null;
// The audio processors have drained, so flush them. This will cause any active speed
// adjustment audio processor to start producing audio with the new parameters.
resetAudioProcessors();
setupAudioProcessors();
}
if (startMediaTimeState == START_NOT_SET) {
@ -849,11 +849,7 @@ public final class DefaultAudioSink implements AudioSink {
playbackParametersPositionUs = 0;
inputBuffer = null;
outputBuffer = null;
for (int i = 0; i < audioProcessors.length; i++) {
AudioProcessor audioProcessor = audioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
flushAudioProcessors();
handledEndOfStream = false;
drainingAudioProcessorIndex = C.INDEX_UNSET;
avSyncHeader = null;

View File

@ -17,7 +17,6 @@ package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
@ -86,8 +85,6 @@ import java.nio.ByteOrder;
@Override
public void queueInput(ByteBuffer inputBuffer) {
Assertions.checkState(isActive());
boolean isInput32Bit = sourceEncoding == C.ENCODING_PCM_32BIT;
int position = inputBuffer.position();
int limit = inputBuffer.limit();
@ -150,10 +147,10 @@ import java.nio.ByteOrder;
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
sourceEncoding = C.ENCODING_INVALID;
buffer = EMPTY_BUFFER;
}
/**

View File

@ -28,15 +28,12 @@ import java.nio.ByteOrder;
private int sampleRateHz;
private int channelCount;
@C.PcmEncoding
private int encoding;
private @C.PcmEncoding int encoding;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
/**
* Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/
/** Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}. */
public ResamplingAudioProcessor() {
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
@ -59,9 +56,6 @@ import java.nio.ByteOrder;
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.encoding = encoding;
if (encoding == C.ENCODING_PCM_16BIT) {
buffer = EMPTY_BUFFER;
}
return true;
}
@ -139,6 +133,7 @@ import java.nio.ByteOrder;
}
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
@ -177,10 +172,10 @@ import java.nio.ByteOrder;
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID;
buffer = EMPTY_BUFFER;
}
}
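
For context, a standalone sketch (not the library's implementation) of the kind of widening
this processor performs, assuming the usual convention that 8-bit PCM is unsigned:

  // Widens unsigned 8-bit PCM to signed 16-bit little-endian PCM.
  static void widen8BitTo16Bit(ByteBuffer input, ByteBuffer output) {
    while (input.hasRemaining()) {
      int unsigned = input.get() & 0xFF;              // 8-bit PCM samples are unsigned (0..255).
      short sample = (short) ((unsigned - 128) << 8); // Re-center around zero, scale to 16 bits.
      output.put((byte) (sample & 0xFF));             // Low byte first (little-endian).
      output.put((byte) ((sample >> 8) & 0xFF));
    }
  }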

View File

@ -72,7 +72,6 @@ import java.nio.ByteOrder;
private int bytesPerFrame;
private boolean enabled;
private boolean isActive;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
@ -103,11 +102,13 @@ import java.nio.ByteOrder;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
maybeSilenceBuffer = new byte[0];
paddingBuffer = new byte[0];
}
/**
* Sets whether to skip silence in the input. After calling this method, call {@link
* #configure(int, int, int)} to apply the new setting.
* Sets whether to skip silence in the input. The new setting will take effect after calling
* {@link #flush()}.
*
* @param enabled Whether to skip silence in the input.
*/
@ -131,33 +132,18 @@ import java.nio.ByteOrder;
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
boolean wasActive = isActive;
isActive = enabled;
if (!isActive) {
return wasActive;
}
if (wasActive && this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
bytesPerFrame = channelCount * 2;
int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
if (maybeSilenceBuffer == null || maybeSilenceBuffer.length != maybeSilenceBufferSize) {
maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
}
paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
paddingBuffer = new byte[paddingSize];
state = STATE_NOISY;
return true;
}
@Override
public boolean isActive() {
return isActive;
return enabled;
}
@Override
@ -221,6 +207,17 @@ import java.nio.ByteOrder;
@Override
public void flush() {
if (isActive()) {
int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
}
paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
if (paddingBuffer.length != paddingSize) {
paddingBuffer = new byte[paddingSize];
}
}
state = STATE_NOISY;
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
skippedFrames = 0;
@ -230,11 +227,14 @@ import java.nio.ByteOrder;
@Override
public void reset() {
enabled = false;
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
maybeSilenceBuffer = null;
paddingSize = 0;
maybeSilenceBuffer = new byte[0];
paddingBuffer = new byte[0];
}
// Internal methods.
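
The buffer sizing in flush() above depends on durationUsToFrames, whose body is not part of
this diff. A plausible sketch, assuming it converts a duration to a whole number of frames at
the configured sample rate:

  // Assumed helper (not shown in this diff): frames spanned by durationUs at sampleRateHz.
  private int durationUsToFrames(long durationUs) {
    return (int) ((durationUs * sampleRateHz) / C.MICROS_PER_SECOND);
  }
  // Example with hypothetical values: 100,000 us at 44,100 Hz stereo 16-bit PCM gives
  // 4,410 frames, i.e. 4,410 * bytesPerFrame (4) = 17,640 bytes for maybeSilenceBuffer.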

View File

@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer2.audio;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
@ -64,7 +66,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
private int channelCount;
private int sampleRateHz;
private Sonic sonic;
private @Nullable Sonic sonic;
private float speed;
private float pitch;
private int outputSampleRateHz;
@ -185,6 +187,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void queueInput(ByteBuffer inputBuffer) {
Assertions.checkState(sonic != null);
if (inputBuffer.hasRemaining()) {
ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
int inputSize = inputBuffer.remaining();
@ -210,6 +213,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void queueEndOfStream() {
Assertions.checkState(sonic != null);
sonic.queueEndOfStream();
inputEnded = true;
}
@ -228,7 +232,8 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void flush() {
sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
sonic =
isActive() ? new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz) : null;
outputBuffer = EMPTY_BUFFER;
inputBytes = 0;
outputBytes = 0;
@ -237,17 +242,19 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void reset() {
sonic = null;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
speed = 1f;
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
sonic = null;
inputBytes = 0;
outputBytes = 0;
inputEnded = false;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
}
}
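
Illustrative usage (not part of the change): flush() builds a Sonic instance from the current
speed and pitch only when the processor is active, so a caller sets parameters, reconfigures,
and then flushes:

  void configureSpeedup(SonicAudioProcessor sonic) throws AudioProcessor.UnhandledFormatException {
    sonic.setSpeed(1.5f);
    sonic.setPitch(1f);
    if (sonic.configure(/* sampleRateHz= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT)) {
      sonic.flush(); // Creates the Sonic instance, since a non-unit speed makes the processor active.
    }
  }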

View File

@ -22,9 +22,7 @@ import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Audio processor for trimming samples from the start/end of data.
*/
/** Audio processor for trimming samples from the start/end of data. */
/* package */ final class TrimmingAudioProcessor implements AudioProcessor {
private boolean isActive;
@ -40,13 +38,13 @@ import java.nio.ByteOrder;
private int endBufferSize;
private boolean inputEnded;
/**
* Creates a new audio processor for trimming samples from the start/end of data.
*/
/** Creates a new audio processor for trimming samples from the start/end of data. */
public TrimmingAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
endBuffer = new byte[0];
}
/**
@ -182,7 +180,7 @@ import java.nio.ByteOrder;
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
endBuffer = null;
endBuffer = new byte[0];
}
}

View File

@ -68,12 +68,10 @@ public final class SilenceSkippingAudioProcessorTest {
silenceSkippingAudioProcessor.setEnabled(false);
// When configuring it.
boolean reconfigured =
silenceSkippingAudioProcessor.configure(
TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
silenceSkippingAudioProcessor.configure(
TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
// It's not active.
assertThat(reconfigured).isFalse();
assertThat(silenceSkippingAudioProcessor.isActive()).isFalse();
}
@ -81,12 +79,10 @@ public final class SilenceSkippingAudioProcessorTest {
public void testDefaultProcessor_isNotEnabled() throws Exception {
// Given a processor in its default state.
// When reconfigured.
boolean reconfigured =
silenceSkippingAudioProcessor.configure(
TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
silenceSkippingAudioProcessor.configure(
TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
// It's not active.
assertThat(reconfigured).isFalse();
assertThat(silenceSkippingAudioProcessor.isActive()).isFalse();
}
@ -97,7 +93,9 @@ public final class SilenceSkippingAudioProcessorTest {
boolean reconfigured =
silenceSkippingAudioProcessor.configure(
TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
assertThat(reconfigured).isTrue();
if (reconfigured) {
silenceSkippingAudioProcessor.flush();
}
// When reconfiguring it with a different sample rate.
reconfigured =
@ -305,6 +303,7 @@ public final class SilenceSkippingAudioProcessorTest {
InputBufferProvider inputBufferProvider,
int inputBufferSize)
throws UnhandledFormatException {
processor.flush();
long totalOutputFrames = 0;
while (inputBufferProvider.hasRemaining()) {
ByteBuffer inputBuffer = inputBufferProvider.getNextInputBuffer(inputBufferSize);