Clean up AudioProcessor implementations

- Ensure that no memory is used by audio processors that are always inactive, by
  only allocating in flush() if the processor is active. If data was already
  allocated but a processor becomes inactive, we assume that the allocation may
  be needed in the future, so we do not remove it (e.g., in the case of
  ResamplingAudioProcessor).
- Make SilenceSkippingAudioProcessor set up its buffers in flush(), and clarify
  that flush() must always be called when configure() returns true (see the
  sketch after this list).
- Make reset() reset all state for all processors.
- Use @Nullable state or empty arrays for inactive audio processor buffers.
- Miscellaneous style/consistency cleanup.
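
For illustration only (not part of this change), here is a minimal caller-side
sketch of the configure()/flush() contract described above. The use of
SonicAudioProcessor and the 44100 Hz stereo 16-bit PCM format are arbitrary
example values, not something this commit prescribes.

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.audio.AudioProcessor;
    import com.google.android.exoplayer2.audio.SonicAudioProcessor;

    final class AudioProcessorContractSketch {

      // Configures the processor and flushes it if (and only if) configure() asks for it.
      static void prepare(AudioProcessor processor)
          throws AudioProcessor.UnhandledFormatException {
        boolean needsFlush =
            processor.configure(
                /* sampleRateHz= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);
        if (needsFlush) {
          // Active processors now (re)allocate their buffers here rather than in configure().
          processor.flush();
        }
        if (processor.isActive()) {
          // queueInput()/getOutput() may only be called while the processor is active.
        }
      }

      public static void main(String[] args) throws Exception {
        prepare(new SonicAudioProcessor());
      }
    }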

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=190895783
andrewlewis 2018-03-29 01:30:11 -07:00 committed by Oliver Woodman
parent acca4f238b
commit 78ff4af6a7
11 changed files with 112 additions and 126 deletions

View File

@@ -26,6 +26,7 @@ android {
 dependencies {
     implementation project(modulePrefix + 'library-core')
+    implementation 'com.android.support:support-annotations:' + supportLibraryVersion
     implementation 'com.google.vr:sdk-audio:1.80.0'
 }

View File

@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer2.ext.gvr;

+import android.support.annotation.Nullable;
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
 import com.google.android.exoplayer2.Format;
@@ -39,7 +40,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
   private int sampleRateHz;
   private int channelCount;
-  private GvrAudioSurround gvrAudioSurround;
+  @Nullable private GvrAudioSurround gvrAudioSurround;
   private ByteBuffer buffer;
   private boolean inputEnded;
@@ -48,14 +49,13 @@ public final class GvrAudioProcessor implements AudioProcessor {
   private float y;
   private float z;

-  /**
-   * Creates a new GVR audio processor.
-   */
+  /** Creates a new GVR audio processor. */
   public GvrAudioProcessor() {
     // Use the identity for the initial orientation.
     w = 1f;
     sampleRateHz = Format.NO_VALUE;
     channelCount = Format.NO_VALUE;
+    buffer = EMPTY_BUFFER;
   }

   /**
@@ -77,9 +77,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
     }
   }

+  @SuppressWarnings("ReferenceEquality")
   @Override
-  public synchronized boolean configure(int sampleRateHz, int channelCount,
-      @C.Encoding int encoding) throws UnhandledFormatException {
+  public synchronized boolean configure(
+      int sampleRateHz, int channelCount, @C.Encoding int encoding)
+      throws UnhandledFormatException {
     if (encoding != C.ENCODING_PCM_16BIT) {
       maybeReleaseGvrAudioSurround();
       throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
@@ -116,7 +118,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
     gvrAudioSurround = new GvrAudioSurround(surroundFormat, sampleRateHz, channelCount,
         FRAMES_PER_OUTPUT_BUFFER);
     gvrAudioSurround.updateNativeOrientation(w, x, y, z);
-    if (buffer == null) {
+    if (buffer == EMPTY_BUFFER) {
       buffer = ByteBuffer.allocateDirect(FRAMES_PER_OUTPUT_BUFFER * OUTPUT_FRAME_SIZE)
           .order(ByteOrder.nativeOrder());
     }
@@ -179,10 +181,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
   @Override
   public synchronized void reset() {
     maybeReleaseGvrAudioSurround();
+    updateOrientation(/* w= */ 1f, /* x= */ 0f, /* y= */ 0f, /* z= */ 0f);
     inputEnded = false;
-    buffer = null;
     sampleRateHz = Format.NO_VALUE;
     channelCount = Format.NO_VALUE;
+    buffer = EMPTY_BUFFER;
   }

   private void maybeReleaseGvrAudioSurround() {

View File

@@ -22,19 +22,17 @@ import java.nio.ByteOrder;
 /**
  * Interface for audio processors, which take audio data as input and transform it, potentially
  * modifying its channel count, encoding and/or sample rate.
- * <p>
- * Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
- * call {@link #isActive()} to determine whether the processor is active.
- * {@link #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()},
- * {@link #isEnded()}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and
- * {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call
- * {@link #reset()} to reset the processor to its unconfigured state.
+ *
+ * <p>Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
+ * call {@link #isActive()} to determine whether the processor is active. {@link
+ * #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()}, {@link #isEnded()},
+ * {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and {@link
+ * #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link #reset()} to
+ * reset the processor to its unconfigured state and release any resources.
  */
 public interface AudioProcessor {

-  /**
-   * Exception thrown when a processor can't be configured for a given input audio format.
-   */
+  /** Exception thrown when a processor can't be configured for a given input audio format. */
   final class UnhandledFormatException extends Exception {

     public UnhandledFormatException(int sampleRateHz, int channelCount, @C.Encoding int encoding) {
@@ -44,33 +42,26 @@ public interface AudioProcessor {
     }
   }

-  /**
-   * An empty, direct {@link ByteBuffer}.
-   */
+  /** An empty, direct {@link ByteBuffer}. */
   ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());

   /**
-   * Configures the processor to process input audio with the specified format. After calling this
-   * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
-   * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
-   * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
-   * result of the call. If it's active, {@link #getOutputSampleRateHz()},
-   * {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return the processor's output
-   * format.
+   * Configures the processor to process input audio with the specified format and returns whether
+   * to {@link #flush()} it. After calling this method, {@link #isActive()} returns whether the
+   * processor needs to handle buffers; if not, the processor will not accept any buffers until it
+   * is reconfigured. If the processor is active, {@link #getOutputSampleRateHz()}, {@link
+   * #getOutputChannelCount()} and {@link #getOutputEncoding()} return its output format.
    *
    * @param sampleRateHz The sample rate of input audio in Hz.
    * @param channelCount The number of interleaved channels in input audio.
    * @param encoding The encoding of input audio.
-   * @return {@code true} if the processor must be flushed or the value returned by
-   *     {@link #isActive()} has changed as a result of the call.
+   * @return Whether to {@link #flush()} the processor.
    * @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
    */
   boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
       throws UnhandledFormatException;

-  /**
-   * Returns whether the processor is configured and active.
-   */
+  /** Returns whether the processor is configured and active. */
   boolean isActive();

   /**
@@ -130,14 +121,9 @@ public interface AudioProcessor {
    */
   boolean isEnded();

-  /**
-   * Clears any state in preparation for receiving a new stream of input buffers.
-   */
+  /** Clears any state in preparation for receiving a new stream of input buffers. */
   void flush();

-  /**
-   * Resets the processor to its unconfigured state.
-   */
+  /** Resets the processor to its unconfigured state. */
   void reset();
 }

View File

@@ -15,9 +15,11 @@
  */
 package com.google.android.exoplayer2.audio;

+import android.support.annotation.Nullable;
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.C.Encoding;
 import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Assertions;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.util.Arrays;
@@ -30,17 +32,15 @@ import java.util.Arrays;
   private int channelCount;
   private int sampleRateHz;
-  private int[] pendingOutputChannels;
+  private @Nullable int[] pendingOutputChannels;

   private boolean active;
-  private int[] outputChannels;
+  private @Nullable int[] outputChannels;
   private ByteBuffer buffer;
   private ByteBuffer outputBuffer;
   private boolean inputEnded;

-  /**
-   * Creates a new processor that applies a channel mapping.
-   */
+  /** Creates a new processor that applies a channel mapping. */
   public ChannelMappingAudioProcessor() {
     buffer = EMPTY_BUFFER;
     outputBuffer = EMPTY_BUFFER;
@@ -52,9 +52,11 @@ import java.util.Arrays;
    * Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
    * to start using the new channel map.
    *
+   * @param outputChannels The mapping from input to output channel indices, or {@code null} to
+   *     leave the input unchanged.
    * @see AudioSink#configure(int, int, int, int, int[], int, int)
    */
-  public void setChannelMap(int[] outputChannels) {
+  public void setChannelMap(@Nullable int[] outputChannels) {
     pendingOutputChannels = outputChannels;
   }
@@ -110,6 +112,7 @@ import java.util.Arrays;
   @Override
   public void queueInput(ByteBuffer inputBuffer) {
+    Assertions.checkState(outputChannels != null);
     int position = inputBuffer.position();
     int limit = inputBuffer.limit();
     int frameCount = (limit - position) / (2 * channelCount);
@@ -161,6 +164,7 @@ import java.util.Arrays;
     channelCount = Format.NO_VALUE;
     sampleRateHz = Format.NO_VALUE;
     outputChannels = null;
+    pendingOutputChannels = null;
     active = false;
   }

View File

@@ -147,10 +147,9 @@ public final class DefaultAudioSink implements AudioSink {
   private final ArrayDeque<PlaybackParametersCheckpoint> playbackParametersCheckpoints;

   @Nullable private Listener listener;
-  /**
-   * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
-   */
-  private AudioTrack keepSessionIdAudioTrack;
+  /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */
+  @Nullable private AudioTrack keepSessionIdAudioTrack;
   private AudioTrack audioTrack;
   private boolean isInputPcm;
   private boolean shouldConvertHighResIntPcmToFloat;
@@ -163,12 +162,12 @@ public final class DefaultAudioSink implements AudioSink {
   private boolean canApplyPlaybackParameters;
   private int bufferSize;

-  private PlaybackParameters drainingPlaybackParameters;
+  @Nullable private PlaybackParameters drainingPlaybackParameters;
   private PlaybackParameters playbackParameters;
   private long playbackParametersOffsetUs;
   private long playbackParametersPositionUs;

-  private ByteBuffer avSyncHeader;
+  @Nullable private ByteBuffer avSyncHeader;
   private int bytesUntilNextAvSync;
   private int pcmFrameSize;
@@ -184,8 +183,8 @@ public final class DefaultAudioSink implements AudioSink {
   private AudioProcessor[] audioProcessors;
   private ByteBuffer[] outputBuffers;
-  private ByteBuffer inputBuffer;
-  private ByteBuffer outputBuffer;
+  @Nullable private ByteBuffer inputBuffer;
+  @Nullable private ByteBuffer outputBuffer;
   private byte[] preV21OutputBuffer;
   private int preV21OutputBufferOffset;
   private int drainingAudioProcessorIndex;
@@ -408,7 +407,7 @@ public final class DefaultAudioSink implements AudioSink {
     }
   }

-  private void resetAudioProcessors() {
+  private void setupAudioProcessors() {
     ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
     for (AudioProcessor audioProcessor : getAvailableAudioProcessors()) {
       if (audioProcessor.isActive()) {
@@ -420,7 +419,11 @@ public final class DefaultAudioSink implements AudioSink {
     int count = newAudioProcessors.size();
     audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
     outputBuffers = new ByteBuffer[count];
-    for (int i = 0; i < count; i++) {
+    flushAudioProcessors();
+  }
+
+  private void flushAudioProcessors() {
+    for (int i = 0; i < audioProcessors.length; i++) {
       AudioProcessor audioProcessor = audioProcessors[i];
       audioProcessor.flush();
       outputBuffers[i] = audioProcessor.getOutput();
@@ -436,13 +439,6 @@ public final class DefaultAudioSink implements AudioSink {
     releasingConditionVariable.block();

     audioTrack = initializeAudioTrack();
-
-    // The old playback parameters may no longer be applicable so try to reset them now.
-    setPlaybackParameters(playbackParameters);
-
-    // Flush and reset active audio processors.
-    resetAudioProcessors();
-
     int audioSessionId = audioTrack.getAudioSessionId();
     if (enablePreV21AudioSessionWorkaround) {
       if (Util.SDK_INT < 21) {
@@ -464,6 +460,10 @@ public final class DefaultAudioSink implements AudioSink {
       }
     }

+    // The old playback parameters may no longer be applicable so try to reset them now.
+    setPlaybackParameters(playbackParameters);
+    setupAudioProcessors();
+
     audioTrackPositionTracker.setAudioTrack(
         audioTrack, outputEncoding, outputPcmFrameSize, bufferSize);
     setVolumeInternal();
@@ -533,7 +533,7 @@ public final class DefaultAudioSink implements AudioSink {
       drainingPlaybackParameters = null;
       // The audio processors have drained, so flush them. This will cause any active speed
       // adjustment audio processor to start producing audio with the new parameters.
-      resetAudioProcessors();
+      setupAudioProcessors();
     }

     if (startMediaTimeState == START_NOT_SET) {
@@ -849,11 +849,7 @@ public final class DefaultAudioSink implements AudioSink {
       playbackParametersPositionUs = 0;
       inputBuffer = null;
       outputBuffer = null;
-      for (int i = 0; i < audioProcessors.length; i++) {
-        AudioProcessor audioProcessor = audioProcessors[i];
-        audioProcessor.flush();
-        outputBuffers[i] = audioProcessor.getOutput();
-      }
+      flushAudioProcessors();
       handledEndOfStream = false;
       drainingAudioProcessorIndex = C.INDEX_UNSET;
       avSyncHeader = null;

View File

@@ -17,7 +17,6 @@ package com.google.android.exoplayer2.audio;

 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.Format;
-import com.google.android.exoplayer2.util.Assertions;
 import com.google.android.exoplayer2.util.Util;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
@@ -86,8 +85,6 @@ import java.nio.ByteOrder;

   @Override
   public void queueInput(ByteBuffer inputBuffer) {
-    Assertions.checkState(isActive());
-
     boolean isInput32Bit = sourceEncoding == C.ENCODING_PCM_32BIT;
     int position = inputBuffer.position();
     int limit = inputBuffer.limit();
@@ -150,10 +147,10 @@ import java.nio.ByteOrder;
   @Override
   public void reset() {
     flush();
-    buffer = EMPTY_BUFFER;
     sampleRateHz = Format.NO_VALUE;
     channelCount = Format.NO_VALUE;
     sourceEncoding = C.ENCODING_INVALID;
+    buffer = EMPTY_BUFFER;
   }

   /**

View File

@@ -28,15 +28,12 @@ import java.nio.ByteOrder;
   private int sampleRateHz;
   private int channelCount;
-  @C.PcmEncoding
-  private int encoding;
+  private @C.PcmEncoding int encoding;
   private ByteBuffer buffer;
   private ByteBuffer outputBuffer;
   private boolean inputEnded;

-  /**
-   * Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
-   */
+  /** Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}. */
   public ResamplingAudioProcessor() {
     sampleRateHz = Format.NO_VALUE;
     channelCount = Format.NO_VALUE;
@@ -59,9 +56,6 @@ import java.nio.ByteOrder;
     this.sampleRateHz = sampleRateHz;
     this.channelCount = channelCount;
     this.encoding = encoding;
-    if (encoding == C.ENCODING_PCM_16BIT) {
-      buffer = EMPTY_BUFFER;
-    }
     return true;
   }
@@ -139,6 +133,7 @@ import java.nio.ByteOrder;
         }
         break;
       case C.ENCODING_PCM_16BIT:
+      case C.ENCODING_PCM_FLOAT:
       case C.ENCODING_INVALID:
       case Format.NO_VALUE:
       default:
@@ -177,10 +172,10 @@ import java.nio.ByteOrder;
   @Override
   public void reset() {
     flush();
-    buffer = EMPTY_BUFFER;
     sampleRateHz = Format.NO_VALUE;
     channelCount = Format.NO_VALUE;
     encoding = C.ENCODING_INVALID;
+    buffer = EMPTY_BUFFER;
   }
 }

View File

@@ -72,7 +72,6 @@ import java.nio.ByteOrder;
   private int bytesPerFrame;

   private boolean enabled;
-  private boolean isActive;

   private ByteBuffer buffer;
   private ByteBuffer outputBuffer;
@@ -103,11 +102,13 @@ import java.nio.ByteOrder;
     outputBuffer = EMPTY_BUFFER;
     channelCount = Format.NO_VALUE;
     sampleRateHz = Format.NO_VALUE;
+    maybeSilenceBuffer = new byte[0];
+    paddingBuffer = new byte[0];
   }

   /**
-   * Sets whether to skip silence in the input. After calling this method, call {@link
-   * #configure(int, int, int)} to apply the new setting.
+   * Sets whether to skip silence in the input. The new setting will take effect after calling
+   * {@link #flush()}.
    *
    * @param enabled Whether to skip silence in the input.
    */
@@ -131,33 +132,18 @@ import java.nio.ByteOrder;
     if (encoding != C.ENCODING_PCM_16BIT) {
       throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
     }
-
-    boolean wasActive = isActive;
-    isActive = enabled;
-    if (!isActive) {
-      return wasActive;
-    }
-
-    if (wasActive && this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
+    if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
       return false;
     }
-
     this.sampleRateHz = sampleRateHz;
     this.channelCount = channelCount;
     bytesPerFrame = channelCount * 2;
-    int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
-    if (maybeSilenceBuffer == null || maybeSilenceBuffer.length != maybeSilenceBufferSize) {
-      maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
-    }
-    paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
-    paddingBuffer = new byte[paddingSize];
-    state = STATE_NOISY;
     return true;
   }

   @Override
   public boolean isActive() {
-    return isActive;
+    return enabled;
   }

   @Override
@@ -221,6 +207,17 @@ import java.nio.ByteOrder;

   @Override
   public void flush() {
+    if (isActive()) {
+      int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
+      if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
+        maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
+      }
+      paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
+      if (paddingBuffer.length != paddingSize) {
+        paddingBuffer = new byte[paddingSize];
+      }
+    }
+    state = STATE_NOISY;
     outputBuffer = EMPTY_BUFFER;
     inputEnded = false;
     skippedFrames = 0;
@@ -230,11 +227,14 @@ import java.nio.ByteOrder;

   @Override
   public void reset() {
+    enabled = false;
     flush();
     buffer = EMPTY_BUFFER;
     channelCount = Format.NO_VALUE;
     sampleRateHz = Format.NO_VALUE;
-    maybeSilenceBuffer = null;
+    paddingSize = 0;
+    maybeSilenceBuffer = new byte[0];
+    paddingBuffer = new byte[0];
   }

   // Internal methods.

View File

@@ -15,9 +15,11 @@
  */
 package com.google.android.exoplayer2.audio;

+import android.support.annotation.Nullable;
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.C.Encoding;
 import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Assertions;
 import com.google.android.exoplayer2.util.Util;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
@@ -64,7 +66,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
   private int channelCount;
   private int sampleRateHz;
-  private Sonic sonic;
+  private @Nullable Sonic sonic;
   private float speed;
   private float pitch;
   private int outputSampleRateHz;
@@ -185,6 +187,7 @@ public final class SonicAudioProcessor implements AudioProcessor {

   @Override
   public void queueInput(ByteBuffer inputBuffer) {
+    Assertions.checkState(sonic != null);
     if (inputBuffer.hasRemaining()) {
       ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
       int inputSize = inputBuffer.remaining();
@@ -210,6 +213,7 @@ public final class SonicAudioProcessor implements AudioProcessor {

   @Override
   public void queueEndOfStream() {
+    Assertions.checkState(sonic != null);
     sonic.queueEndOfStream();
     inputEnded = true;
   }
@@ -228,7 +232,8 @@ public final class SonicAudioProcessor implements AudioProcessor {

   @Override
   public void flush() {
-    sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
+    sonic =
+        isActive() ? new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz) : null;
     outputBuffer = EMPTY_BUFFER;
     inputBytes = 0;
     outputBytes = 0;
@@ -237,17 +242,19 @@ public final class SonicAudioProcessor implements AudioProcessor {

   @Override
   public void reset() {
-    sonic = null;
-    buffer = EMPTY_BUFFER;
-    shortBuffer = buffer.asShortBuffer();
-    outputBuffer = EMPTY_BUFFER;
+    speed = 1f;
+    pitch = 1f;
     channelCount = Format.NO_VALUE;
     sampleRateHz = Format.NO_VALUE;
     outputSampleRateHz = Format.NO_VALUE;
+    buffer = EMPTY_BUFFER;
+    shortBuffer = buffer.asShortBuffer();
+    outputBuffer = EMPTY_BUFFER;
+    pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
+    sonic = null;
     inputBytes = 0;
     outputBytes = 0;
     inputEnded = false;
-    pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
   }
 }

View File

@@ -22,9 +22,7 @@ import com.google.android.exoplayer2.util.Util;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;

-/**
- * Audio processor for trimming samples from the start/end of data.
- */
+/** Audio processor for trimming samples from the start/end of data. */
 /* package */ final class TrimmingAudioProcessor implements AudioProcessor {

   private boolean isActive;
@@ -40,13 +38,13 @@ import java.nio.ByteOrder;
   private int endBufferSize;
   private boolean inputEnded;

-  /**
-   * Creates a new audio processor for trimming samples from the start/end of data.
-   */
+  /** Creates a new audio processor for trimming samples from the start/end of data. */
   public TrimmingAudioProcessor() {
     buffer = EMPTY_BUFFER;
     outputBuffer = EMPTY_BUFFER;
     channelCount = Format.NO_VALUE;
+    sampleRateHz = Format.NO_VALUE;
+    endBuffer = new byte[0];
   }

   /**
@@ -182,7 +180,7 @@ import java.nio.ByteOrder;
     buffer = EMPTY_BUFFER;
     channelCount = Format.NO_VALUE;
     sampleRateHz = Format.NO_VALUE;
-    endBuffer = null;
+    endBuffer = new byte[0];
   }
 }

View File

@@ -68,12 +68,10 @@ public final class SilenceSkippingAudioProcessorTest {
     silenceSkippingAudioProcessor.setEnabled(false);

     // When configuring it.
-    boolean reconfigured =
-        silenceSkippingAudioProcessor.configure(
-            TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
+    silenceSkippingAudioProcessor.configure(
+        TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);

     // It's not active.
-    assertThat(reconfigured).isFalse();
     assertThat(silenceSkippingAudioProcessor.isActive()).isFalse();
   }
@@ -81,12 +79,10 @@ public final class SilenceSkippingAudioProcessorTest {
   public void testDefaultProcessor_isNotEnabled() throws Exception {
     // Given a processor in its default state.

     // When reconfigured.
-    boolean reconfigured =
-        silenceSkippingAudioProcessor.configure(
-            TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
+    silenceSkippingAudioProcessor.configure(
+        TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);

     // It's not active.
-    assertThat(reconfigured).isFalse();
     assertThat(silenceSkippingAudioProcessor.isActive()).isFalse();
   }
@@ -97,7 +93,9 @@ public final class SilenceSkippingAudioProcessorTest {
     boolean reconfigured =
         silenceSkippingAudioProcessor.configure(
             TEST_SIGNAL_SAMPLE_RATE_HZ, TEST_SIGNAL_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
-    assertThat(reconfigured).isTrue();
+    if (reconfigured) {
+      silenceSkippingAudioProcessor.flush();
+    }

     // When reconfiguring it with a different sample rate.
     reconfigured =
@@ -305,6 +303,7 @@ public final class SilenceSkippingAudioProcessorTest {
       InputBufferProvider inputBufferProvider,
       int inputBufferSize)
       throws UnhandledFormatException {
+    processor.flush();
     long totalOutputFrames = 0;
     while (inputBufferProvider.hasRemaining()) {
       ByteBuffer inputBuffer = inputBufferProvider.getNextInputBuffer(inputBufferSize);