Rename BufferProcessor to AudioProcessor.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=148763781
This commit is contained in:
andrewlewis 2017-02-28 06:46:51 -08:00 committed by Oliver Woodman
parent 91639b26cd
commit d58008eeb7
12 changed files with 120 additions and 128 deletions

View File

@@ -19,8 +19,8 @@ import android.os.Handler;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.util.MimeTypes;
@@ -43,12 +43,11 @@ public final class FfmpegAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers
* before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public FfmpegAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) {
super(eventHandler, eventListener, bufferProcessors);
AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, audioProcessors);
}
@Override

View File

@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.ext.flac;
import android.os.Handler;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.util.MimeTypes;
@@ -38,12 +38,11 @@ public class LibflacAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers
* before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public LibflacAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) {
super(eventHandler, eventListener, bufferProcessors);
AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, audioProcessors);
}
@Override

View File

@@ -3,17 +3,16 @@
## Description ##
The GVR extension wraps the [Google VR SDK for Android][]. It provides a
GvrBufferProcessor, which uses [GvrAudioSurround][] to provide binaural
rendering of surround sound and ambisonic soundfields.
GvrAudioProcessor, which uses [GvrAudioSurround][] to provide binaural rendering
of surround sound and ambisonic soundfields.
## Instructions ##
If using SimpleExoPlayer, override SimpleExoPlayer.buildBufferProcessors to
return a GvrBufferProcessor.
If using SimpleExoPlayer, override SimpleExoPlayer.buildAudioProcessors to
return a GvrAudioProcessor.
If constructing renderers directly, pass a GvrBufferProcessor to
If constructing renderers directly, pass a GvrAudioProcessor to
MediaCodecAudioRenderer's constructor.
[Google VR SDK for Android]: https://developers.google.com/vr/android/
[GvrAudioSurround]: https://developers.google.com/vr/android/reference/com/google/vr/sdk/audio/GvrAudioSurround

View File

@@ -17,16 +17,16 @@ package com.google.android.exoplayer2.ext.gvr;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.vr.sdk.audio.GvrAudioSurround;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Buffer processor that uses {@code GvrAudioSurround} to provide binaural rendering of surround
* sound and ambisonic soundfields.
* An {@link AudioProcessor} that uses {@code GvrAudioSurround} to provide binaural rendering of
* surround sound and ambisonic soundfields.
*/
public final class GvrBufferProcessor implements BufferProcessor {
public final class GvrAudioProcessor implements AudioProcessor {
private static final int FRAMES_PER_OUTPUT_BUFFER = 1024;
private static final int OUTPUT_CHANNEL_COUNT = 2;
@@ -43,7 +43,7 @@ public final class GvrBufferProcessor implements BufferProcessor {
private float y;
private float z;
public GvrBufferProcessor() {
public GvrAudioProcessor() {
// Use the identity for the initial orientation.
w = 1f;
sampleRateHz = Format.NO_VALUE;

View File

@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.ext.opus;
import android.os.Handler;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
@@ -40,26 +40,24 @@ public final class LibopusAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers
* before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) {
super(eventHandler, eventListener, bufferProcessors);
AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, audioProcessors);
}
/**
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio
* buffers before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys,
BufferProcessor... bufferProcessors) {
AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, null, drmSessionManager, playClearSamplesWithoutKeys,
bufferProcessors);
audioProcessors);
}
@Override

View File

@@ -28,8 +28,8 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import com.google.android.exoplayer2.audio.AudioCapabilities;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.drm.DrmSessionManager;
@@ -625,7 +625,7 @@ public class SimpleExoPlayer implements ExoPlayer {
buildVideoRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
componentListener, allowedVideoJoiningTimeMs, out);
buildAudioRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
componentListener, buildBufferProcessors(), out);
componentListener, buildAudioProcessors(), out);
buildTextRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
buildMetadataRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
buildMiscellaneousRenderers(context, mainHandler, extensionRendererMode, out);
@@ -685,16 +685,16 @@ public class SimpleExoPlayer implements ExoPlayer {
* not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode.
* @param eventListener An event listener.
* @param bufferProcessors An array of {@link BufferProcessor}s which will process PCM audio
* buffers before they are output. May be empty.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers
* before output. May be empty.
* @param out An array to which the built renderers should be appended.
*/
protected void buildAudioRenderers(Context context, Handler mainHandler,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode, AudioRendererEventListener eventListener,
BufferProcessor[] bufferProcessors, ArrayList<Renderer> out) {
AudioProcessor[] audioProcessors, ArrayList<Renderer> out) {
out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
mainHandler, eventListener, AudioCapabilities.getCapabilities(context), bufferProcessors));
mainHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@@ -708,9 +708,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class);
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors);
audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibopusAudioRenderer.");
} catch (ClassNotFoundException e) {
@@ -723,9 +723,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class);
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors);
audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibflacAudioRenderer.");
} catch (ClassNotFoundException e) {
@@ -738,9 +738,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class);
AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors);
audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded FfmpegAudioRenderer.");
} catch (ClassNotFoundException e) {
@@ -794,11 +794,10 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
* Builds an array of {@link BufferProcessor}s which will process PCM audio buffers before they
* are output.
* Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
*/
protected BufferProcessor[] buildBufferProcessors() {
return new BufferProcessor[0];
protected AudioProcessor[] buildAudioProcessors() {
return new AudioProcessor[0];
}
// Internal methods.

View File

@@ -20,12 +20,12 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Interface for processors of audio buffers.
* Interface for audio processors.
*/
public interface BufferProcessor {
public interface AudioProcessor {
/**
* Exception thrown when a processor can't be configured for a given input format.
* Exception thrown when a processor can't be configured for a given input audio format.
*/
final class UnhandledFormatException extends Exception {
@@ -42,7 +42,7 @@ public interface BufferProcessor {
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/**
* Configures the processor to process input buffers with the specified format. After calling this
* Configures the processor to process input audio with the specified format. After calling this
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
@@ -111,7 +111,7 @@ public interface BufferProcessor {
boolean isEnded();
/**
* Clears any state in preparation for receiving a new stream of buffers.
* Clears any state in preparation for receiving a new stream of input buffers.
*/
void flush();

View File

@@ -270,8 +270,8 @@ public final class AudioTrack {
public static boolean failOnSpuriousAudioTimestamp = false;
private final AudioCapabilities audioCapabilities;
private final ChannelMappingBufferProcessor channelMappingBufferProcessor;
private final BufferProcessor[] availableBufferProcessors;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final AudioProcessor[] availableAudioProcessors;
private final Listener listener;
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
@@ -319,13 +319,13 @@
private long latencyUs;
private float volume;
private BufferProcessor[] bufferProcessors;
private AudioProcessor[] audioProcessors;
private ByteBuffer[] outputBuffers;
private ByteBuffer inputBuffer;
private ByteBuffer outputBuffer;
private byte[] preV21OutputBuffer;
private int preV21OutputBufferOffset;
private int drainingBufferProcessorIndex;
private int drainingAudioProcessorIndex;
private boolean handledEndOfStream;
private boolean playing;
@@ -337,18 +337,18 @@
/**
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param bufferProcessors An array of {@link BufferProcessor}s which will process PCM audio
* buffers before they are output. May be empty.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
* output. May be empty.
* @param listener Listener for audio track events.
*/
public AudioTrack(AudioCapabilities audioCapabilities, BufferProcessor[] bufferProcessors,
public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
Listener listener) {
this.audioCapabilities = audioCapabilities;
channelMappingBufferProcessor = new ChannelMappingBufferProcessor();
availableBufferProcessors = new BufferProcessor[bufferProcessors.length + 2];
availableBufferProcessors[0] = new ResamplingBufferProcessor();
availableBufferProcessors[1] = channelMappingBufferProcessor;
System.arraycopy(bufferProcessors, 0, availableBufferProcessors, 2, bufferProcessors.length);
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
availableAudioProcessors = new AudioProcessor[audioProcessors.length + 2];
availableAudioProcessors[0] = new ResamplingAudioProcessor();
availableAudioProcessors[1] = channelMappingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
this.listener = listener;
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) {
@@ -371,8 +371,8 @@
startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
drainingBufferProcessorIndex = C.INDEX_UNSET;
this.bufferProcessors = new BufferProcessor[0];
drainingAudioProcessorIndex = C.INDEX_UNSET;
this.audioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0];
}
@@ -482,32 +482,32 @@
if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
// Reconfigure the buffer processors.
channelMappingBufferProcessor.setChannelMap(outputChannels);
ArrayList<BufferProcessor> newBufferProcessors = new ArrayList<>();
for (BufferProcessor bufferProcessor : availableBufferProcessors) {
// Reconfigure the audio processors.
channelMappingAudioProcessor.setChannelMap(outputChannels);
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
flush |= bufferProcessor.configure(sampleRate, channelCount, encoding);
} catch (BufferProcessor.UnhandledFormatException e) {
flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
} catch (AudioProcessor.UnhandledFormatException e) {
throw new ConfigurationException(e);
}
if (bufferProcessor.isActive()) {
newBufferProcessors.add(bufferProcessor);
channelCount = bufferProcessor.getOutputChannelCount();
encoding = bufferProcessor.getOutputEncoding();
if (audioProcessor.isActive()) {
newAudioProcessors.add(audioProcessor);
channelCount = audioProcessor.getOutputChannelCount();
encoding = audioProcessor.getOutputEncoding();
} else {
bufferProcessor.flush();
audioProcessor.flush();
}
}
if (flush) {
int count = newBufferProcessors.size();
bufferProcessors = newBufferProcessors.toArray(new BufferProcessor[count]);
int count = newAudioProcessors.size();
audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count];
for (int i = 0; i < count; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i];
bufferProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput();
AudioProcessor audioProcessor = audioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
}
}
@@ -787,20 +787,20 @@
}
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
int count = bufferProcessors.length;
int count = audioProcessors.length;
int index = count;
while (index >= 0) {
ByteBuffer input = index > 0 ? outputBuffers[index - 1]
: (inputBuffer != null ? inputBuffer : BufferProcessor.EMPTY_BUFFER);
: (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
if (index == count) {
writeBuffer(input, avSyncPresentationTimeUs);
} else {
BufferProcessor bufferProcessor = bufferProcessors[index];
bufferProcessor.queueInput(input);
ByteBuffer output = bufferProcessor.getOutput();
AudioProcessor audioProcessor = audioProcessors[index];
audioProcessor.queueInput(input);
ByteBuffer output = audioProcessor.getOutput();
outputBuffers[index] = output;
if (output.hasRemaining()) {
// Handle the output as input to the next buffer processor or the AudioTrack.
// Handle the output as input to the next audio processor or the AudioTrack.
index++;
continue;
}
@@ -889,23 +889,23 @@
return;
}
// Drain the buffer processors.
boolean bufferProcessorNeedsEndOfStream = false;
if (drainingBufferProcessorIndex == C.INDEX_UNSET) {
drainingBufferProcessorIndex = passthrough ? bufferProcessors.length : 0;
bufferProcessorNeedsEndOfStream = true;
// Drain the audio processors.
boolean audioProcessorNeedsEndOfStream = false;
if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
audioProcessorNeedsEndOfStream = true;
}
while (drainingBufferProcessorIndex < bufferProcessors.length) {
BufferProcessor bufferProcessor = bufferProcessors[drainingBufferProcessorIndex];
if (bufferProcessorNeedsEndOfStream) {
bufferProcessor.queueEndOfStream();
while (drainingAudioProcessorIndex < audioProcessors.length) {
AudioProcessor audioProcessor = audioProcessors[drainingAudioProcessorIndex];
if (audioProcessorNeedsEndOfStream) {
audioProcessor.queueEndOfStream();
}
processBuffers(C.TIME_UNSET);
if (!bufferProcessor.isEnded()) {
if (!audioProcessor.isEnded()) {
return;
}
bufferProcessorNeedsEndOfStream = true;
drainingBufferProcessorIndex++;
audioProcessorNeedsEndOfStream = true;
drainingAudioProcessorIndex++;
}
// Finish writing any remaining output to the track.
@@ -989,8 +989,8 @@
* Enables tunneling. The audio track is reset if tunneling was previously disabled or if the
* audio session id has changed. Enabling tunneling requires platform API version 21 onwards.
* <p>
* If this instance has {@link BufferProcessor}s and tunneling is enabled, care must be taken that
* buffer processors do not output buffers with a different duration than their input, and buffer
* If this instance has {@link AudioProcessor}s and tunneling is enabled, care must be taken that
* audio processors do not output buffers with a different duration than their input, and buffer
* processors must produce output corresponding to their last input immediately after that input
* is queued.
*
@@ -1067,13 +1067,13 @@
framesPerEncodedSample = 0;
inputBuffer = null;
outputBuffer = null;
for (int i = 0; i < bufferProcessors.length; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i];
bufferProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput();
for (int i = 0; i < audioProcessors.length; i++) {
AudioProcessor audioProcessor = audioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
handledEndOfStream = false;
drainingBufferProcessorIndex = C.INDEX_UNSET;
drainingAudioProcessorIndex = C.INDEX_UNSET;
avSyncHeader = null;
bytesUntilNextAvSync = 0;
startMediaTimeState = START_NOT_SET;
@@ -1108,8 +1108,8 @@
public void release() {
reset();
releaseKeepSessionIdAudioTrack();
for (BufferProcessor bufferProcessor : availableBufferProcessors) {
bufferProcessor.release();
for (AudioProcessor audioProcessor : availableAudioProcessors) {
audioProcessor.release();
}
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
playing = false;

View File

@@ -22,10 +22,10 @@ import java.nio.ByteOrder;
import java.util.Arrays;
/**
* Buffer processor that applies a mapping from input channels onto specified output channels. This
* can be used to reorder, duplicate or discard channels.
* An {@link AudioProcessor} that applies a mapping from input channels onto specified output
* channels. This can be used to reorder, duplicate or discard channels.
*/
/* package */ final class ChannelMappingBufferProcessor implements BufferProcessor {
/* package */ final class ChannelMappingAudioProcessor implements AudioProcessor {
private int channelCount;
private int sampleRateHz;
@@ -40,7 +40,7 @@ import java.util.Arrays;
/**
* Creates a new processor that applies a channel mapping.
*/
public ChannelMappingBufferProcessor() {
public ChannelMappingAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
}

View File

@@ -123,16 +123,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers
* before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* output.
*/
public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
BufferProcessor... bufferProcessors) {
AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
audioTrack = new AudioTrack(audioCapabilities, bufferProcessors, new AudioTrackListener());
audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
}

View File

@@ -21,9 +21,9 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* A {@link BufferProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
* An {@link AudioProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/
/* package */ final class ResamplingBufferProcessor implements BufferProcessor {
/* package */ final class ResamplingAudioProcessor implements AudioProcessor {
private int sampleRateHz;
private int channelCount;
@@ -34,9 +34,9 @@ import java.nio.ByteOrder;
private boolean inputEnded;
/**
* Creates a new buffer processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
* Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/
public ResamplingBufferProcessor() {
public ResamplingAudioProcessor() {
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID;

View File

@@ -102,12 +102,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers
* before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, BufferProcessor... bufferProcessors) {
this(eventHandler, eventListener, null, null, false, bufferProcessors);
AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
this(eventHandler, eventListener, null, null, false, audioProcessors);
}
/**
@@ -135,18 +134,17 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio
* buffers before they are output.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys,
BufferProcessor... bufferProcessors) {
AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO);
this.drmSessionManager = drmSessionManager;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioTrack = new AudioTrack(audioCapabilities, bufferProcessors, new AudioTrackListener());
audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
formatHolder = new FormatHolder();
flagsOnlyBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
decoderReinitializationState = REINITIALIZATION_STATE_NONE;