Rename BufferProcessor to AudioProcessor.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=148763781
This commit is contained in:
andrewlewis 2017-02-28 06:46:51 -08:00 committed by Oliver Woodman
parent 91639b26cd
commit d58008eeb7
12 changed files with 120 additions and 128 deletions

View File

@@ -19,8 +19,8 @@ import android.os.Handler;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
@@ -43,12 +43,11 @@ public final class FfmpegAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* before they are output.
*/ */
public FfmpegAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener, public FfmpegAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, bufferProcessors); super(eventHandler, eventListener, audioProcessors);
} }
@Override @Override

View File

@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.ext.flac;
import android.os.Handler; import android.os.Handler;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
@@ -38,12 +38,11 @@ public class LibflacAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* before they are output.
*/ */
public LibflacAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener, public LibflacAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, bufferProcessors); super(eventHandler, eventListener, audioProcessors);
} }
@Override @Override

View File

@@ -3,17 +3,16 @@
## Description ## ## Description ##
The GVR extension wraps the [Google VR SDK for Android][]. It provides a The GVR extension wraps the [Google VR SDK for Android][]. It provides a
GvrBufferProcessor, which uses [GvrAudioSurround][] to provide binaural GvrAudioProcessor, which uses [GvrAudioSurround][] to provide binaural rendering
rendering of surround sound and ambisonic soundfields. of surround sound and ambisonic soundfields.
## Instructions ## ## Instructions ##
If using SimpleExoPlayer, override SimpleExoPlayer.buildBufferProcessors to If using SimpleExoPlayer, override SimpleExoPlayer.buildAudioProcessors to
return a GvrBufferProcessor. return a GvrAudioProcessor.
If constructing renderers directly, pass a GvrBufferProcessor to If constructing renderers directly, pass a GvrAudioProcessor to
MediaCodecAudioRenderer's constructor. MediaCodecAudioRenderer's constructor.
[Google VR SDK for Android]: https://developers.google.com/vr/android/ [Google VR SDK for Android]: https://developers.google.com/vr/android/
[GvrAudioSurround]: https://developers.google.com/vr/android/reference/com/google/vr/sdk/audio/GvrAudioSurround [GvrAudioSurround]: https://developers.google.com/vr/android/reference/com/google/vr/sdk/audio/GvrAudioSurround

View File

@@ -17,16 +17,16 @@ package com.google.android.exoplayer2.ext.gvr;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.BufferProcessor; import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.vr.sdk.audio.GvrAudioSurround; import com.google.vr.sdk.audio.GvrAudioSurround;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
/** /**
* Buffer processor that uses {@code GvrAudioSurround} to provide binaural rendering of surround * An {@link AudioProcessor} that uses {@code GvrAudioSurround} to provide binaural rendering of
* sound and ambisonic soundfields. * surround sound and ambisonic soundfields.
*/ */
public final class GvrBufferProcessor implements BufferProcessor { public final class GvrAudioProcessor implements AudioProcessor {
private static final int FRAMES_PER_OUTPUT_BUFFER = 1024; private static final int FRAMES_PER_OUTPUT_BUFFER = 1024;
private static final int OUTPUT_CHANNEL_COUNT = 2; private static final int OUTPUT_CHANNEL_COUNT = 2;
@@ -43,7 +43,7 @@ public final class GvrBufferProcessor implements BufferProcessor {
private float y; private float y;
private float z; private float z;
public GvrBufferProcessor() { public GvrAudioProcessor() {
// Use the identity for the initial orientation. // Use the identity for the initial orientation.
w = 1f; w = 1f;
sampleRateHz = Format.NO_VALUE; sampleRateHz = Format.NO_VALUE;

View File

@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.ext.opus;
import android.os.Handler; import android.os.Handler;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.drm.ExoMediaCrypto;
@@ -40,26 +40,24 @@ public final class LibopusAudioRenderer extends SimpleDecoderAudioRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* before they are output.
*/ */
public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener, public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, bufferProcessors); super(eventHandler, eventListener, audioProcessors);
} }
/** /**
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* buffers before they are output.
*/ */
public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener, public LibopusAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys, DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(eventHandler, eventListener, null, drmSessionManager, playClearSamplesWithoutKeys, super(eventHandler, eventListener, null, drmSessionManager, playClearSamplesWithoutKeys,
bufferProcessors); audioProcessors);
} }
@Override @Override

View File

@@ -28,8 +28,8 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import android.view.TextureView; import android.view.TextureView;
import com.google.android.exoplayer2.audio.AudioCapabilities; import com.google.android.exoplayer2.audio.AudioCapabilities;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.BufferProcessor;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer; import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.DrmSessionManager;
@@ -625,7 +625,7 @@ public class SimpleExoPlayer implements ExoPlayer {
buildVideoRenderers(context, mainHandler, drmSessionManager, extensionRendererMode, buildVideoRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
componentListener, allowedVideoJoiningTimeMs, out); componentListener, allowedVideoJoiningTimeMs, out);
buildAudioRenderers(context, mainHandler, drmSessionManager, extensionRendererMode, buildAudioRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
componentListener, buildBufferProcessors(), out); componentListener, buildAudioProcessors(), out);
buildTextRenderers(context, mainHandler, extensionRendererMode, componentListener, out); buildTextRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
buildMetadataRenderers(context, mainHandler, extensionRendererMode, componentListener, out); buildMetadataRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
buildMiscellaneousRenderers(context, mainHandler, extensionRendererMode, out); buildMiscellaneousRenderers(context, mainHandler, extensionRendererMode, out);
@@ -685,16 +685,16 @@ public class SimpleExoPlayer implements ExoPlayer {
* not be used for DRM protected playbacks. * not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode. * @param extensionRendererMode The extension renderer mode.
* @param eventListener An event listener. * @param eventListener An event listener.
* @param bufferProcessors An array of {@link BufferProcessor}s which will process PCM audio * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers
* buffers before they are output. May be empty. * before output. May be empty.
* @param out An array to which the built renderers should be appended. * @param out An array to which the built renderers should be appended.
*/ */
protected void buildAudioRenderers(Context context, Handler mainHandler, protected void buildAudioRenderers(Context context, Handler mainHandler,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode, AudioRendererEventListener eventListener, @ExtensionRendererMode int extensionRendererMode, AudioRendererEventListener eventListener,
BufferProcessor[] bufferProcessors, ArrayList<Renderer> out) { AudioProcessor[] audioProcessors, ArrayList<Renderer> out) {
out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true, out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
mainHandler, eventListener, AudioCapabilities.getCapabilities(context), bufferProcessors)); mainHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) { if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return; return;
@@ -708,9 +708,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz = Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer"); Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class, Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class); AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener, Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors); audioProcessors);
out.add(extensionRendererIndex++, renderer); out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibopusAudioRenderer."); Log.i(TAG, "Loaded LibopusAudioRenderer.");
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
@@ -723,9 +723,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz = Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer"); Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class, Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class); AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener, Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors); audioProcessors);
out.add(extensionRendererIndex++, renderer); out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibflacAudioRenderer."); Log.i(TAG, "Loaded LibflacAudioRenderer.");
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
@@ -738,9 +738,9 @@ public class SimpleExoPlayer implements ExoPlayer {
Class<?> clazz = Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer"); Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
Constructor<?> constructor = clazz.getConstructor(Handler.class, Constructor<?> constructor = clazz.getConstructor(Handler.class,
AudioRendererEventListener.class, BufferProcessor[].class); AudioRendererEventListener.class, AudioProcessor[].class);
Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener, Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener,
bufferProcessors); audioProcessors);
out.add(extensionRendererIndex++, renderer); out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded FfmpegAudioRenderer."); Log.i(TAG, "Loaded FfmpegAudioRenderer.");
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
@@ -794,11 +794,10 @@ public class SimpleExoPlayer implements ExoPlayer {
} }
/** /**
* Builds an array of {@link BufferProcessor}s which will process PCM audio buffers before they * Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
* are output.
*/ */
protected BufferProcessor[] buildBufferProcessors() { protected AudioProcessor[] buildAudioProcessors() {
return new BufferProcessor[0]; return new AudioProcessor[0];
} }
// Internal methods. // Internal methods.

View File

@@ -20,12 +20,12 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
/** /**
* Interface for processors of audio buffers. * Interface for audio processors.
*/ */
public interface BufferProcessor { public interface AudioProcessor {
/** /**
* Exception thrown when a processor can't be configured for a given input format. * Exception thrown when a processor can't be configured for a given input audio format.
*/ */
final class UnhandledFormatException extends Exception { final class UnhandledFormatException extends Exception {
@@ -42,7 +42,7 @@ public interface BufferProcessor {
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder()); ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/** /**
* Configures the processor to process input buffers with the specified format. After calling this * Configures the processor to process input audio with the specified format. After calling this
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
@@ -111,7 +111,7 @@ public interface BufferProcessor {
boolean isEnded(); boolean isEnded();
/** /**
* Clears any state in preparation for receiving a new stream of buffers. * Clears any state in preparation for receiving a new stream of input buffers.
*/ */
void flush(); void flush();

View File

@@ -270,8 +270,8 @@ public final class AudioTrack {
public static boolean failOnSpuriousAudioTimestamp = false; public static boolean failOnSpuriousAudioTimestamp = false;
private final AudioCapabilities audioCapabilities; private final AudioCapabilities audioCapabilities;
private final ChannelMappingBufferProcessor channelMappingBufferProcessor; private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final BufferProcessor[] availableBufferProcessors; private final AudioProcessor[] availableAudioProcessors;
private final Listener listener; private final Listener listener;
private final ConditionVariable releasingConditionVariable; private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets; private final long[] playheadOffsets;
@@ -319,13 +319,13 @@
private long latencyUs; private long latencyUs;
private float volume; private float volume;
private BufferProcessor[] bufferProcessors; private AudioProcessor[] audioProcessors;
private ByteBuffer[] outputBuffers; private ByteBuffer[] outputBuffers;
private ByteBuffer inputBuffer; private ByteBuffer inputBuffer;
private ByteBuffer outputBuffer; private ByteBuffer outputBuffer;
private byte[] preV21OutputBuffer; private byte[] preV21OutputBuffer;
private int preV21OutputBufferOffset; private int preV21OutputBufferOffset;
private int drainingBufferProcessorIndex; private int drainingAudioProcessorIndex;
private boolean handledEndOfStream; private boolean handledEndOfStream;
private boolean playing; private boolean playing;
@@ -337,18 +337,18 @@
/** /**
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed. * default capabilities (no encoded audio passthrough support) should be assumed.
* @param bufferProcessors An array of {@link BufferProcessor}s which will process PCM audio * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
* buffers before they are output. May be empty. * output. May be empty.
* @param listener Listener for audio track events. * @param listener Listener for audio track events.
*/ */
public AudioTrack(AudioCapabilities audioCapabilities, BufferProcessor[] bufferProcessors, public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
Listener listener) { Listener listener) {
this.audioCapabilities = audioCapabilities; this.audioCapabilities = audioCapabilities;
channelMappingBufferProcessor = new ChannelMappingBufferProcessor(); channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
availableBufferProcessors = new BufferProcessor[bufferProcessors.length + 2]; availableAudioProcessors = new AudioProcessor[audioProcessors.length + 2];
availableBufferProcessors[0] = new ResamplingBufferProcessor(); availableAudioProcessors[0] = new ResamplingAudioProcessor();
availableBufferProcessors[1] = channelMappingBufferProcessor; availableAudioProcessors[1] = channelMappingAudioProcessor;
System.arraycopy(bufferProcessors, 0, availableBufferProcessors, 2, bufferProcessors.length); System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
this.listener = listener; this.listener = listener;
releasingConditionVariable = new ConditionVariable(true); releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) { if (Util.SDK_INT >= 18) {
@@ -371,8 +371,8 @@
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT; streamType = C.STREAM_TYPE_DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET; audioSessionId = C.AUDIO_SESSION_ID_UNSET;
drainingBufferProcessorIndex = C.INDEX_UNSET; drainingAudioProcessorIndex = C.INDEX_UNSET;
this.bufferProcessors = new BufferProcessor[0]; this.audioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0]; outputBuffers = new ByteBuffer[0];
} }
@@ -482,32 +482,32 @@
if (!passthrough) { if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount); pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
// Reconfigure the buffer processors. // Reconfigure the audio processors.
channelMappingBufferProcessor.setChannelMap(outputChannels); channelMappingAudioProcessor.setChannelMap(outputChannels);
ArrayList<BufferProcessor> newBufferProcessors = new ArrayList<>(); ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (BufferProcessor bufferProcessor : availableBufferProcessors) { for (AudioProcessor audioProcessor : availableAudioProcessors) {
try { try {
flush |= bufferProcessor.configure(sampleRate, channelCount, encoding); flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
} catch (BufferProcessor.UnhandledFormatException e) { } catch (AudioProcessor.UnhandledFormatException e) {
throw new ConfigurationException(e); throw new ConfigurationException(e);
} }
if (bufferProcessor.isActive()) { if (audioProcessor.isActive()) {
newBufferProcessors.add(bufferProcessor); newAudioProcessors.add(audioProcessor);
channelCount = bufferProcessor.getOutputChannelCount(); channelCount = audioProcessor.getOutputChannelCount();
encoding = bufferProcessor.getOutputEncoding(); encoding = audioProcessor.getOutputEncoding();
} else { } else {
bufferProcessor.flush(); audioProcessor.flush();
} }
} }
if (flush) { if (flush) {
int count = newBufferProcessors.size(); int count = newAudioProcessors.size();
bufferProcessors = newBufferProcessors.toArray(new BufferProcessor[count]); audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count]; outputBuffers = new ByteBuffer[count];
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i]; AudioProcessor audioProcessor = audioProcessors[i];
bufferProcessor.flush(); audioProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput(); outputBuffers[i] = audioProcessor.getOutput();
} }
} }
} }
@@ -787,20 +787,20 @@
} }
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException { private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
int count = bufferProcessors.length; int count = audioProcessors.length;
int index = count; int index = count;
while (index >= 0) { while (index >= 0) {
ByteBuffer input = index > 0 ? outputBuffers[index - 1] ByteBuffer input = index > 0 ? outputBuffers[index - 1]
: (inputBuffer != null ? inputBuffer : BufferProcessor.EMPTY_BUFFER); : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
if (index == count) { if (index == count) {
writeBuffer(input, avSyncPresentationTimeUs); writeBuffer(input, avSyncPresentationTimeUs);
} else { } else {
BufferProcessor bufferProcessor = bufferProcessors[index]; AudioProcessor audioProcessor = audioProcessors[index];
bufferProcessor.queueInput(input); audioProcessor.queueInput(input);
ByteBuffer output = bufferProcessor.getOutput(); ByteBuffer output = audioProcessor.getOutput();
outputBuffers[index] = output; outputBuffers[index] = output;
if (output.hasRemaining()) { if (output.hasRemaining()) {
// Handle the output as input to the next buffer processor or the AudioTrack. // Handle the output as input to the next audio processor or the AudioTrack.
index++; index++;
continue; continue;
} }
@@ -889,23 +889,23 @@
return; return;
} }
// Drain the buffer processors. // Drain the audio processors.
boolean bufferProcessorNeedsEndOfStream = false; boolean audioProcessorNeedsEndOfStream = false;
if (drainingBufferProcessorIndex == C.INDEX_UNSET) { if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
drainingBufferProcessorIndex = passthrough ? bufferProcessors.length : 0; drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
bufferProcessorNeedsEndOfStream = true; audioProcessorNeedsEndOfStream = true;
} }
while (drainingBufferProcessorIndex < bufferProcessors.length) { while (drainingAudioProcessorIndex < audioProcessors.length) {
BufferProcessor bufferProcessor = bufferProcessors[drainingBufferProcessorIndex]; AudioProcessor audioProcessor = audioProcessors[drainingAudioProcessorIndex];
if (bufferProcessorNeedsEndOfStream) { if (audioProcessorNeedsEndOfStream) {
bufferProcessor.queueEndOfStream(); audioProcessor.queueEndOfStream();
} }
processBuffers(C.TIME_UNSET); processBuffers(C.TIME_UNSET);
if (!bufferProcessor.isEnded()) { if (!audioProcessor.isEnded()) {
return; return;
} }
bufferProcessorNeedsEndOfStream = true; audioProcessorNeedsEndOfStream = true;
drainingBufferProcessorIndex++; drainingAudioProcessorIndex++;
} }
// Finish writing any remaining output to the track. // Finish writing any remaining output to the track.
@@ -989,8 +989,8 @@
* Enables tunneling. The audio track is reset if tunneling was previously disabled or if the * Enables tunneling. The audio track is reset if tunneling was previously disabled or if the
* audio session id has changed. Enabling tunneling requires platform API version 21 onwards. * audio session id has changed. Enabling tunneling requires platform API version 21 onwards.
* <p> * <p>
* If this instance has {@link BufferProcessor}s and tunneling is enabled, care must be taken that * If this instance has {@link AudioProcessor}s and tunneling is enabled, care must be taken that
* buffer processors do not output buffers with a different duration than their input, and buffer * audio processors do not output buffers with a different duration than their input, and buffer
* processors must produce output corresponding to their last input immediately after that input * processors must produce output corresponding to their last input immediately after that input
* is queued. * is queued.
* *
@@ -1067,13 +1067,13 @@
framesPerEncodedSample = 0; framesPerEncodedSample = 0;
inputBuffer = null; inputBuffer = null;
outputBuffer = null; outputBuffer = null;
for (int i = 0; i < bufferProcessors.length; i++) { for (int i = 0; i < audioProcessors.length; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i]; AudioProcessor audioProcessor = audioProcessors[i];
bufferProcessor.flush(); audioProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput(); outputBuffers[i] = audioProcessor.getOutput();
} }
handledEndOfStream = false; handledEndOfStream = false;
drainingBufferProcessorIndex = C.INDEX_UNSET; drainingAudioProcessorIndex = C.INDEX_UNSET;
avSyncHeader = null; avSyncHeader = null;
bytesUntilNextAvSync = 0; bytesUntilNextAvSync = 0;
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
@@ -1108,8 +1108,8 @@
public void release() { public void release() {
reset(); reset();
releaseKeepSessionIdAudioTrack(); releaseKeepSessionIdAudioTrack();
for (BufferProcessor bufferProcessor : availableBufferProcessors) { for (AudioProcessor audioProcessor : availableAudioProcessors) {
bufferProcessor.release(); audioProcessor.release();
} }
audioSessionId = C.AUDIO_SESSION_ID_UNSET; audioSessionId = C.AUDIO_SESSION_ID_UNSET;
playing = false; playing = false;

View File

@@ -22,10 +22,10 @@ import java.nio.ByteBuffer;
import java.util.Arrays; import java.util.Arrays;
/** /**
* Buffer processor that applies a mapping from input channels onto specified output channels. This * An {@link AudioProcessor} that applies a mapping from input channels onto specified output
* can be used to reorder, duplicate or discard channels. * channels. This can be used to reorder, duplicate or discard channels.
*/ */
/* package */ final class ChannelMappingBufferProcessor implements BufferProcessor { /* package */ final class ChannelMappingAudioProcessor implements AudioProcessor {
private int channelCount; private int channelCount;
private int sampleRateHz; private int sampleRateHz;
@ -40,7 +40,7 @@ import java.util.Arrays;
/** /**
* Creates a new processor that applies a channel mapping. * Creates a new processor that applies a channel mapping.
*/ */
public ChannelMappingBufferProcessor() { public ChannelMappingAudioProcessor() {
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
} }

View File

@ -123,16 +123,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed. * default capabilities (no encoded audio passthrough support) should be assumed.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers * @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* before they are output. * output.
*/ */
public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector, public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, boolean playClearSamplesWithoutKeys, Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities, AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys); super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
audioTrack = new AudioTrack(audioCapabilities, bufferProcessors, new AudioTrackListener()); audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
} }

View File

@ -21,9 +21,9 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
/** /**
* A {@link BufferProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}. * An {@link AudioProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/ */
/* package */ final class ResamplingBufferProcessor implements BufferProcessor { /* package */ final class ResamplingAudioProcessor implements AudioProcessor {
private int sampleRateHz; private int sampleRateHz;
private int channelCount; private int channelCount;
@ -34,9 +34,9 @@ import java.nio.ByteOrder;
private boolean inputEnded; private boolean inputEnded;
/** /**
* Creates a new buffer processor that converts audio data to {@link C#ENCODING_PCM_16BIT}. * Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/ */
public ResamplingBufferProcessor() { public ResamplingAudioProcessor() {
sampleRateHz = Format.NO_VALUE; sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE; channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID; encoding = C.ENCODING_INVALID;

View File

@ -102,12 +102,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio buffers * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* before they are output.
*/ */
public SimpleDecoderAudioRenderer(Handler eventHandler, public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, BufferProcessor... bufferProcessors) { AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
this(eventHandler, eventListener, null, null, false, bufferProcessors); this(eventHandler, eventListener, null, null, false, audioProcessors);
} }
/** /**
@ -135,18 +134,17 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* begin in parallel with key acquisition. This parameter specifies whether the renderer is * begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager} * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media. * has obtained the keys necessary to decrypt encrypted regions of the media.
* @param bufferProcessors Optional {@link BufferProcessor}s which will process PCM audio * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
* buffers before they are output.
*/ */
public SimpleDecoderAudioRenderer(Handler eventHandler, public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities, AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys, DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys,
BufferProcessor... bufferProcessors) { AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO); super(C.TRACK_TYPE_AUDIO);
this.drmSessionManager = drmSessionManager; this.drmSessionManager = drmSessionManager;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioTrack = new AudioTrack(audioCapabilities, bufferProcessors, new AudioTrackListener()); audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
formatHolder = new FormatHolder(); formatHolder = new FormatHolder();
flagsOnlyBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED); flagsOnlyBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
decoderReinitializationState = REINITIALIZATION_STATE_NONE; decoderReinitializationState = REINITIALIZATION_STATE_NONE;