diff --git a/demos/main/src/main/java/com/google/android/exoplayer2/demo/EventLogger.java b/demos/main/src/main/java/com/google/android/exoplayer2/demo/EventLogger.java index 83ba61fff1..5c2b40e630 100644 --- a/demos/main/src/main/java/com/google/android/exoplayer2/demo/EventLogger.java +++ b/demos/main/src/main/java/com/google/android/exoplayer2/demo/EventLogger.java @@ -248,7 +248,7 @@ import java.util.Locale; } @Override - public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { + public void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { printInternalError("audioTrackUnderrun [" + bufferSize + ", " + bufferSizeMs + ", " + elapsedSinceLastFeedMs + "]", null); } diff --git a/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java b/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java index 1c35adb917..5a5a948d58 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java @@ -976,10 +976,10 @@ public class SimpleExoPlayer implements ExoPlayer { } @Override - public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, + public void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { if (audioDebugListener != null) { - audioDebugListener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); + audioDebugListener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); } } diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java index 5f9f599f01..7a4958a61a 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java @@ -63,15 +63,15 @@ public interface AudioRendererEventListener { void onAudioInputFormatChanged(Format format); /** - * Called when an {@link AudioTrack} underrun occurs. + * Called when an {@link AudioSink} underrun occurs. * - * @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes. - * @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is + * @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes. + * @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is * configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, * as the buffered media can have a variable bitrate so the duration may be unknown. - * @param elapsedSinceLastFeedMs The time since the {@link AudioTrack} was last fed data. + * @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data. */ - void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); + void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); /** * Called when the renderer is disabled. @@ -144,7 +144,7 @@ public interface AudioRendererEventListener { } /** - * Invokes {@link AudioRendererEventListener#onAudioTrackUnderrun(int, long, long)}. + * Invokes {@link AudioRendererEventListener#onAudioSinkUnderrun(int, long, long)}. 
*/ public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs, final long elapsedSinceLastFeedMs) { @@ -152,7 +152,7 @@ public interface AudioRendererEventListener { handler.post(new Runnable() { @Override public void run() { - listener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); + listener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); } }); } diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java new file mode 100644 index 0000000000..879769b0e2 --- /dev/null +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java @@ -0,0 +1,332 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import android.media.AudioTrack; +import android.support.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackParameters; +import java.nio.ByteBuffer; + +/** + * A sink that consumes audio data. + *
<p>
+ * Before starting playback, specify the input audio format by calling + * {@link #configure(String, int, int, int, int, int[], int, int)}. + *
<p>
+ * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()} + * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data. + *
<p>
+ * Call {@link #configure(String, int, int, int, int, int[], int, int)} whenever the input format + * changes. The sink will be reinitialized on the next call to + * {@link #handleBuffer(ByteBuffer, long)}. + *
<p>
+ * Call {@link #reset()} to prepare the sink to receive audio data from a new playback position. + *
<p>
+ * Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will + * be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset()}. Call + * {@link #release()} when the instance is no longer required. + *
<p>
+ * The implementation may be backed by a platform {@link AudioTrack}. In this case, + * {@link #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)}, + * {@link #enableTunnelingV21(int)} and/or {@link #disableTunneling()} may be called before writing + * data to the sink. These methods may also be called after writing data to the sink, in which case + * it will be reinitialized as required. For implementations that are not based on platform + * {@link AudioTrack}s, calling methods relating to audio sessions, audio attributes, and tunneling + * may have no effect. + */ +public interface AudioSink { + + /** + * Listener for audio sink events. + */ + interface Listener { + + /** + * Called if the audio sink has started rendering audio to a new platform audio session. + * + * @param audioSessionId The newly generated audio session's identifier. + */ + void onAudioSessionId(int audioSessionId); + + /** + * Called when the audio sink handles a buffer whose timestamp is discontinuous with the last + * buffer handled since it was reset. + */ + void onPositionDiscontinuity(); + + /** + * Called when the audio sink runs out of data. + *
<p>
+ * An audio sink implementation may never call this method (for example, if audio data is + * consumed in batches rather than based on the sink's own clock). + * + * @param bufferSize The size of the sink's buffer, in bytes. + * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for + * PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, as the + * buffered media can have a variable bitrate so the duration may be unknown. + * @param elapsedSinceLastFeedMs The time since the sink was last fed data, in milliseconds. + */ + void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); + + } + + /** + * Thrown when a failure occurs configuring the sink. + */ + final class ConfigurationException extends Exception { + + /** + * Creates a new configuration exception with the specified {@code cause} and no message. + */ + public ConfigurationException(Throwable cause) { + super(cause); + } + + /** + * Creates a new configuration exception with the specified {@code message} and no cause. + */ + public ConfigurationException(String message) { + super(message); + } + + } + + /** + * Thrown when a failure occurs initializing the sink. + */ + final class InitializationException extends Exception { + + /** + * The underlying {@link AudioTrack}'s state, if applicable. + */ + public final int audioTrackState; + + /** + * @param audioTrackState The underlying {@link AudioTrack}'s state, if applicable. + * @param sampleRate The requested sample rate in Hz. + * @param channelConfig The requested channel configuration. + * @param bufferSize The requested buffer size in bytes. + */ + public InitializationException(int audioTrackState, int sampleRate, int channelConfig, + int bufferSize) { + super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", " + + channelConfig + ", " + bufferSize + ")"); + this.audioTrackState = audioTrackState; + } + + } + + /** + * Thrown when a failure occurs writing to the sink. + */ + final class WriteException extends Exception { + + /** + * The error value returned from the sink implementation. If the sink writes to a platform + * {@link AudioTrack}, this will be the error value returned from + * {@link AudioTrack#write(byte[], int, int)} or {@link AudioTrack#write(ByteBuffer, int, int)}. + * Otherwise, the meaning of the error code depends on the sink implementation. + */ + public final int errorCode; + + /** + * @param errorCode The error value returned from the sink implementation. + */ + public WriteException(int errorCode) { + super("AudioTrack write failed: " + errorCode); + this.errorCode = errorCode; + } + + } + + /** + * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set. + */ + long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE; + + /** + * Sets the listener for sink events, which should be the audio renderer. + * + * @param listener The listener for sink events, which should be the audio renderer. + */ + void setListener(Listener listener); + + /** + * Returns whether it's possible to play audio in the specified format using encoded audio + * passthrough. + * + * @param mimeType The format mime type. + * @return Whether it's possible to play audio in the format using encoded audio passthrough. + */ + boolean isPassthroughSupported(String mimeType); + + /** + * Returns the playback position in the stream starting at zero, in microseconds, or + * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available. 
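The Listener contract defined earlier in this interface maps directly onto the renderer callbacks renamed elsewhere in this patch. As an illustrative sketch only (not part of the commit, and with an arbitrary log tag), a standalone implementation that simply logs the three events might look like:

import android.util.Log;
import com.google.android.exoplayer2.audio.AudioSink;

/** Illustrative sketch only: logs AudioSink events in the style of the demo EventLogger. */
final class LoggingAudioSinkListener implements AudioSink.Listener {

  private static final String TAG = "AudioSinkEvents"; // Arbitrary tag, not from the commit.

  @Override
  public void onAudioSessionId(int audioSessionId) {
    Log.d(TAG, "audioSessionId [" + audioSessionId + "]");
  }

  @Override
  public void onPositionDiscontinuity() {
    Log.d(TAG, "audioSinkPositionDiscontinuity");
  }

  @Override
  public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
    // bufferSizeMs is C.TIME_UNSET when the sink is configured for passthrough output.
    Log.w(TAG, "audioSinkUnderrun [" + bufferSize + ", " + bufferSizeMs + ", "
        + elapsedSinceLastFeedMs + "]");
  }
}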
+ * + * @param sourceEnded Specify {@code true} if no more input buffers will be provided. + * @return The playback position relative to the start of playback, in microseconds. + */ + long getCurrentPositionUs(boolean sourceEnded); + + /** + * Configures (or reconfigures) the sink. + * + * @param mimeType The MIME type of audio data provided in the input buffers. + * @param channelCount The number of channels. + * @param sampleRate The sample rate in Hz. + * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT}, + * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and + * {@link C#ENCODING_PCM_32BIT}. + * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a + * suitable buffer size. + * @param outputChannels A mapping from input to output channels that is applied to this sink's + * input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the + * input unchanged. Otherwise, the element at index {@code i} specifies index of the input + * channel to map to output channel {@code i} when preprocessing input buffers. After the + * map is applied the audio data will have {@code outputChannels.length} channels. + * @param trimStartSamples The number of audio samples to trim from the start of data written to + * the sink after this call. + * @param trimEndSamples The number of audio samples to trim from data written to the sink + * immediately preceding the next call to {@link #reset()} or + * {@link #configure(String, int, int, int, int, int[], int, int)}. + * @throws ConfigurationException If an error occurs configuring the sink. + */ + void configure(String mimeType, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding, + int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples, + int trimEndSamples) throws ConfigurationException; + + /** + * Starts or resumes consuming audio if initialized. + */ + void play(); + + /** + * Signals to the sink that the next buffer is discontinuous with the previous buffer. + */ + void handleDiscontinuity(); + + /** + * Attempts to process data from a {@link ByteBuffer}, starting from its current position and + * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the + * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if + * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset. + *
<p>
+ * Returns whether the data was handled in full. If the data was not handled in full then the same + * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed, + * except in the case of an intervening call to {@link #reset()} (or to + * {@link #configure(String, int, int, int, int, int[], int, int)} that causes the sink to be + * reset). + * + * @param buffer The buffer containing audio data. + * @param presentationTimeUs The presentation timestamp of the buffer in microseconds. + * @return Whether the buffer was handled fully. + * @throws InitializationException If an error occurs initializing the sink. + * @throws WriteException If an error occurs writing the audio data. + */ + boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) + throws InitializationException, WriteException; + + /** + * Processes any remaining data. {@link #isEnded()} will return {@code true} when no data remains. + * + * @throws WriteException If an error occurs draining data to the sink. + */ + void playToEndOfStream() throws WriteException; + + /** + * Returns whether {@link #playToEndOfStream} has been called and all buffers have been processed. + */ + boolean isEnded(); + + /** + * Returns whether the sink has data pending that has not been consumed yet. + */ + boolean hasPendingData(); + + /** + * Attempts to set the playback parameters and returns the active playback parameters, which may + * differ from those passed in. + * + * @param playbackParameters The new playback parameters to attempt to set. + * @return The active playback parameters. + */ + PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters); + + /** + * Gets the active {@link PlaybackParameters}. + */ + PlaybackParameters getPlaybackParameters(); + + /** + * Sets attributes for audio playback. If the attributes have changed and if the sink is not + * configured for use with tunneling, then it is reset and the audio session id is cleared. + *
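Since setPlaybackParameters returns the parameters the sink actually applied, which a passthrough sink will leave at PlaybackParameters.DEFAULT, callers should keep the returned value rather than the requested one. A minimal sketch under that assumption, with the helper name and the fixed 1f pitch chosen for illustration:

import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioSink;

final class PlaybackSpeedExample {

  /** Sketch only: requests a new speed and returns whatever the sink accepted. */
  static PlaybackParameters requestSpeed(AudioSink audioSink, float speed) {
    PlaybackParameters requested = new PlaybackParameters(speed, /* pitch= */ 1f);
    // The sink may clamp the values or ignore them entirely (for example in passthrough mode),
    // so keep the returned parameters for use by the playback clock.
    return audioSink.setPlaybackParameters(requested);
  }
}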
<p>
+ * If the sink is configured for use with tunneling then the audio attributes are ignored. The + * sink is not reset and the audio session id is not cleared. The passed attributes will be used + * if the sink is later re-configured into non-tunneled mode. + * + * @param audioAttributes The attributes for audio playback. + */ + void setAudioAttributes(AudioAttributes audioAttributes); + + /** + * Sets the audio session id. + */ + void setAudioSessionId(int audioSessionId); + + /** + * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled or if + * the audio session id has changed. Enabling tunneling is only possible if the sink is based on a + * platform {@link AudioTrack}, and requires platform API version 21 onwards. + * + * @param tunnelingAudioSessionId The audio session id to use. + * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21. + */ + void enableTunnelingV21(int tunnelingAudioSessionId); + + /** + * Disables tunneling. If tunneling was previously enabled then the sink is reset and any audio + * session id is cleared. + */ + void disableTunneling(); + + /** + * Sets the playback volume. + * + * @param volume A volume in the range [0.0, 1.0]. + */ + void setVolume(float volume); + + /** + * Pauses playback. + */ + void pause(); + + /** + * Resets the sink, after which it is ready to receive buffers from a new playback position. + *
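The attribute, session and volume setters above may be called before or after data has been written, and the sink reinitializes itself as required. A hedged sketch follows; the usage and content-type constants and the supplied session id are arbitrary illustrative choices, not values from this change:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AudioSink;

final class SinkConfigurationExample {

  /** Sketch only: routes output as music playback and applies a reduced volume. */
  static void applyPlaybackSettings(AudioSink audioSink, int existingAudioSessionId) {
    AudioAttributes audioAttributes = new AudioAttributes.Builder()
        .setUsage(C.USAGE_MEDIA)
        .setContentType(C.CONTENT_TYPE_MUSIC)
        .build();
    // If the attributes changed and the sink is not tunneling, this resets the sink.
    audioSink.setAudioAttributes(audioAttributes);
    // Attach to an existing platform audio session (hypothetical id supplied by the caller).
    audioSink.setAudioSessionId(existingAudioSessionId);
    audioSink.setVolume(0.5f);
  }
}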
<p>
+ * The audio session may remain active until {@link #release()} is called. + */ + void reset(); + + /** + * Releases any resources associated with this instance. + */ + void release(); + +} diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java index ef85985f1f..03bbd5817b 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java @@ -52,7 +52,7 @@ import java.util.Arrays; * Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)} * to start using the new channel map. * - * @see AudioTrack#configure(String, int, int, int, int, int[], int, int) + * @see AudioSink#configure(String, int, int, int, int, int[], int, int) */ public void setChannelMap(int[] outputChannels) { pendingOutputChannels = outputChannels; diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java similarity index 76% rename from library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java rename to library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java index 25813aefc0..34ea173deb 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java @@ -20,6 +20,7 @@ import android.annotation.TargetApi; import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioTimestamp; +import android.media.AudioTrack; import android.os.ConditionVariable; import android.os.SystemClock; import android.support.annotation.IntDef; @@ -39,131 +40,19 @@ import java.util.ArrayList; import java.util.LinkedList; /** - * Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles - * playback position smoothing, non-blocking writes and reconfiguration. + * Plays audio data. The implementation delegates to an {@link AudioTrack} and handles playback + * position smoothing, non-blocking writes and reconfiguration. *
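Putting the AudioSink lifecycle documented above together (configure, play, handleBuffer, playToEndOfStream, release), a caller outside the bundled renderers might drive the interface roughly as in the following sketch. The PcmChunkSource type and the stereo 44.1 kHz 16-bit parameters are assumptions for illustration, not part of this change:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.audio.DefaultAudioSink;
import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer;

final class AudioSinkLifecycleExample {

  /** Hypothetical PCM source used only for this sketch. */
  interface PcmChunkSource {
    /** Returns the next chunk of 16-bit PCM, or null when the stream has ended. */
    ByteBuffer nextChunk();
    /** Returns the presentation time of the chunk last returned by {@link #nextChunk()}. */
    long chunkPresentationTimeUs();
  }

  static void playToCompletion(PcmChunkSource source) throws AudioSink.ConfigurationException,
      AudioSink.InitializationException, AudioSink.WriteException {
    AudioSink sink = new DefaultAudioSink(/* audioCapabilities= */ null, new AudioProcessor[0]);
    // Stereo 16-bit PCM at 44.1 kHz; 0 lets the sink infer a buffer size, null leaves the
    // channel order unchanged, and no samples are trimmed at either end.
    sink.configure(MimeTypes.AUDIO_RAW, /* channelCount= */ 2, /* sampleRate= */ 44100,
        C.ENCODING_PCM_16BIT, /* specifiedBufferSize= */ 0, /* outputChannels= */ null,
        /* trimStartSamples= */ 0, /* trimEndSamples= */ 0);
    sink.play();
    ByteBuffer chunk;
    while ((chunk = source.nextChunk()) != null) {
      long presentationTimeUs = source.chunkPresentationTimeUs();
      // handleBuffer is non-blocking and may consume only part of the chunk; keep offering
      // the same buffer until it has been fully handled.
      while (!sink.handleBuffer(chunk, presentationTimeUs)) {
        // A real caller would yield back to its playback loop here rather than spin.
      }
    }
    // Drain the remaining data, then release the sink once playback has ended.
    while (!sink.isEnded()) {
      sink.playToEndOfStream();
    }
    sink.release();
  }
}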
<p>
- * Before starting playback, specify the input format by calling - * {@link #configure(String, int, int, int, int, int[], int, int)}. Optionally call - * {@link #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)}, - * {@link #enableTunnelingV21(int)} and {@link #disableTunneling()} to configure audio playback. - * These methods may be called after writing data to the track, in which case it will be - * reinitialized as required. - *
<p>
- * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()} - * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data. - *
<p>
- * Call {@link #configure(String, int, int, int, int, int[], int, int)} whenever the input format - * changes. The track will be reinitialized on the next call to - * {@link #handleBuffer(ByteBuffer, long)}. - *
<p>
- * Calling {@link #reset()} releases the underlying {@link android.media.AudioTrack} (and so does - * calling {@link #configure(String, int, int, int, int, int[], int, int)} unless the format is - * unchanged). It is safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()} - * without calling {@link #configure(String, int, int, int, int, int[], int, int)}. - *
<p>
- * Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will - * be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset}. Call - * {@link #release()} when the instance is no longer required. + * If tunneling mode is enabled, care must be taken that audio processors do not output buffers with + * a different duration than their input, and buffer processors must produce output corresponding to + * their last input immediately after that input is queued. This means that, for example, speed + * adjustment is not possible while using tunneling. */ -public final class AudioTrack { +public final class DefaultAudioSink implements AudioSink { /** - * Listener for audio track events. - */ - public interface Listener { - - /** - * Called when the audio track has been initialized with a newly generated audio session id. - * - * @param audioSessionId The newly generated audio session id. - */ - void onAudioSessionId(int audioSessionId); - - /** - * Called when the audio track handles a buffer whose timestamp is discontinuous with the last - * buffer handled since it was reset. - */ - void onPositionDiscontinuity(); - - /** - * Called when the audio track underruns. - * - * @param bufferSize The size of the track's buffer, in bytes. - * @param bufferSizeMs The size of the track's buffer, in milliseconds, if it is configured for - * PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, as the - * buffered media can have a variable bitrate so the duration may be unknown. - * @param elapsedSinceLastFeedMs The time since the track was last fed data, in milliseconds. - */ - void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); - - } - - /** - * Thrown when a failure occurs configuring the track. - */ - public static final class ConfigurationException extends Exception { - - public ConfigurationException(Throwable cause) { - super(cause); - } - - public ConfigurationException(String message) { - super(message); - } - - } - - /** - * Thrown when a failure occurs initializing an {@link android.media.AudioTrack}. - */ - public static final class InitializationException extends Exception { - - /** - * The state as reported by {@link android.media.AudioTrack#getState()}. - */ - public final int audioTrackState; - - /** - * @param audioTrackState The state as reported by {@link android.media.AudioTrack#getState()}. - * @param sampleRate The requested sample rate in Hz. - * @param channelConfig The requested channel configuration. - * @param bufferSize The requested buffer size in bytes. - */ - public InitializationException(int audioTrackState, int sampleRate, int channelConfig, - int bufferSize) { - super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", " - + channelConfig + ", " + bufferSize + ")"); - this.audioTrackState = audioTrackState; - } - - } - - /** - * Thrown when a failure occurs writing to an {@link android.media.AudioTrack}. - */ - public static final class WriteException extends Exception { - - /** - * The error value returned from {@link android.media.AudioTrack#write(byte[], int, int)} or - * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}. - */ - public final int errorCode; - - /** - * @param errorCode The error value returned from - * {@link android.media.AudioTrack#write(byte[], int, int)} or - * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}. 
- */ - public WriteException(int errorCode) { - super("AudioTrack write failed: " + errorCode); - this.errorCode = errorCode; - } - - } - - /** - * Thrown when {@link android.media.AudioTrack#getTimestamp} returns a spurious timestamp, if - * {@code AudioTrack#failOnSpuriousAudioTimestamp} is set. + * Thrown when {@link AudioTrack#getTimestamp} returns a spurious timestamp, if + * {@link #failOnSpuriousAudioTimestamp} is set. */ public static final class InvalidAudioTrackTimestampException extends RuntimeException { @@ -177,61 +66,56 @@ public final class AudioTrack { } /** - * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set. - */ - public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE; - - /** - * A minimum length for the {@link android.media.AudioTrack} buffer, in microseconds. + * A minimum length for the {@link AudioTrack} buffer, in microseconds. */ private static final long MIN_BUFFER_DURATION_US = 250000; /** - * A maximum length for the {@link android.media.AudioTrack} buffer, in microseconds. + * A maximum length for the {@link AudioTrack} buffer, in microseconds. */ private static final long MAX_BUFFER_DURATION_US = 750000; /** - * The length for passthrough {@link android.media.AudioTrack} buffers, in microseconds. + * The length for passthrough {@link AudioTrack} buffers, in microseconds. */ private static final long PASSTHROUGH_BUFFER_DURATION_US = 250000; /** * A multiplication factor to apply to the minimum buffer size requested by the underlying - * {@link android.media.AudioTrack}. + * {@link AudioTrack}. */ private static final int BUFFER_MULTIPLICATION_FACTOR = 4; /** - * @see android.media.AudioTrack#PLAYSTATE_STOPPED + * @see AudioTrack#PLAYSTATE_STOPPED */ - private static final int PLAYSTATE_STOPPED = android.media.AudioTrack.PLAYSTATE_STOPPED; + private static final int PLAYSTATE_STOPPED = AudioTrack.PLAYSTATE_STOPPED; /** - * @see android.media.AudioTrack#PLAYSTATE_PAUSED + * @see AudioTrack#PLAYSTATE_PAUSED */ - private static final int PLAYSTATE_PAUSED = android.media.AudioTrack.PLAYSTATE_PAUSED; + private static final int PLAYSTATE_PAUSED = AudioTrack.PLAYSTATE_PAUSED; /** - * @see android.media.AudioTrack#PLAYSTATE_PLAYING + * @see AudioTrack#PLAYSTATE_PLAYING */ - private static final int PLAYSTATE_PLAYING = android.media.AudioTrack.PLAYSTATE_PLAYING; + private static final int PLAYSTATE_PLAYING = AudioTrack.PLAYSTATE_PLAYING; /** - * @see android.media.AudioTrack#ERROR_BAD_VALUE + * @see AudioTrack#ERROR_BAD_VALUE */ - private static final int ERROR_BAD_VALUE = android.media.AudioTrack.ERROR_BAD_VALUE; + private static final int ERROR_BAD_VALUE = AudioTrack.ERROR_BAD_VALUE; /** - * @see android.media.AudioTrack#MODE_STATIC + * @see AudioTrack#MODE_STATIC */ - private static final int MODE_STATIC = android.media.AudioTrack.MODE_STATIC; + private static final int MODE_STATIC = AudioTrack.MODE_STATIC; /** - * @see android.media.AudioTrack#MODE_STREAM + * @see AudioTrack#MODE_STREAM */ - private static final int MODE_STREAM = android.media.AudioTrack.MODE_STREAM; + private static final int MODE_STREAM = AudioTrack.MODE_STREAM; /** - * @see android.media.AudioTrack#STATE_INITIALIZED + * @see AudioTrack#STATE_INITIALIZED */ - private static final int STATE_INITIALIZED = android.media.AudioTrack.STATE_INITIALIZED; + private static final int STATE_INITIALIZED = AudioTrack.STATE_INITIALIZED; /** - * @see android.media.AudioTrack#WRITE_NON_BLOCKING + * @see AudioTrack#WRITE_NON_BLOCKING */ @SuppressLint("InlinedApi") - 
private static final int WRITE_NON_BLOCKING = android.media.AudioTrack.WRITE_NON_BLOCKING; + private static final int WRITE_NON_BLOCKING = AudioTrack.WRITE_NON_BLOCKING; private static final String TAG = "AudioTrack"; @@ -282,7 +166,7 @@ public final class AudioTrack { /** * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is - * reported from {@link android.media.AudioTrack#getTimestamp}. + * reported from {@link AudioTrack#getTimestamp}. *
<p>
* The flag must be set before creating a player. Should be set to {@code true} for testing and * debugging purposes only. @@ -294,18 +178,17 @@ public final class AudioTrack { private final TrimmingAudioProcessor trimmingAudioProcessor; private final SonicAudioProcessor sonicAudioProcessor; private final AudioProcessor[] availableAudioProcessors; - private final Listener listener; private final ConditionVariable releasingConditionVariable; private final long[] playheadOffsets; private final AudioTrackUtil audioTrackUtil; private final LinkedList playbackParametersCheckpoints; + @Nullable private Listener listener; /** * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */ - private android.media.AudioTrack keepSessionIdAudioTrack; - - private android.media.AudioTrack audioTrack; + private AudioTrack keepSessionIdAudioTrack; + private AudioTrack audioTrack; private int sampleRate; private int channelConfig; private @C.Encoding int encoding; @@ -364,17 +247,15 @@ public final class AudioTrack { * default capabilities (no encoded audio passthrough support) should be assumed. * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before * output. May be empty. - * @param listener Listener for audio track events. */ - public AudioTrack(@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors, - Listener listener) { + public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities, + AudioProcessor[] audioProcessors) { this.audioCapabilities = audioCapabilities; - this.listener = listener; releasingConditionVariable = new ConditionVariable(true); if (Util.SDK_INT >= 18) { try { getLatencyMethod = - android.media.AudioTrack.class.getMethod("getLatency", (Class[]) null); + AudioTrack.class.getMethod("getLatency", (Class[]) null); } catch (NoSuchMethodException e) { // There's no guarantee this method exists. Do nothing. } @@ -405,29 +286,21 @@ public final class AudioTrack { playbackParametersCheckpoints = new LinkedList<>(); } - /** - * Returns whether it's possible to play audio in the specified format using encoded passthrough. - * - * @param mimeType The format mime type. - * @return Whether it's possible to play audio in the format using encoded passthrough. - */ + @Override + public void setListener(Listener listener) { + this.listener = listener; + } + + @Override public boolean isPassthroughSupported(String mimeType) { return audioCapabilities != null && audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType)); } - /** - * Returns the playback position in the stream starting at zero, in microseconds, or - * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available. - * - *
<p>
If the device supports it, the method uses the playback timestamp from - * {@link android.media.AudioTrack#getTimestamp}. Otherwise, it derives a smoothed position by - * sampling the {@link android.media.AudioTrack}'s frame position. - * - * @param sourceEnded Specify {@code true} if no more input buffers will be provided. - * @return The playback position relative to the start of playback, in microseconds. - */ + @Override public long getCurrentPositionUs(boolean sourceEnded) { + // If the device supports it, use the playback timestamp from AudioTrack.getTimestamp. + // Otherwise, derive a smoothed position by sampling the track's frame position. if (!hasCurrentPositionUs()) { return CURRENT_POSITION_NOT_SET; } @@ -462,29 +335,7 @@ public final class AudioTrack { return startMediaTimeUs + applySpeedup(positionUs); } - /** - * Configures (or reconfigures) the audio track. - * - * @param mimeType The mime type. - * @param channelCount The number of channels. - * @param sampleRate The sample rate in Hz. - * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT}, - * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and - * {@link C#ENCODING_PCM_32BIT}. - * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a - * suitable buffer size automatically. - * @param outputChannels A mapping from input to output channels that is applied to this track's - * input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the - * input unchanged. Otherwise, the element at index {@code i} specifies index of the input - * channel to map to output channel {@code i} when preprocessing input buffers. After the - * map is applied the audio data will have {@code outputChannels.length} channels. - * @param trimStartSamples The number of audio samples to trim from the start of data written to - * the track after this call. - * @param trimEndSamples The number of audio samples to trim from data written to the track - * immediately preceding the next call to {@link #reset()} or - * {@link #configure(String, int, int, int, int, int[], int, int)}. - * @throws ConfigurationException If an error occurs configuring the track. 
- */ + @Override public void configure(String mimeType, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples, int trimEndSamples) throws ConfigurationException { @@ -590,8 +441,7 @@ public final class AudioTrack { bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 1024 / C.MICROS_PER_SECOND); } } else { - int minBufferSize = - android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding); + int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding); Assertions.checkState(minBufferSize != ERROR_BAD_VALUE); int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR; int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize; @@ -651,7 +501,9 @@ public final class AudioTrack { } if (this.audioSessionId != audioSessionId) { this.audioSessionId = audioSessionId; - listener.onAudioSessionId(audioSessionId); + if (listener != null) { + listener.onAudioSessionId(audioSessionId); + } } audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds()); @@ -659,9 +511,7 @@ public final class AudioTrack { hasData = false; } - /** - * Starts or resumes playing audio if the audio track has been initialized. - */ + @Override public void play() { playing = true; if (isInitialized()) { @@ -670,9 +520,7 @@ public final class AudioTrack { } } - /** - * Signals to the audio track that the next buffer is discontinuous with the previous buffer. - */ + @Override public void handleDiscontinuity() { // Force resynchronization after a skipped buffer. if (startMediaTimeState == START_IN_SYNC) { @@ -680,24 +528,7 @@ public final class AudioTrack { } } - /** - * Attempts to process data from a {@link ByteBuffer}, starting from its current position and - * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the - * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if - * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset. - *
<p>
- * Returns whether the data was handled in full. If the data was not handled in full then the same - * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed, - * except in the case of an interleaving call to {@link #reset()} (or an interleaving call to - * {@link #configure(String, int, int, int, int, int[], int, int)} that caused the track to be - * reset). - * - * @param buffer The buffer containing audio data. - * @param presentationTimeUs The presentation timestamp of the buffer in microseconds. - * @return Whether the buffer was handled fully. - * @throws InitializationException If an error occurs initializing the track. - * @throws WriteException If an error occurs writing the audio data. - */ + @Override @SuppressWarnings("ReferenceEquality") public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws InitializationException, WriteException { @@ -729,7 +560,7 @@ public final class AudioTrack { boolean hadData = hasData; hasData = hasPendingData(); - if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) { + if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED && listener != null) { long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs; listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs), elapsedSinceLastFeedMs); } @@ -779,7 +610,9 @@ public final class AudioTrack { // number of bytes submitted. startMediaTimeUs += (presentationTimeUs - expectedPresentationTimeUs); startMediaTimeState = START_IN_SYNC; - listener.onPositionDiscontinuity(); + if (listener != null) { + listener.onPositionDiscontinuity(); + } } } @@ -899,11 +732,7 @@ public final class AudioTrack { return false; } - /** - * Plays out remaining audio. {@link #isEnded()} will return {@code true} when playback has ended. - * - * @throws WriteException If an error occurs draining data to the track. - */ + @Override public void playToEndOfStream() throws WriteException { if (handledEndOfStream || !isInitialized()) { return; @@ -947,30 +776,19 @@ public final class AudioTrack { return true; } - /** - * Returns whether all buffers passed to {@link #handleBuffer(ByteBuffer, long)} have been - * completely processed and played. - */ + @Override public boolean isEnded() { return !isInitialized() || (handledEndOfStream && !hasPendingData()); } - /** - * Returns whether the audio track has more data pending that will be played back. - */ + @Override public boolean hasPendingData() { return isInitialized() && (getWrittenFrames() > audioTrackUtil.getPlaybackHeadPosition() || overrideHasPendingData()); } - /** - * Attempts to set the playback parameters and returns the active playback parameters, which may - * differ from those passed in. - * - * @param playbackParameters The new playback parameters to attempt to set. - * @return The active playback parameters. - */ + @Override public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) { if (passthrough) { // The playback parameters are always the default in passthrough mode. @@ -997,24 +815,12 @@ public final class AudioTrack { return this.playbackParameters; } - /** - * Gets the {@link PlaybackParameters}. - */ + @Override public PlaybackParameters getPlaybackParameters() { return playbackParameters; } - /** - * Sets the attributes for audio playback. If the attributes have changed and if the audio track - * is not configured for use with tunneling, then the audio track is reset and the audio session - * id is cleared. - *
<p>
- * If the audio track is configured for use with tunneling then the audio attributes are ignored. - * The audio track is not reset and the audio session id is not cleared. The passed attributes - * will be used if the audio track is later re-configured into non-tunneled mode. - * - * @param audioAttributes The attributes for audio playback. - */ + @Override public void setAudioAttributes(AudioAttributes audioAttributes) { if (this.audioAttributes.equals(audioAttributes)) { return; @@ -1028,9 +834,7 @@ public final class AudioTrack { audioSessionId = C.AUDIO_SESSION_ID_UNSET; } - /** - * Sets the audio session id. The audio track is reset if the audio session id has changed. - */ + @Override public void setAudioSessionId(int audioSessionId) { if (this.audioSessionId != audioSessionId) { this.audioSessionId = audioSessionId; @@ -1038,18 +842,7 @@ public final class AudioTrack { } } - /** - * Enables tunneling. The audio track is reset if tunneling was previously disabled or if the - * audio session id has changed. Enabling tunneling requires platform API version 21 onwards. - *
<p>
- * If this instance has {@link AudioProcessor}s and tunneling is enabled, care must be taken that - * audio processors do not output buffers with a different duration than their input, and buffer - * processors must produce output corresponding to their last input immediately after that input - * is queued. - * - * @param tunnelingAudioSessionId The audio session id to use. - * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21. - */ + @Override public void enableTunnelingV21(int tunnelingAudioSessionId) { Assertions.checkState(Util.SDK_INT >= 21); if (!tunneling || audioSessionId != tunnelingAudioSessionId) { @@ -1059,10 +852,7 @@ public final class AudioTrack { } } - /** - * Disables tunneling. If tunneling was previously enabled then the audio track is reset and the - * audio session id is cleared. - */ + @Override public void disableTunneling() { if (tunneling) { tunneling = false; @@ -1071,11 +861,7 @@ public final class AudioTrack { } } - /** - * Sets the playback volume. - * - * @param volume A volume in the range [0.0, 1.0]. - */ + @Override public void setVolume(float volume) { if (this.volume != volume) { this.volume = volume; @@ -1093,9 +879,7 @@ public final class AudioTrack { } } - /** - * Pauses playback. - */ + @Override public void pause() { playing = false; if (isInitialized()) { @@ -1104,13 +888,7 @@ public final class AudioTrack { } } - /** - * Releases the underlying audio track asynchronously. - *
<p>
- * Calling {@link #handleBuffer(ByteBuffer, long)} will block until the audio track has been - * released, so it is safe to use the audio track immediately after a reset. The audio session may - * remain active until {@link #release()} is called. - */ + @Override public void reset() { if (isInitialized()) { submittedPcmBytes = 0; @@ -1146,7 +924,7 @@ public final class AudioTrack { audioTrack.pause(); } // AudioTrack.release can take some time, so we call it on a background thread. - final android.media.AudioTrack toRelease = audioTrack; + final AudioTrack toRelease = audioTrack; audioTrack = null; audioTrackUtil.reconfigure(null, false); releasingConditionVariable.close(); @@ -1164,9 +942,7 @@ public final class AudioTrack { } } - /** - * Releases all resources associated with this instance. - */ + @Override public void release() { reset(); releaseKeepSessionIdAudioTrack(); @@ -1186,7 +962,7 @@ public final class AudioTrack { } // AudioTrack.release can take some time, so we call it on a background thread. - final android.media.AudioTrack toRelease = keepSessionIdAudioTrack; + final AudioTrack toRelease = keepSessionIdAudioTrack; keepSessionIdAudioTrack = null; new Thread() { @Override @@ -1367,19 +1143,19 @@ public final class AudioTrack { && audioTrack.getPlaybackHeadPosition() == 0; } - private android.media.AudioTrack initializeAudioTrack() throws InitializationException { - android.media.AudioTrack audioTrack; + private AudioTrack initializeAudioTrack() throws InitializationException { + AudioTrack audioTrack; if (Util.SDK_INT >= 21) { audioTrack = createAudioTrackV21(); } else { int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage); if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) { - audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig, - outputEncoding, bufferSize, MODE_STREAM); + audioTrack = new AudioTrack(streamType, sampleRate, channelConfig, outputEncoding, + bufferSize, MODE_STREAM); } else { // Re-attach to the same audio session. - audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig, - outputEncoding, bufferSize, MODE_STREAM, audioSessionId); + audioTrack = new AudioTrack(streamType, sampleRate, channelConfig, outputEncoding, + bufferSize, MODE_STREAM, audioSessionId); } } @@ -1397,7 +1173,7 @@ public final class AudioTrack { } @TargetApi(21) - private android.media.AudioTrack createAudioTrackV21() { + private AudioTrack createAudioTrackV21() { android.media.AudioAttributes attributes; if (tunneling) { attributes = new android.media.AudioAttributes.Builder() @@ -1415,17 +1191,16 @@ public final class AudioTrack { .build(); int audioSessionId = this.audioSessionId != C.AUDIO_SESSION_ID_UNSET ? this.audioSessionId : AudioManager.AUDIO_SESSION_ID_GENERATE; - return new android.media.AudioTrack(attributes, format, bufferSize, MODE_STREAM, - audioSessionId); + return new AudioTrack(attributes, format, bufferSize, MODE_STREAM, audioSessionId); } - private android.media.AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) { - int sampleRate = 4000; // Equal to private android.media.AudioTrack.MIN_SAMPLE_RATE. + private AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) { + int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE. int channelConfig = AudioFormat.CHANNEL_OUT_MONO; @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT; int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback. 
- return new android.media.AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, - bufferSize, MODE_STATIC, audioSessionId); + return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize, + MODE_STATIC, audioSessionId); } @C.Encoding @@ -1457,14 +1232,13 @@ public final class AudioTrack { } @TargetApi(21) - private static int writeNonBlockingV21(android.media.AudioTrack audioTrack, ByteBuffer buffer, - int size) { + private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) { return audioTrack.write(buffer, size, WRITE_NON_BLOCKING); } @TargetApi(21) - private int writeNonBlockingWithAvSyncV21(android.media.AudioTrack audioTrack, - ByteBuffer buffer, int size, long presentationTimeUs) { + private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer buffer, int size, + long presentationTimeUs) { // TODO: Uncomment this when [Internal ref: b/33627517] is clarified or fixed. // if (Util.SDK_INT >= 23) { // // The underlying platform AudioTrack writes AV sync headers directly. @@ -1502,21 +1276,21 @@ public final class AudioTrack { } @TargetApi(21) - private static void setVolumeInternalV21(android.media.AudioTrack audioTrack, float volume) { + private static void setVolumeInternalV21(AudioTrack audioTrack, float volume) { audioTrack.setVolume(volume); } @SuppressWarnings("deprecation") - private static void setVolumeInternalV3(android.media.AudioTrack audioTrack, float volume) { + private static void setVolumeInternalV3(AudioTrack audioTrack, float volume) { audioTrack.setStereoVolume(volume, volume); } /** - * Wraps an {@link android.media.AudioTrack} to expose useful utility methods. + * Wraps an {@link AudioTrack} to expose useful utility methods. */ private static class AudioTrackUtil { - protected android.media.AudioTrack audioTrack; + protected AudioTrack audioTrack; private boolean needsPassthroughWorkaround; private int sampleRate; private long lastRawPlaybackHeadPosition; @@ -1534,8 +1308,7 @@ public final class AudioTrack { * @param needsPassthroughWorkaround Whether to workaround issues with pausing AC-3 passthrough * audio tracks on platform API version 21/22. */ - public void reconfigure(android.media.AudioTrack audioTrack, - boolean needsPassthroughWorkaround) { + public void reconfigure(AudioTrack audioTrack, boolean needsPassthroughWorkaround) { this.audioTrack = audioTrack; this.needsPassthroughWorkaround = needsPassthroughWorkaround; stopTimestampUs = C.TIME_UNSET; @@ -1574,9 +1347,9 @@ public final class AudioTrack { } /** - * {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be - * interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method - * returns the playback head position as a long that will only wrap around if the value exceeds + * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as an + * unsigned 32 bit integer, which also wraps around periodically. This method returns the + * playback head position as a long that will only wrap around if the value exceeds * {@link Long#MAX_VALUE} (which in practice will never happen). * * @return The playback head position, in frames. 
@@ -1676,8 +1449,7 @@ public final class AudioTrack { } @Override - public void reconfigure(android.media.AudioTrack audioTrack, - boolean needsPassthroughWorkaround) { + public void reconfigure(AudioTrack audioTrack, boolean needsPassthroughWorkaround) { super.reconfigure(audioTrack, needsPassthroughWorkaround); rawTimestampFramePositionWrapCount = 0; lastRawTimestampFramePosition = 0; diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java index cbb3a4944d..f8206e94cf 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java @@ -40,13 +40,13 @@ import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; /** - * Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}. + * Decodes and renders audio using {@link MediaCodec} and an {@link AudioSink}. */ @TargetApi(16) public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock { private final EventDispatcher eventDispatcher; - private final AudioTrack audioTrack; + private final AudioSink audioSink; private boolean passthroughEnabled; private boolean codecNeedsDiscardChannelsWorkaround; @@ -110,7 +110,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener) { this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, - eventListener, null); + eventListener, (AudioCapabilities) null); } /** @@ -135,9 +135,32 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, @Nullable AudioCapabilities audioCapabilities, AudioProcessor... audioProcessors) { + this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, + eventHandler, eventListener, new DefaultAudioSink(audioCapabilities, audioProcessors)); + } + + /** + * @param mediaCodecSelector A decoder selector. + * @param drmSessionManager For use with encrypted content. May be null if support for encrypted + * content is not required. + * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. + * For example a media file may start with a short clear region so as to allow playback to + * begin in parallel with key acquisition. This parameter specifies whether the renderer is + * permitted to play clear regions of encrypted media files before {@code drmSessionManager} + * has obtained the keys necessary to decrypt encrypted regions of the media. + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. + * @param audioSink The sink to which audio will be output. 
+ */ + public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector, + @Nullable DrmSessionManager drmSessionManager, + boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, + @Nullable AudioRendererEventListener eventListener, AudioSink audioSink) { super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys); eventDispatcher = new EventDispatcher(eventHandler, eventListener); - audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener()); + this.audioSink = audioSink; + audioSink.setListener(new AudioSinkListener()); } @Override @@ -196,14 +219,14 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media /** * Returns whether encoded audio passthrough should be used for playing back the input format. - * This implementation returns true if the {@link AudioTrack}'s audio capabilities indicate that - * passthrough is supported. + * This implementation returns true if the {@link AudioSink} indicates that passthrough is + * supported. * * @param mimeType The type of input media. - * @return Whether passthrough playback should be used. + * @return Whether passthrough playback is supported. */ protected boolean allowPassthrough(String mimeType) { - return audioTrack.isPassthroughSupported(mimeType); + return audioSink.isPassthroughSupported(mimeType); } @Override @@ -266,9 +289,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media } try { - audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap, + audioSink.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap, encoderDelay, encoderPadding); - } catch (AudioTrack.ConfigurationException e) { + } catch (AudioSink.ConfigurationException e) { throw ExoPlaybackException.createForRenderer(e, getIndex()); } } @@ -279,21 +302,21 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances * should be released in {@link #onDisabled()} (if not before). * - * @see AudioTrack.Listener#onAudioSessionId(int) + * @see AudioSink.Listener#onAudioSessionId(int) */ protected void onAudioSessionId(int audioSessionId) { // Do nothing. } /** - * @see AudioTrack.Listener#onPositionDiscontinuity() + * @see AudioSink.Listener#onPositionDiscontinuity() */ protected void onAudioTrackPositionDiscontinuity() { // Do nothing. 
   }
 
   /**
-   * @see AudioTrack.Listener#onUnderrun(int, long, long)
+   * @see AudioSink.Listener#onUnderrun(int, long, long)
    */
   protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
       long elapsedSinceLastFeedMs) {
@@ -306,16 +329,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
     eventDispatcher.enabled(decoderCounters);
     int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
     if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
-      audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+      audioSink.enableTunnelingV21(tunnelingAudioSessionId);
     } else {
-      audioTrack.disableTunneling();
+      audioSink.disableTunneling();
     }
   }
 
   @Override
   protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
     super.onPositionReset(positionUs, joining);
-    audioTrack.reset();
+    audioSink.reset();
     currentPositionUs = positionUs;
     allowPositionDiscontinuity = true;
   }
@@ -323,19 +346,19 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   @Override
   protected void onStarted() {
     super.onStarted();
-    audioTrack.play();
+    audioSink.play();
   }
 
   @Override
   protected void onStopped() {
-    audioTrack.pause();
+    audioSink.pause();
     super.onStopped();
   }
 
   @Override
   protected void onDisabled() {
     try {
-      audioTrack.release();
+      audioSink.release();
     } finally {
       try {
         super.onDisabled();
@@ -348,18 +371,18 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
 
   @Override
   public boolean isEnded() {
-    return super.isEnded() && audioTrack.isEnded();
+    return super.isEnded() && audioSink.isEnded();
   }
 
   @Override
   public boolean isReady() {
-    return audioTrack.hasPendingData() || super.isReady();
+    return audioSink.hasPendingData() || super.isReady();
   }
 
   @Override
   public long getPositionUs() {
-    long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
-    if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
+    long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
+    if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
       currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
           : Math.max(currentPositionUs, newCurrentPositionUs);
       allowPositionDiscontinuity = false;
@@ -369,12 +392,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
 
   @Override
   public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
-    return audioTrack.setPlaybackParameters(playbackParameters);
+    return audioSink.setPlaybackParameters(playbackParameters);
   }
 
   @Override
   public PlaybackParameters getPlaybackParameters() {
-    return audioTrack.getPlaybackParameters();
+    return audioSink.getPlaybackParameters();
   }
 
   @Override
@@ -390,17 +413,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
     if (shouldSkip) {
       codec.releaseOutputBuffer(bufferIndex, false);
       decoderCounters.skippedOutputBufferCount++;
-      audioTrack.handleDiscontinuity();
+      audioSink.handleDiscontinuity();
       return true;
     }
 
     try {
-      if (audioTrack.handleBuffer(buffer, bufferPresentationTimeUs)) {
+      if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
         codec.releaseOutputBuffer(bufferIndex, false);
         decoderCounters.renderedOutputBufferCount++;
         return true;
       }
-    } catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
+    } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
       throw ExoPlaybackException.createForRenderer(e, getIndex());
     }
     return false;
@@ -409,8 +432,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   @Override
   protected void renderToEndOfStream() throws ExoPlaybackException {
     try {
-      audioTrack.playToEndOfStream();
-    } catch (AudioTrack.WriteException e) {
+      audioSink.playToEndOfStream();
+    } catch (AudioSink.WriteException e) {
       throw ExoPlaybackException.createForRenderer(e, getIndex());
     }
   }
@@ -419,11 +442,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
     switch (messageType) {
       case C.MSG_SET_VOLUME:
-        audioTrack.setVolume((Float) message);
+        audioSink.setVolume((Float) message);
         break;
       case C.MSG_SET_AUDIO_ATTRIBUTES:
         AudioAttributes audioAttributes = (AudioAttributes) message;
-        audioTrack.setAudioAttributes(audioAttributes);
+        audioSink.setAudioAttributes(audioAttributes);
         break;
       default:
         super.handleMessage(messageType, message);
@@ -445,7 +468,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
         || Util.DEVICE.startsWith("heroqlte"));
   }
 
-  private final class AudioTrackListener implements AudioTrack.Listener {
+  private final class AudioSinkListener implements AudioSink.Listener {
 
     @Override
     public void onAudioSessionId(int audioSessionId) {
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
index 557421e4b3..98a84fdff8 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
@@ -72,7 +72,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   private final DrmSessionManager drmSessionManager;
   private final boolean playClearSamplesWithoutKeys;
   private final EventDispatcher eventDispatcher;
-  private final AudioTrack audioTrack;
+  private final AudioSink audioSink;
   private final FormatHolder formatHolder;
   private final DecoderInputBuffer flagsOnlyBuffer;
 
@@ -107,8 +107,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    * @param eventListener A listener of events. May be null if delivery of events is not required.
    * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
    */
-  public SimpleDecoderAudioRenderer(Handler eventHandler,
-      AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
+  public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+      AudioProcessor... audioProcessors) {
     this(eventHandler, eventListener, null, null, false, audioProcessors);
   }
 
@@ -119,8 +119,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
    *     default capabilities (no encoded audio passthrough support) should be assumed.
    */
-  public SimpleDecoderAudioRenderer(Handler eventHandler,
-      AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) {
+  public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+      AudioCapabilities audioCapabilities) {
     this(eventHandler, eventListener, audioCapabilities, null, false);
   }
 
@@ -139,15 +139,35 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    *     has obtained the keys necessary to decrypt encrypted regions of the media.
    * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
    */
-  public SimpleDecoderAudioRenderer(Handler eventHandler,
-      AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
+  public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+      AudioCapabilities audioCapabilities, DrmSessionManager drmSessionManager,
+      boolean playClearSamplesWithoutKeys, AudioProcessor... audioProcessors) {
+    this(eventHandler, eventListener, drmSessionManager,
+        playClearSamplesWithoutKeys, new DefaultAudioSink(audioCapabilities, audioProcessors));
+  }
+
+  /**
+   * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+   *     null if delivery of events is not required.
+   * @param eventListener A listener of events. May be null if delivery of events is not required.
+   * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
+   *     media is not required.
+   * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+   *     For example a media file may start with a short clear region so as to allow playback to
+   *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
+   *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+   *     has obtained the keys necessary to decrypt encrypted regions of the media.
+   * @param audioSink The sink to which audio will be output.
+   */
+  public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
       DrmSessionManager drmSessionManager, boolean playClearSamplesWithoutKeys,
-      AudioProcessor... audioProcessors) {
+      AudioSink audioSink) {
     super(C.TRACK_TYPE_AUDIO);
     this.drmSessionManager = drmSessionManager;
     this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
     eventDispatcher = new EventDispatcher(eventHandler, eventListener);
-    audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
+    this.audioSink = audioSink;
+    audioSink.setListener(new AudioSinkListener());
     formatHolder = new FormatHolder();
     flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
     decoderReinitializationState = REINITIALIZATION_STATE_NONE;
@@ -184,8 +204,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
     if (outputStreamEnded) {
       try {
-        audioTrack.playToEndOfStream();
-      } catch (AudioTrack.WriteException e) {
+        audioSink.playToEndOfStream();
+      } catch (AudioSink.WriteException e) {
         throw ExoPlaybackException.createForRenderer(e, getIndex());
       }
       return;
@@ -220,8 +240,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
       while (drainOutputBuffer()) {}
       while (feedInputBuffer()) {}
       TraceUtil.endSection();
-    } catch (AudioDecoderException | AudioTrack.ConfigurationException
-        | AudioTrack.InitializationException | AudioTrack.WriteException e) {
+    } catch (AudioDecoderException | AudioSink.ConfigurationException
+        | AudioSink.InitializationException | AudioSink.WriteException e) {
       throw ExoPlaybackException.createForRenderer(e, getIndex());
     }
     decoderCounters.ensureUpdated();
@@ -234,21 +254,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
    * should be released in {@link #onDisabled()} (if not before).
    *
-   * @see AudioTrack.Listener#onAudioSessionId(int)
+   * @see AudioSink.Listener#onAudioSessionId(int)
    */
   protected void onAudioSessionId(int audioSessionId) {
     // Do nothing.
   }
 
   /**
-   * @see AudioTrack.Listener#onPositionDiscontinuity()
+   * @see AudioSink.Listener#onPositionDiscontinuity()
    */
   protected void onAudioTrackPositionDiscontinuity() {
     // Do nothing.
   }
 
   /**
-   * @see AudioTrack.Listener#onUnderrun(int, long, long)
+   * @see AudioSink.Listener#onUnderrun(int, long, long)
    */
   protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
       long elapsedSinceLastFeedMs) {
@@ -282,8 +302,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   }
 
   private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException,
-      AudioTrack.ConfigurationException, AudioTrack.InitializationException,
-      AudioTrack.WriteException {
+      AudioSink.ConfigurationException, AudioSink.InitializationException,
+      AudioSink.WriteException {
     if (outputBuffer == null) {
       outputBuffer = decoder.dequeueOutputBuffer();
       if (outputBuffer == null) {
@@ -309,12 +329,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
     if (audioTrackNeedsConfigure) {
       Format outputFormat = getOutputFormat();
-      audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
+      audioSink.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
           outputFormat.sampleRate, outputFormat.pcmEncoding, 0, null, encoderDelay, encoderPadding);
       audioTrackNeedsConfigure = false;
     }
 
-    if (audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
+    if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
       decoderCounters.renderedOutputBufferCount++;
       outputBuffer.release();
       outputBuffer = null;
@@ -394,8 +414,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   private void processEndOfStream() throws ExoPlaybackException {
     outputStreamEnded = true;
     try {
-      audioTrack.playToEndOfStream();
-    } catch (AudioTrack.WriteException e) {
+      audioSink.playToEndOfStream();
+    } catch (AudioSink.WriteException e) {
       throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
     }
   }
@@ -418,19 +438,19 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
 
   @Override
   public boolean isEnded() {
-    return outputStreamEnded && audioTrack.isEnded();
+    return outputStreamEnded && audioSink.isEnded();
   }
 
   @Override
   public boolean isReady() {
-    return audioTrack.hasPendingData()
+    return audioSink.hasPendingData()
         || (inputFormat != null && !waitingForKeys && (isSourceReady() || outputBuffer != null));
   }
 
   @Override
   public long getPositionUs() {
-    long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
-    if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
+    long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
+    if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
       currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
           : Math.max(currentPositionUs, newCurrentPositionUs);
       allowPositionDiscontinuity = false;
@@ -440,12 +460,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
 
   @Override
   public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
-    return audioTrack.setPlaybackParameters(playbackParameters);
+    return audioSink.setPlaybackParameters(playbackParameters);
   }
 
   @Override
   public PlaybackParameters getPlaybackParameters() {
-    return audioTrack.getPlaybackParameters();
+    return audioSink.getPlaybackParameters();
   }
 
   @Override
@@ -454,15 +474,15 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
     eventDispatcher.enabled(decoderCounters);
     int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
     if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
-      audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+      audioSink.enableTunnelingV21(tunnelingAudioSessionId);
     } else {
-      audioTrack.disableTunneling();
+      audioSink.disableTunneling();
     }
   }
 
   @Override
   protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
-    audioTrack.reset();
+    audioSink.reset();
     currentPositionUs = positionUs;
     allowPositionDiscontinuity = true;
     inputStreamEnded = false;
@@ -474,12 +494,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
 
   @Override
   protected void onStarted() {
-    audioTrack.play();
+    audioSink.play();
   }
 
   @Override
   protected void onStopped() {
-    audioTrack.pause();
+    audioSink.pause();
   }
 
   @Override
@@ -489,7 +509,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
     waitingForKeys = false;
     try {
       releaseDecoder();
-      audioTrack.release();
+      audioSink.release();
     } finally {
       try {
         if (drmSession != null) {
@@ -599,11 +619,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
     switch (messageType) {
       case C.MSG_SET_VOLUME:
-        audioTrack.setVolume((Float) message);
+        audioSink.setVolume((Float) message);
        break;
       case C.MSG_SET_AUDIO_ATTRIBUTES:
         AudioAttributes audioAttributes = (AudioAttributes) message;
-        audioTrack.setAudioAttributes(audioAttributes);
+        audioSink.setAudioAttributes(audioAttributes);
         break;
       default:
         super.handleMessage(messageType, message);
@@ -611,7 +631,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
     }
   }
 
-  private final class AudioTrackListener implements AudioTrack.Listener {
+  private final class AudioSinkListener implements AudioSink.Listener {
 
     @Override
     public void onAudioSessionId(int audioSessionId) {
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java
index c66cbf4882..9338c24b76 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java
@@ -55,7 +55,7 @@ import java.nio.ByteOrder;
    *
    * @param trimStartSamples The number of audio samples to trim from the start of audio.
    * @param trimEndSamples The number of audio samples to trim from the end of audio.
-   * @see AudioTrack#configure(String, int, int, int, int, int[], int, int)
+   * @see AudioSink#configure(String, int, int, int, int, int[], int, int)
    */
   public void setTrimSampleCount(int trimStartSamples, int trimEndSamples) {
     this.trimStartSamples = trimStartSamples;
diff --git a/testutils/src/main/java/com/google/android/exoplayer2/testutil/ExoHostedTest.java b/testutils/src/main/java/com/google/android/exoplayer2/testutil/ExoHostedTest.java
index 142c5060b5..ee4018ba0e 100644
--- a/testutils/src/main/java/com/google/android/exoplayer2/testutil/ExoHostedTest.java
+++ b/testutils/src/main/java/com/google/android/exoplayer2/testutil/ExoHostedTest.java
@@ -29,7 +29,7 @@ import com.google.android.exoplayer2.Player;
 import com.google.android.exoplayer2.RenderersFactory;
 import com.google.android.exoplayer2.SimpleExoPlayer;
 import com.google.android.exoplayer2.audio.AudioRendererEventListener;
-import com.google.android.exoplayer2.audio.AudioTrack;
+import com.google.android.exoplayer2.audio.DefaultAudioSink;
 import com.google.android.exoplayer2.decoder.DecoderCounters;
 import com.google.android.exoplayer2.drm.DrmSessionManager;
 import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
@@ -53,10 +53,10 @@ public abstract class ExoHostedTest extends Player.DefaultEventListener implemen
     AudioRendererEventListener, VideoRendererEventListener {
 
   static {
-    // ExoPlayer's AudioTrack class is able to work around spurious timestamps reported by the
-    // platform (by ignoring them). Disable this workaround, since we're interested in testing
-    // that the underlying platform is behaving correctly.
-    AudioTrack.failOnSpuriousAudioTimestamp = true;
+    // DefaultAudioSink is able to work around spurious timestamps reported by the platform (by
+    // ignoring them). Disable this workaround, since we're interested in testing that the
+    // underlying platform is behaving correctly.
+    DefaultAudioSink.failOnSpuriousAudioTimestamp = true;
   }
 
   public static final long MAX_PLAYING_TIME_DISCREPANCY_MS = 2000;
@@ -253,7 +253,7 @@ public abstract class ExoHostedTest extends Player.DefaultEventListener implemen
   }
 
   @Override
-  public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
+  public void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
     Log.e(tag, "audioTrackUnderrun [" + bufferSize + ", " + bufferSizeMs + ", "
         + elapsedSinceLastFeedMs + "]", null);
   }