Fix up a bunch more Javadoc.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=128565942
This commit is contained in:
olly 2016-07-27 02:47:14 -07:00 committed by Oliver Woodman
parent 3cb5862621
commit ac59bde2be
29 changed files with 263 additions and 230 deletions

View File

@@ -56,7 +56,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
   private final boolean scaleToFit;
   private final long allowedJoiningTimeMs;
-  private final int maxDroppedFrameCountToNotify;
+  private final int maxDroppedFramesToNotify;
   private final EventDispatcher eventDispatcher;
   private final FormatHolder formatHolder;
@@ -81,7 +81,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
   private int previousHeight;
   private long droppedFrameAccumulationStartTimeMs;
-  private int droppedFrameCount;
+  private int droppedFrames;
   private int consecutiveDroppedFrameCount;
   /**
@@ -100,16 +100,16 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
   * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
   *     null if delivery of events is not required.
   * @param eventListener A listener of events. May be null if delivery of events is not required.
-  * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
+  * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
   *     invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
   */
  public LibvpxVideoRenderer(boolean scaleToFit, long allowedJoiningTimeMs,
      Handler eventHandler, VideoRendererEventListener eventListener,
-      int maxDroppedFrameCountToNotify) {
+      int maxDroppedFramesToNotify) {
    super(C.TRACK_TYPE_VIDEO);
    this.scaleToFit = scaleToFit;
    this.allowedJoiningTimeMs = allowedJoiningTimeMs;
-    this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
+    this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
    joiningDeadlineMs = -1;
    previousWidth = -1;
    previousHeight = -1;
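As a quick illustration of the renamed constructor parameter, a hedged usage sketch; the numeric values are illustrative only, and the handler and listener are passed as null, which the Javadoc above permits when event delivery is not required.

    // Sketch only: values are illustrative, not taken from the commit.
    LibvpxVideoRenderer videoRenderer = new LibvpxVideoRenderer(
        /* scaleToFit */ true,
        /* allowedJoiningTimeMs */ 5000,
        /* eventHandler */ null,
        /* eventListener */ null,
        /* maxDroppedFramesToNotify */ 50);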
@@ -169,7 +169,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
      long codecInitializedTimestamp = SystemClock.elapsedRealtime();
      eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
          codecInitializedTimestamp - codecInitializingTimestamp);
-      decoderCounters.codecInitCount++;
+      decoderCounters.decoderInitCount++;
    }
    TraceUtil.beginSection("drainAndFeed");
    while (drainOutputBuffer(positionUs)) {}
@@ -220,13 +220,13 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
        || (nextOutputBuffer != null && !nextOutputBuffer.isEndOfStream()
            && nextOutputBuffer.timeUs < positionUs)) {
      decoderCounters.droppedOutputBufferCount++;
-      droppedFrameCount++;
+      droppedFrames++;
      consecutiveDroppedFrameCount++;
      decoderCounters.maxConsecutiveDroppedOutputBufferCount = Math.max(
          consecutiveDroppedFrameCount,
          decoderCounters.maxConsecutiveDroppedOutputBufferCount);
-      if (droppedFrameCount == maxDroppedFrameCountToNotify) {
-        maybeNotifyDroppedFrameCount();
+      if (droppedFrames == maxDroppedFramesToNotify) {
+        maybeNotifyDroppedFrames();
      }
      outputBuffer.release();
      outputBuffer = null;
@@ -374,14 +374,14 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
  @Override
  protected void onStarted() {
-    droppedFrameCount = 0;
+    droppedFrames = 0;
    droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
  }
  @Override
  protected void onStopped() {
    joiningDeadlineMs = -1;
-    maybeNotifyDroppedFrameCount();
+    maybeNotifyDroppedFrames();
  }
  @Override
@@ -401,7 +401,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
    if (decoder != null) {
      decoder.release();
      decoder = null;
-      decoderCounters.codecReleaseCount++;
+      decoderCounters.decoderReleaseCount++;
    }
  }
@@ -474,12 +474,12 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
    }
  }
-  private void maybeNotifyDroppedFrameCount() {
-    if (droppedFrameCount > 0) {
+  private void maybeNotifyDroppedFrames() {
+    if (droppedFrames > 0) {
      long now = SystemClock.elapsedRealtime();
      long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
-      eventDispatcher.droppedFrameCount(droppedFrameCount, elapsedMs);
-      droppedFrameCount = 0;
+      eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
+      droppedFrames = 0;
      droppedFrameAccumulationStartTimeMs = now;
    }
  }

View File

@@ -97,7 +97,7 @@ import com.google.android.exoplayer2.source.Timeline;
public interface ExoPlayer {
  /**
-   * Interface definition for a callback to be notified of changes in player state.
+   * Listener of changes in player state.
   */
  interface EventListener {

View File

@@ -214,10 +214,11 @@ public final class Ac3Util {
  }
  /**
-   * Like {@link #parseEAc3SyncframeAudioSampleCount(byte[])} but reads from a byte buffer. The
-   * buffer position is not modified.
+   * Like {@link #parseEAc3SyncframeAudioSampleCount(byte[])} but reads from a {@link ByteBuffer}.
+   * The buffer's position is not modified.
   *
-   * @see #parseEAc3SyncframeAudioSampleCount(byte[])
+   * @param buffer The {@link ByteBuffer} from which to read.
+   * @return The number of audio samples represented by the syncframe.
   */
  public static int parseEAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
    // See ETSI TS 102 366 subsection E.1.2.2.

View File

@@ -26,7 +26,7 @@ import android.media.AudioManager;
import java.util.Arrays;
/**
- * Represents the set of audio formats a device is capable of playing back.
+ * Represents the set of audio formats that a device is capable of playing.
 */
@TargetApi(21)
public final class AudioCapabilities {
@@ -38,11 +38,10 @@ public final class AudioCapabilities {
      new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
  /**
-   * Gets the current audio capabilities. Note that to be notified when audio capabilities change,
-   * you can create an instance of {@link AudioCapabilitiesReceiver} and register a listener.
+   * Returns the current audio capabilities for the device.
   *
-   * @param context Context for receiving the initial broadcast.
-   * @return Current audio capabilities for the device.
+   * @param context A context for obtaining the current audio capabilities.
+   * @return The current audio capabilities for the device.
   */
  @SuppressWarnings("InlinedApi")
  public static AudioCapabilities getCapabilities(Context context) {
@@ -90,7 +89,9 @@ public final class AudioCapabilities {
    return Arrays.binarySearch(supportedEncodings, encoding) >= 0;
  }
-  /** Returns the maximum number of channels the device can play at the same time. */
+  /**
+   * Returns the maximum number of channels the device can play at the same time.
+   */
  public int getMaxChannelCount() {
    return maxChannelCount;
  }

View File

@@ -25,8 +25,8 @@ import android.content.IntentFilter;
import android.media.AudioManager;
/**
- * Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
- * (or resume) receiving notifications, and {@link #unregister} to stop.
+ * Receives broadcast events indicating changes to the device's audio capabilities, notifying a
+ * {@link Listener} when audio capability changes occur.
 */
public final class AudioCapabilitiesReceiver {
@@ -38,7 +38,7 @@ public final class AudioCapabilitiesReceiver {
    /**
     * Called when the audio capabilities change.
     *
-     * @param audioCapabilities Current audio capabilities for the device.
+     * @param audioCapabilities The current audio capabilities for the device.
     */
    void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);
@@ -51,10 +51,8 @@ public final class AudioCapabilitiesReceiver {
  /* package */ AudioCapabilities audioCapabilities;
  /**
-   * Constructs a new audio capabilities receiver.
-   *
-   * @param context Context for registering to receive broadcasts.
-   * @param listener Listener to notify when audio capabilities change.
+   * @param context A context for registering the receiver.
+   * @param listener The listener to notify when audio capabilities change.
   */
  public AudioCapabilitiesReceiver(Context context, Listener listener) {
    this.context = Assertions.checkNotNull(context);
@@ -63,11 +61,11 @@ public final class AudioCapabilitiesReceiver {
  }
  /**
-   * Registers to notify the listener when audio capabilities change. The current capabilities will
-   * be returned. It is important to call {@link #unregister} so that the listener can be garbage
-   * collected.
+   * Registers the receiver, meaning it will notify the listener when audio capability changes
+   * occur. The current audio capabilities will be returned. It is important to call
+   * {@link #unregister} when the receiver is no longer required.
   *
-   * @return Current audio capabilities for the device.
+   * @return The current audio capabilities for the device.
   */
  @SuppressWarnings("InlinedApi")
  public AudioCapabilities register() {
@@ -78,7 +76,8 @@ public final class AudioCapabilitiesReceiver {
  }
  /**
-   * Unregisters to stop notifying the listener when audio capabilities change.
+   * Unregisters the receiver, meaning it will no longer notify the listener when audio capability
+   * changes occur.
   */
  public void unregister() {
    if (receiver != null) {
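To make the reworded register/unregister contract concrete, a hedged lifecycle sketch; the listener body and the context variable are assumptions:

    // Sketch: construct the receiver, register it to start receiving
    // capability changes (the current capabilities are returned), and
    // unregister it when it is no longer required.
    AudioCapabilitiesReceiver receiver = new AudioCapabilitiesReceiver(context,
        new AudioCapabilitiesReceiver.Listener() {
          @Override
          public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
            // React to the change, e.g. reconfigure audio playback.
          }
        });
    AudioCapabilities currentCapabilities = receiver.register();
    // Later, when notifications are no longer needed:
    receiver.unregister();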

View File

@@ -20,6 +20,9 @@ package com.google.android.exoplayer2.audio;
 */
public abstract class AudioDecoderException extends Exception {
+  /**
+   * @param detailMessage The detail message for this exception.
+   */
  public AudioDecoderException(String detailMessage) {
    super(detailMessage);
  }

View File

@@ -24,12 +24,12 @@ import android.os.Handler;
import android.os.SystemClock;
/**
- * Interface definition for a callback to be notified of audio {@link Renderer} events.
+ * Listener of audio {@link Renderer} events.
 */
public interface AudioRendererEventListener {
  /**
-   * Invoked when the renderer is enabled.
+   * Called when the renderer is enabled.
   *
   * @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
   *     remains enabled.
@@ -37,14 +37,14 @@ public interface AudioRendererEventListener {
  void onAudioEnabled(DecoderCounters counters);
  /**
-   * Invoked when the audio session is set.
+   * Called when the audio session is set.
   *
   * @param audioSessionId The audio session id.
   */
  void onAudioSessionId(int audioSessionId);
  /**
-   * Invoked when a decoder is created.
+   * Called when a decoder is created.
   *
   * @param decoderName The decoder that was created.
   * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
@@ -55,14 +55,14 @@ public interface AudioRendererEventListener {
      long initializationDurationMs);
  /**
-   * Invoked when the format of the media being consumed by the renderer changes.
+   * Called when the format of the media being consumed by the renderer changes.
   *
   * @param format The new format.
   */
  void onAudioInputFormatChanged(Format format);
  /**
-   * Invoked when an {@link AudioTrack} underrun occurs.
+   * Called when an {@link AudioTrack} underrun occurs.
   *
   * @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
   * @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
@@ -73,7 +73,7 @@ public interface AudioRendererEventListener {
  void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
  /**
-   * Invoked when the renderer is disabled.
+   * Called when the renderer is disabled.
   *
   * @param counters {@link DecoderCounters} that were updated by the renderer.
   */
@@ -87,11 +87,19 @@ public interface AudioRendererEventListener {
    private final Handler handler;
    private final AudioRendererEventListener listener;
+    /**
+     * @param handler A handler for dispatching events, or null if creating a dummy instance.
+     * @param listener The listener to which events should be dispatched, or null if creating a
+     *     dummy instance.
+     */
    public EventDispatcher(Handler handler, AudioRendererEventListener listener) {
      this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
      this.listener = listener;
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}.
+     */
    public void enabled(final DecoderCounters decoderCounters) {
      if (listener != null) {
        handler.post(new Runnable() {
@@ -103,6 +111,9 @@ public interface AudioRendererEventListener {
      }
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioDecoderInitialized(String, long, long)}.
+     */
    public void decoderInitialized(final String decoderName,
        final long initializedTimestampMs, final long initializationDurationMs) {
      if (listener != null) {
@@ -116,6 +127,9 @@ public interface AudioRendererEventListener {
      }
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}.
+     */
    public void inputFormatChanged(final Format format) {
      if (listener != null) {
        handler.post(new Runnable() {
@@ -127,6 +141,9 @@ public interface AudioRendererEventListener {
      }
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioTrackUnderrun(int, long, long)}.
+     */
    public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs,
        final long elapsedSinceLastFeedMs) {
      if (listener != null) {
@@ -139,6 +156,9 @@ public interface AudioRendererEventListener {
      }
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioDisabled(DecoderCounters)}.
+     */
    public void disabled(final DecoderCounters counters) {
      if (listener != null) {
        handler.post(new Runnable() {
@@ -151,6 +171,9 @@ public interface AudioRendererEventListener {
      }
    }
+    /**
+     * Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}.
+     */
    public void audioSessionId(final int audioSessionId) {
      if (listener != null) {
        handler.post(new Runnable() {

View File

@@ -35,28 +35,28 @@ import java.nio.ByteBuffer;
 * Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
 * playback position smoothing, non-blocking writes and reconfiguration.
 * <p>
- * Before starting playback, specify the input audio format by calling one of the {@link #configure}
- * methods and {@link #initialize} the instance, optionally specifying an audio session.
+ * Before starting playback, specify the input format by calling
+ * {@link #configure(String, int, int, int, int)}. Next call {@link #initialize(int)}, optionally
+ * specifying an audio session.
 * <p>
- * Call {@link #handleBuffer(ByteBuffer, long)} to write data to play back, and
- * {@link #handleDiscontinuity()} when a buffer is skipped. Call {@link #play()} to start playing
- * back written data.
+ * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
+ * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
 * <p>
- * Call {@link #configure} again whenever the input format changes. If {@link #isInitialized()}
- * returns false after calling {@link #configure}, it is necessary to re-{@link #initialize} the
- * instance before writing more data.
+ * Call {@link #configure(String, int, int, int, int)} whenever the input format changes. If
+ * {@link #isInitialized()} returns {@code false} after the call, it is necessary to call
+ * {@link #initialize(int)} before writing more data.
 * <p>
- * The underlying framework audio track is created by {@link #initialize} and released
- * asynchronously by {@link #reset} (and {@link #configure}, unless the format is unchanged).
- * Reinitialization blocks until releasing the old audio track completes. It is safe to
- * re-{@link #initialize} the instance after calling {@link #reset()}, without reconfiguration.
+ * The underlying {@link android.media.AudioTrack} is created by {@link #initialize(int)} and
+ * released by {@link #reset()} (and {@link #configure(String, int, int, int, int)} unless the input
+ * format is unchanged). It is safe to call {@link #initialize(int)} after calling {@link #reset()}
+ * without reconfiguration.
 * <p>
- * Call {@link #release()} when the instance will no longer be used.
+ * Call {@link #release()} when the instance is no longer required.
 */
public final class AudioTrack {
  /**
-   * Thrown when a failure occurs instantiating an {@link android.media.AudioTrack}.
+   * Thrown when a failure occurs initializing an {@link android.media.AudioTrack}.
   */
  public static final class InitializationException extends Exception {
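The rewritten class Javadoc describes a configure / initialize / handleBuffer / play lifecycle ending with reset and release. A heavily hedged sketch of that sequence follows; the format values, the session id handling and the omitted exception handling are assumptions, and the trailing 0 passed to configure mirrors the calls made by the renderers later in this commit.

    // Sketch of the documented lifecycle; values are illustrative and
    // InitializationException / WriteException handling is omitted.
    AudioTrack audioTrack = new AudioTrack(audioCapabilities, streamType);
    audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
    int sessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
    audioTrack.handleBuffer(audioData, presentationTimeUs);
    audioTrack.play();
    // On a format change, call configure(...) again; if isInitialized() then
    // returns false, call initialize(int) before writing more data.
    audioTrack.reset();
    audioTrack.release();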
@@ -65,8 +65,14 @@ public final class AudioTrack {
     */
    public final int audioTrackState;
-    public InitializationException(
-        int audioTrackState, int sampleRate, int channelConfig, int bufferSize) {
+    /**
+     * @param audioTrackState The state as reported by {@link android.media.AudioTrack#getState()}.
+     * @param sampleRate The requested sample rate in Hz.
+     * @param channelConfig The requested channel configuration.
+     * @param bufferSize The requested buffer size in bytes.
+     */
+    public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
+        int bufferSize) {
      super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
          + channelConfig + ", " + bufferSize + ")");
      this.audioTrackState = audioTrackState;
@@ -80,10 +86,14 @@ public final class AudioTrack {
  public static final class WriteException extends Exception {
    /**
-     * The value returned from {@link android.media.AudioTrack#write(byte[], int, int)}.
+     * An error value returned from {@link android.media.AudioTrack#write(byte[], int, int)}.
     */
    public final int errorCode;
+    /**
+     * @param errorCode An error value returned from
+     *     {@link android.media.AudioTrack#write(byte[], int, int)}.
+     */
    public WriteException(int errorCode) {
      super("AudioTrack write failed: " + errorCode);
      this.errorCode = errorCode;
@@ -97,8 +107,11 @@ public final class AudioTrack {
   */
  public static final class InvalidAudioTrackTimestampException extends RuntimeException {
-    public InvalidAudioTrackTimestampException(String message) {
-      super(message);
+    /**
+     * @param detailMessage The detail message for this exception.
+     */
+    public InvalidAudioTrackTimestampException(String detailMessage) {
+      super(detailMessage);
    }
  }
@@ -189,7 +202,7 @@ public final class AudioTrack {
  private final AudioTrackUtil audioTrackUtil;
  /**
-   * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
+   * Used to keep the audio session active on pre-V21 builds (see {@link #initialize(int)}).
   */
  private android.media.AudioTrack keepSessionIdAudioTrack;
@@ -228,9 +241,7 @@ public final class AudioTrack {
  private boolean useResampledBuffer;
  /**
-   * Creates an audio track using the specified audio capabilities and stream type.
-   *
-   * @param audioCapabilities The current audio playback capabilities.
+   * @param audioCapabilities The current audio capabilities.
   * @param streamType The type of audio stream for the underlying {@link android.media.AudioTrack}.
   */
  public AudioTrack(AudioCapabilities audioCapabilities, int streamType) {
@@ -324,20 +335,6 @@ public final class AudioTrack {
    return currentPositionUs;
  }
-  /**
-   * Configures (or reconfigures) the audio track, inferring a suitable buffer size automatically.
-   *
-   * @param mimeType The mime type.
-   * @param channelCount The number of channels.
-   * @param sampleRate The sample rate in Hz.
-   * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
-   *     {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
-   *     {@link C#ENCODING_PCM_32BIT}.
-   */
-  public void configure(String mimeType, int channelCount, int sampleRate, int pcmEncoding) {
-    configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
-  }
  /**
   * Configures (or reconfigures) the audio track.
   *
@@ -435,15 +432,6 @@ public final class AudioTrack {
    bufferSizeUs = passthrough ? C.UNSET_TIME_US : framesToDurationUs(pcmBytesToFrames(bufferSize));
  }
-  /**
-   * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
-   *
-   * @return The audio track session identifier.
-   */
-  public int initialize() throws InitializationException {
-    return initialize(SESSION_ID_NOT_SET);
-  }
  /**
   * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
   *
@@ -491,7 +479,6 @@ public final class AudioTrack {
    audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
    setAudioTrackVolume();
    return sessionId;
  }
@@ -705,6 +692,8 @@ public final class AudioTrack {
  /**
   * Sets the playback volume.
+   *
+   * @param volume A volume in the range [0.0, 1.0].
   */
  public void setVolume(float volume) {
    if (this.volume != volume) {
@@ -734,9 +723,11 @@ public final class AudioTrack {
  }
  /**
-   * Releases the underlying audio track asynchronously. Calling {@link #initialize} will block
-   * until the audio track has been released, so it is safe to initialize immediately after
-   * resetting. The audio session may remain active until the instance is {@link #release}d.
+   * Releases the underlying audio track asynchronously.
+   * <p>
+   * Calling {@link #initialize(int)} will block until the audio track has been released, so it is
+   * safe to initialize immediately after a reset. The audio session may remain active until
+   * {@link #release()} is called.
   */
  public void reset() {
    if (isInitialized()) {

View File

@@ -46,13 +46,9 @@ public final class DtsUtil {
      384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, 2048, 2304, 2560, 2688, 2816,
      2823, 2944, 3072, 3840, 4096, 6144, 7680};
-  private static final ParsableBitArray SCRATCH_BITS = new ParsableBitArray();
  /**
   * Returns the DTS format given {@code data} containing the DTS frame according to ETSI TS 102 114
   * subsections 5.3/5.4.
-   * <p>
-   * This method may only be called from one thread at a time.
   *
   * @param frame The DTS frame to parse.
   * @param trackId The track identifier to set on the format, or null.
@@ -62,8 +58,7 @@ public final class DtsUtil {
   */
  public static Format parseDtsFormat(byte[] frame, String trackId, String language,
      DrmInitData drmInitData) {
-    ParsableBitArray frameBits = SCRATCH_BITS;
-    frameBits.reset(frame);
+    ParsableBitArray frameBits = new ParsableBitArray(frame);
    frameBits.skipBits(4 * 8 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
    int amode = frameBits.readBits(6);
    int channelCount = CHANNELS_BY_AMODE[amode];
@@ -91,14 +86,17 @@ public final class DtsUtil {
  }
  /**
-   * Like {@link #parseDtsAudioSampleCount(byte[])} but reads from a byte buffer. The buffer
-   * position is not modified.
+   * Like {@link #parseDtsAudioSampleCount(byte[])} but reads from a {@link ByteBuffer}. The
+   * buffer's position is not modified.
+   *
+   * @param buffer The {@link ByteBuffer} from which to read.
+   * @return The number of audio samples represented by the syncframe.
   */
-  public static int parseDtsAudioSampleCount(ByteBuffer data) {
+  public static int parseDtsAudioSampleCount(ByteBuffer buffer) {
    // See ETSI TS 102 114 subsection 5.4.1.
-    int position = data.position();
-    int nblks = ((data.get(position + 4) & 0x01) << 6)
-        | ((data.get(position + 5) & 0xFC) >> 2);
+    int position = buffer.position();
+    int nblks = ((buffer.get(position + 4) & 0x01) << 6)
+        | ((buffer.get(position + 5) & 0xFC) >> 2);
    return (nblks + 1) * 32;
  }

View File

@@ -41,7 +41,7 @@ import android.os.SystemClock;
import java.nio.ByteBuffer;
/**
- * Decodes and renders audio using {@link MediaCodec} and {@link android.media.AudioTrack}.
+ * Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
 */
@TargetApi(16)
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
@@ -226,18 +226,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
  @Override
  protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) {
    boolean passthrough = passthroughMediaFormat != null;
-    String mimeType = passthrough
-        ? passthroughMediaFormat.getString(android.media.MediaFormat.KEY_MIME)
+    String mimeType = passthrough ? passthroughMediaFormat.getString(MediaFormat.KEY_MIME)
        : MimeTypes.AUDIO_RAW;
-    android.media.MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
-    int channelCount = format.getInteger(android.media.MediaFormat.KEY_CHANNEL_COUNT);
-    int sampleRate = format.getInteger(android.media.MediaFormat.KEY_SAMPLE_RATE);
-    audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding);
+    MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
+    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+    audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
  }
  /**
-   * Invoked when the audio session id becomes known. Once the id is known it will not change
-   * (and hence this method will not be invoked again) unless the renderer is disabled and then
+   * Called when the audio session id becomes known. Once the id is known it will not change (and
+   * hence this method will not be invoked again) unless the renderer is disabled and then
   * subsequently re-enabled.
   * <p>
   * The default implementation is a no-op. One reason for overriding this method would be to
@@ -333,12 +332,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
    if (!audioTrack.isInitialized()) {
      // Initialize the AudioTrack now.
      try {
-        if (audioSessionId != AudioTrack.SESSION_ID_NOT_SET) {
-          audioTrack.initialize(audioSessionId);
-        } else {
-          audioSessionId = audioTrack.initialize();
+        if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
+          audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
          eventDispatcher.audioSessionId(audioSessionId);
          onAudioSessionId(audioSessionId);
+        } else {
+          audioTrack.initialize(audioSessionId);
        }
        audioTrackHasData = false;
      } catch (AudioTrack.InitializationException e) {

View File

@@ -119,7 +119,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
      long codecInitializedTimestamp = SystemClock.elapsedRealtime();
      eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
          codecInitializedTimestamp - codecInitializingTimestamp);
-      decoderCounters.codecInitCount++;
+      decoderCounters.decoderInitCount++;
    } catch (AudioDecoderException e) {
      throw ExoPlaybackException.createForRenderer(e, getIndex());
    }
@@ -138,6 +138,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    decoderCounters.ensureUpdated();
  }
+  /**
+   * Creates a decoder for the given format.
+   *
+   * @param format The format for which a decoder is required.
+   * @return The decoder.
+   * @throws AudioDecoderException If an error occurred creating a suitable decoder.
+   */
  protected abstract SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer,
      ? extends AudioDecoderException> createDecoder(Format format) throws AudioDecoderException;
@@ -179,13 +186,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
    if (!audioTrack.isInitialized()) {
      Format outputFormat = getOutputFormat();
      audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
-          outputFormat.sampleRate, outputFormat.pcmEncoding);
-      if (audioSessionId != AudioTrack.SESSION_ID_NOT_SET) {
-        audioTrack.initialize(audioSessionId);
-      } else {
-        audioSessionId = audioTrack.initialize();
+          outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
+      if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
+        audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
        eventDispatcher.audioSessionId(audioSessionId);
        onAudioSessionId(audioSessionId);
+      } else {
+        audioTrack.initialize(audioSessionId);
      }
      audioTrackHasData = false;
      if (getState() == STATE_STARTED) {
@@ -338,7 +345,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
      if (decoder != null) {
        decoder.release();
        decoder = null;
-        decoderCounters.codecReleaseCount++;
+        decoderCounters.decoderReleaseCount++;
      }
      audioTrack.release();
    } finally {

View File

@@ -16,22 +16,22 @@
package com.google.android.exoplayer2.decoder;
/**
- * Maintains codec event counts, for debugging purposes only.
+ * Maintains decoder event counts, for debugging purposes only.
 * <p>
 * Counters should be written from the playback thread only. Counters may be read from any thread.
- * To ensure that the counter values are correctly reflected between threads, users of this class
- * should invoke {@link #ensureUpdated()} prior to reading and after writing.
+ * To ensure that the counter values are made visible across threads, users of this class should
+ * invoke {@link #ensureUpdated()} prior to reading and after writing.
 */
public final class DecoderCounters {
  /**
-   * The number of times the codec has been initialized.
+   * The number of times a decoder has been initialized.
   */
-  public int codecInitCount;
+  public int decoderInitCount;
  /**
-   * The number of times the codec has been released.
+   * The number of times a decoder has been released.
   */
-  public int codecReleaseCount;
+  public int decoderReleaseCount;
  /**
   * The number of queued input buffers.
   */
@@ -76,8 +76,8 @@ public final class DecoderCounters {
   * @param other The {@link DecoderCounters} to merge into this instance.
   */
  public void merge(DecoderCounters other) {
-    codecInitCount += other.codecInitCount;
-    codecReleaseCount += other.codecReleaseCount;
+    decoderInitCount += other.decoderInitCount;
+    decoderReleaseCount += other.decoderReleaseCount;
    inputBufferCount += other.inputBufferCount;
    renderedOutputBufferCount += other.renderedOutputBufferCount;
    skippedOutputBufferCount += other.skippedOutputBufferCount;
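The reworded threading note asks writers to call ensureUpdated() after writing and readers to call it before reading. A hedged sketch of that pattern, with the counters variable assumed to be shared between the two threads:

    // Playback thread: update a counter, then publish the new values.
    counters.decoderInitCount++;
    counters.ensureUpdated();

    // Any other thread: refresh visibility, then read.
    counters.ensureUpdated();
    int initCount = counters.decoderInitCount;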

View File

@@ -47,18 +47,17 @@ import java.util.UUID;
public class StreamingDrmSessionManager implements DrmSessionManager, DrmSession {
  /**
-   * Interface definition for a callback to be notified of {@link StreamingDrmSessionManager}
-   * events.
+   * Listener of {@link StreamingDrmSessionManager} events.
   */
  public interface EventListener {
    /**
-     * Invoked each time keys are loaded.
+     * Called each time keys are loaded.
     */
    void onDrmKeysLoaded();
    /**
-     * Invoked when a drm error occurs.
+     * Called when a drm error occurs.
     *
     * @param e The corresponding exception.
     */

View File

@@ -363,7 +363,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
        ? (SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS) : -1;
    inputIndex = -1;
    outputIndex = -1;
-    decoderCounters.codecInitCount++;
+    decoderCounters.decoderInitCount++;
  }
  private void throwDecoderInitError(DecoderInitializationException e)
@@ -436,7 +436,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    codecReceivedEos = false;
    codecReconfigurationState = RECONFIGURATION_STATE_NONE;
    codecReinitializationState = REINITIALIZATION_STATE_NONE;
-    decoderCounters.codecReleaseCount++;
+    decoderCounters.decoderReleaseCount++;
    try {
      codec.stop();
    } finally {

View File

@@ -16,7 +16,7 @@
package com.google.android.exoplayer2.metadata;
/**
- * Parses objects of type <T> from binary data.
+ * Decodes objects of type <T> from binary data.
 *
 * @param <T> The type of the metadata.
 */

View File

@@ -21,15 +21,15 @@ package com.google.android.exoplayer2.metadata;
public class MetadataDecoderException extends Exception {
  /**
-   * @param message The detail message.
+   * @param message The detail message for this exception.
   */
  public MetadataDecoderException(String message) {
    super(message);
  }
  /**
-   * @param message The detail message.
-   * @param cause The cause.
+   * @param message The detail message for this exception.
+   * @param cause The cause of this exception.
   */
  public MetadataDecoderException(String message, Throwable cause) {
    super(message, cause);

View File

@@ -66,12 +66,12 @@ public final class ExtractorMediaSource implements MediaPeriod, MediaSource,
    UpstreamFormatChangedListener {
  /**
-   * Interface definition for a callback to be notified of {@link ExtractorMediaSource} events.
+   * Listener of {@link ExtractorMediaSource} events.
   */
  public interface EventListener {
    /**
-     * Invoked when an error occurs loading media data.
+     * Called when an error occurs loading media data.
     *
     * @param error The load error.
     */

View File

@@ -41,12 +41,12 @@ public final class SingleSampleMediaSource implements MediaPeriod, MediaSource,
    Loader.Callback<SingleSampleMediaSource.SourceLoadable> {
  /**
-   * Interface definition for a callback to be notified of {@link SingleSampleMediaSource} events.
+   * Listener of {@link SingleSampleMediaSource} events.
   */
  public interface EventListener {
    /**
-     * Invoked when an error occurs loading media data.
+     * Called when an error occurs loading media data.
     *
     * @param sourceId The id of the reporting {@link SingleSampleMediaSource}.
     * @param e The cause of the failure.

View File

@@ -18,15 +18,15 @@ package com.google.android.exoplayer2.text;
import com.google.android.exoplayer2.decoder.Decoder;
/**
- * Parses {@link Subtitle}s from {@link SubtitleInputBuffer}s.
+ * Decodes {@link Subtitle}s from {@link SubtitleInputBuffer}s.
 */
public interface SubtitleDecoder extends
    Decoder<SubtitleInputBuffer, SubtitleOutputBuffer, SubtitleDecoderException> {
  /**
-   * Informs the parser of the current playback position.
+   * Informs the decoder of the current playback position.
   * <p>
-   * Must be called prior to each attempt to dequeue output buffers from the parser.
+   * Must be called prior to each attempt to dequeue output buffers from the decoder.
   *
   * @param positionUs The current playback position in microseconds.
   */

View File

@@ -21,15 +21,15 @@ package com.google.android.exoplayer2.text;
public class SubtitleDecoderException extends Exception {
  /**
-   * @param message The detail message.
+   * @param message The detail message for this exception.
   */
  public SubtitleDecoderException(String message) {
    super(message);
  }
  /**
-   * @param message The detail message.
-   * @param cause The cause.
+   * @param message The detail message for this exception.
+   * @param cause The cause of this exception.
   */
  public SubtitleDecoderException(String message, Throwable cause) {
    super(message, cause);

View File

@@ -40,12 +40,12 @@ import java.util.concurrent.CopyOnWriteArraySet;
public abstract class MappingTrackSelector extends TrackSelector {
  /**
-   * Interface definition for a callback to be notified of {@link MappingTrackSelector} events.
+   * Listener of {@link MappingTrackSelector} events.
   */
  public interface EventListener {
    /**
-     * Invoked when the track information has changed.
+     * Called when the track information has changed.
     *
     * @param trackInfo Contains the new track and track selection information.
     */

View File

@@ -47,9 +47,8 @@ public final class DebugTextViewHelper implements Runnable, ExoPlayer.EventListener {
  }
  /**
-   * Starts periodic updates of the {@link TextView}.
-   * <p>
-   * Should be called from the application's main thread.
+   * Starts periodic updates of the {@link TextView}. Must be called from the application's main
+   * thread.
   */
  public void start() {
    if (started) {
@@ -61,9 +60,8 @@ public final class DebugTextViewHelper implements Runnable, ExoPlayer.EventListener {
  }
  /**
-   * Stops periodic updates of the {@link TextView}.
-   * <p>
-   * Should be called from the application's main thread.
+   * Stops periodic updates of the {@link TextView}. Must be called from the application's main
+   * thread.
   */
  public void stop() {
    if (!started) {

View File

@@ -36,11 +36,11 @@ import android.util.DisplayMetrics;
import android.util.Log;
/**
- * Draws subtitle {@link Cue}s.
+ * Paints subtitle {@link Cue}s.
 */
-/* package */ final class SubtitleCuePainter {
+/* package */ final class SubtitlePainter {
-  private static final String TAG = "SubtitleCuePainter";
+  private static final String TAG = "SubtitlePainter";
  /**
   * Ratio of inner padding to font size.
@@ -91,7 +91,7 @@ import android.util.Log;
  private int textTop;
  private int textPaddingX;
-  public SubtitleCuePainter(Context context) {
+  public SubtitlePainter(Context context) {
    int[] viewAttr = {android.R.attr.lineSpacingExtra, android.R.attr.lineSpacingMultiplier};
    TypedArray styledAttributes = context.obtainStyledAttributes(null, viewAttr, 0, 0);
    spacingAdd = styledAttributes.getDimensionPixelSize(0, 0);

View File

@@ -29,7 +29,7 @@ import java.util.ArrayList;
import java.util.List;
/**
- * A view for rendering rich-formatted captions.
+ * A view for displaying subtitle {@link Cue}s.
 */
public final class SubtitleView extends View {
@@ -52,7 +52,7 @@ public final class SubtitleView extends View {
  private static final int FRACTIONAL_IGNORE_PADDING = 1;
  private static final int ABSOLUTE = 2;
-  private final List<SubtitleCuePainter> painters;
+  private final List<SubtitlePainter> painters;
  private List<Cue> cues;
  private int textSizeType;
@@ -88,7 +88,7 @@ public final class SubtitleView extends View {
    // Ensure we have sufficient painters.
    int cueCount = (cues == null) ? 0 : cues.size();
    while (painters.size() < cueCount) {
-      painters.add(new SubtitleCuePainter(getContext()));
+      painters.add(new SubtitlePainter(getContext()));
    }
    // Invalidate to trigger drawing.
    invalidate();

View File

@@ -25,12 +25,12 @@ import java.util.Set;
public interface Cache {
  /**
-   * Interface definition for a callback to be notified of {@link Cache} events.
+   * Listener of {@link Cache} events.
   */
  interface Listener {
    /**
-     * Invoked when a {@link CacheSpan} is added to the cache.
+     * Called when a {@link CacheSpan} is added to the cache.
     *
     * @param cache The source of the event.
     * @param span The added {@link CacheSpan}.
@@ -38,7 +38,7 @@ public interface Cache {
    void onSpanAdded(Cache cache, CacheSpan span);
    /**
-     * Invoked when a {@link CacheSpan} is removed from the cache.
+     * Called when a {@link CacheSpan} is removed from the cache.
     *
     * @param cache The source of the event.
     * @param span The removed {@link CacheSpan}.
@@ -46,7 +46,7 @@ public interface Cache {
    void onSpanRemoved(Cache cache, CacheSpan span);
    /**
-     * Invoked when an existing {@link CacheSpan} is accessed, causing it to be replaced. The new
+     * Called when an existing {@link CacheSpan} is accessed, causing it to be replaced. The new
     * {@link CacheSpan} is guaranteed to represent the same data as the one it replaces, however
     * {@link CacheSpan#file} and {@link CacheSpan#lastAccessTimestamp} may have changed.
     * <p>

View File

@@ -37,12 +37,12 @@ import java.io.InterruptedIOException;
public final class CacheDataSource implements DataSource {
  /**
-   * Interface definition for a callback to be notified of {@link CacheDataSource} events.
+   * Listener of {@link CacheDataSource} events.
   */
  public interface EventListener {
    /**
-     * Invoked when bytes have been read from the cache.
+     * Called when bytes have been read from the cache.
     *
     * @param cacheSizeBytes Current cache size in bytes.
     * @param cachedBytesRead Total bytes read from the cache since this method was last invoked.

View File

@ -59,7 +59,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final EventDispatcher eventDispatcher; private final EventDispatcher eventDispatcher;
private final long allowedJoiningTimeMs; private final long allowedJoiningTimeMs;
private final int videoScalingMode; private final int videoScalingMode;
private final int maxDroppedFrameCountToNotify; private final int maxDroppedFramesToNotify;
private final boolean deviceNeedsAutoFrcWorkaround; private final boolean deviceNeedsAutoFrcWorkaround;
private int adaptiveMaxWidth; private int adaptiveMaxWidth;
@ -70,7 +70,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private boolean renderedFirstFrame; private boolean renderedFirstFrame;
private long joiningDeadlineMs; private long joiningDeadlineMs;
private long droppedFrameAccumulationStartTimeMs; private long droppedFrameAccumulationStartTimeMs;
private int droppedFrameCount; private int droppedFrames;
private int consecutiveDroppedFrameCount; private int consecutiveDroppedFrameCount;
private int pendingRotationDegrees; private int pendingRotationDegrees;
@ -145,17 +145,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
* invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
*/ */
public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector, public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector,
int videoScalingMode, long allowedJoiningTimeMs, DrmSessionManager drmSessionManager, int videoScalingMode, long allowedJoiningTimeMs, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, boolean playClearSamplesWithoutKeys, Handler eventHandler,
VideoRendererEventListener eventListener, int maxDroppedFrameCountToNotify) { VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) {
super(C.TRACK_TYPE_VIDEO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys); super(C.TRACK_TYPE_VIDEO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
this.videoScalingMode = videoScalingMode; this.videoScalingMode = videoScalingMode;
this.allowedJoiningTimeMs = allowedJoiningTimeMs; this.allowedJoiningTimeMs = allowedJoiningTimeMs;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify; this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context); frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context);
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
deviceNeedsAutoFrcWorkaround = deviceNeedsAutoFrcWorkaround(); deviceNeedsAutoFrcWorkaround = deviceNeedsAutoFrcWorkaround();
@ -268,14 +268,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
protected void onStarted() { protected void onStarted() {
super.onStarted(); super.onStarted();
droppedFrameCount = 0; droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime(); droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
} }
@Override @Override
protected void onStopped() { protected void onStopped() {
joiningDeadlineMs = -1; joiningDeadlineMs = -1;
maybeNotifyDroppedFrameCount(); maybeNotifyDroppedFrames();
super.onStopped(); super.onStopped();
} }
@ -306,10 +306,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
} }
/**
* @param surface The surface to set.
* @throws ExoPlaybackException
*/
private void setSurface(Surface surface) throws ExoPlaybackException { private void setSurface(Surface surface) throws ExoPlaybackException {
if (this.surface == surface) { if (this.surface == surface) {
return; return;
@ -350,13 +346,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
: newFormat.rotationDegrees; : newFormat.rotationDegrees;
} }
/**
* @return True if the first frame has been rendered (playback has not necessarily begun).
*/
protected final boolean haveRenderedFirstFrame() {
return renderedFirstFrame;
}
@Override @Override
protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) { protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) {
boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT) boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
@ -464,28 +453,28 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return false; return false;
} }
protected void skipOutputBuffer(MediaCodec codec, int bufferIndex) { private void skipOutputBuffer(MediaCodec codec, int bufferIndex) {
TraceUtil.beginSection("skipVideoBuffer"); TraceUtil.beginSection("skipVideoBuffer");
codec.releaseOutputBuffer(bufferIndex, false); codec.releaseOutputBuffer(bufferIndex, false);
TraceUtil.endSection(); TraceUtil.endSection();
decoderCounters.skippedOutputBufferCount++; decoderCounters.skippedOutputBufferCount++;
} }
protected void dropOutputBuffer(MediaCodec codec, int bufferIndex) { private void dropOutputBuffer(MediaCodec codec, int bufferIndex) {
TraceUtil.beginSection("dropVideoBuffer"); TraceUtil.beginSection("dropVideoBuffer");
codec.releaseOutputBuffer(bufferIndex, false); codec.releaseOutputBuffer(bufferIndex, false);
TraceUtil.endSection(); TraceUtil.endSection();
decoderCounters.droppedOutputBufferCount++; decoderCounters.droppedOutputBufferCount++;
droppedFrameCount++; droppedFrames++;
consecutiveDroppedFrameCount++; consecutiveDroppedFrameCount++;
decoderCounters.maxConsecutiveDroppedOutputBufferCount = Math.max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedOutputBufferCount = Math.max(consecutiveDroppedFrameCount,
decoderCounters.maxConsecutiveDroppedOutputBufferCount); decoderCounters.maxConsecutiveDroppedOutputBufferCount);
if (droppedFrameCount == maxDroppedFrameCountToNotify) { if (droppedFrames == maxDroppedFramesToNotify) {
maybeNotifyDroppedFrameCount(); maybeNotifyDroppedFrames();
} }
} }
protected void renderOutputBuffer(MediaCodec codec, int bufferIndex) { private void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
maybeNotifyVideoSizeChanged(); maybeNotifyVideoSizeChanged();
TraceUtil.beginSection("releaseOutputBuffer"); TraceUtil.beginSection("releaseOutputBuffer");
codec.releaseOutputBuffer(bufferIndex, true); codec.releaseOutputBuffer(bufferIndex, true);
@@ -497,7 +486,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
@TargetApi(21) @TargetApi(21)
protected void renderOutputBufferV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) { private void renderOutputBufferV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
maybeNotifyVideoSizeChanged(); maybeNotifyVideoSizeChanged();
TraceUtil.beginSection("releaseOutputBuffer"); TraceUtil.beginSection("releaseOutputBuffer");
codec.releaseOutputBuffer(bufferIndex, releaseTimeNs); codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
@@ -588,12 +577,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
} }
private void maybeNotifyDroppedFrameCount() { private void maybeNotifyDroppedFrames() {
if (droppedFrameCount > 0) { if (droppedFrames > 0) {
long now = SystemClock.elapsedRealtime(); long now = SystemClock.elapsedRealtime();
long elapsedMs = now - droppedFrameAccumulationStartTimeMs; long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
eventDispatcher.droppedFrameCount(droppedFrameCount, elapsedMs); eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
droppedFrameCount = 0; droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = now; droppedFrameAccumulationStartTimeMs = now;
} }
} }
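For context, here is a minimal, illustrative sketch (not part of this change) of an application-side VideoRendererEventListener that consumes the onDroppedFrames notifications dispatched by maybeNotifyDroppedFrames above. The class name, drop-rate arithmetic, and log tag are assumptions made for the sketch, and the import paths assume the ExoPlayer 2.x package layout.

import android.util.Log;
import android.view.Surface;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.video.VideoRendererEventListener;

// Illustrative listener: converts each (count, elapsedMs) pair delivered by
// onDroppedFrames into an approximate drops-per-second figure and logs it.
final class DroppedFrameLogger implements VideoRendererEventListener {

  @Override
  public void onDroppedFrames(int count, long elapsedMs) {
    double dropsPerSecond = elapsedMs > 0 ? (count * 1000.0) / elapsedMs : 0d;
    Log.w("DroppedFrameLogger",
        "Dropped " + count + " frames in " + elapsedMs + " ms (~" + dropsPerSecond + "/s)");
  }

  // The remaining callbacks are intentionally no-ops in this sketch.
  @Override public void onVideoEnabled(DecoderCounters counters) {}
  @Override public void onVideoDecoderInitialized(String decoderName,
      long initializedTimestampMs, long initializationDurationMs) {}
  @Override public void onVideoInputFormatChanged(Format format) {}
  @Override public void onVideoSizeChanged(int width, int height,
      int unappliedRotationDegrees, float pixelWidthHeightRatio) {}
  @Override public void onDrawnToSurface(Surface surface) {}
  @Override public void onVideoDisabled(DecoderCounters counters) {}
}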

View File

@@ -51,16 +51,16 @@ public final class VideoFrameReleaseTimeHelper {
private long frameCount; private long frameCount;
/** /**
* Constructs an instance that smoothes frame release but does not snap release to the default * Constructs an instance that smoothes frame release timestamps but does not align them with
* display's vsync signal. * the default display's vsync signal.
*/ */
public VideoFrameReleaseTimeHelper() { public VideoFrameReleaseTimeHelper() {
this(-1, false); this(-1, false);
} }
/** /**
* Constructs an instance that smoothes frame release and snaps release to the default display's * Constructs an instance that smoothes frame release timestamps and aligns them with the default
* vsync signal. * display's vsync signal.
* *
* @param context A context from which information about the default display can be retrieved. * @param context A context from which information about the default display can be retrieved.
*/ */
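As a small usage note for the two constructors documented above: without a Context the helper can only smooth release timestamps, while passing a Context additionally enables alignment with the default display's vsync signal. This is a sketch only; the variable names are placeholders and the Context is assumed to be available from the caller.

// Smoothing only, no vsync alignment (no Context available).
VideoFrameReleaseTimeHelper smoothingHelper = new VideoFrameReleaseTimeHelper();

// Smoothing plus alignment with the default display's vsync signal.
VideoFrameReleaseTimeHelper vsyncAlignedHelper = new VideoFrameReleaseTimeHelper(context);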
@@ -102,12 +102,12 @@ public final class VideoFrameReleaseTimeHelper {
} }
/** /**
* Called to make a fine-grained adjustment to a frame release time. * Adjusts a frame release timestamp.
* *
* @param framePresentationTimeUs The frame's media presentation time, in microseconds. * @param framePresentationTimeUs The frame's presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in * @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}. * the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as * @return The adjusted frame release timestamp, in nanoseconds and in the same time base as
* {@link System#nanoTime()}. * {@link System#nanoTime()}.
*/ */
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) { public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
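A hedged sketch of how a caller might use adjustReleaseTime follows. The way the unadjusted release time is derived below, from System.nanoTime() plus how early the frame is relative to the playback position, is an assumption for illustration rather than code from this change.

// positionUs: current playback position; framePresentationTimeUs: the frame's
// presentation time. Both are in microseconds.
long earlyUs = framePresentationTimeUs - positionUs;
long unadjustedReleaseTimeNs = System.nanoTime() + (earlyUs * 1000);
long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
    framePresentationTimeUs, unadjustedReleaseTimeNs);
// On API 21+ the adjusted value can then be passed to
// MediaCodec.releaseOutputBuffer(bufferIndex, adjustedReleaseTimeNs),
// as renderOutputBufferV21 does in the renderer above.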
@@ -205,9 +205,9 @@ public final class VideoFrameReleaseTimeHelper {
} }
/** /**
* Manages the lifecycle of a single {@link Choreographer} to be shared among all * Samples display vsync timestamps. A single instance using a single {@link Choreographer} is
* {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a bug fixed in platform * shared by all {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a resource
* API version 23 that causes resource leakage. See [Internal: b/12455729]. * leak in the platform on API levels prior to 23. See [Internal: b/12455729].
*/ */
private static final class VSyncSampler implements FrameCallback, Handler.Callback { private static final class VSyncSampler implements FrameCallback, Handler.Callback {
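The shared-instance pattern described in the Javadoc above can be sketched standalone as follows. This is an illustrative reduction, not the real VSyncSampler: the actual class also tracks observers via handler messages and registers or unregisters its frame callback as observers come and go.

import android.view.Choreographer;

// Illustrative only: one process-wide Choreographer callback whose latest vsync
// timestamp can be read by any number of observers, avoiding per-helper
// callback registration (and the pre-API-23 leak mentioned above).
final class SharedVsyncSampler implements Choreographer.FrameCallback {

  private static final SharedVsyncSampler INSTANCE = new SharedVsyncSampler();

  public volatile long sampledVsyncTimeNs;

  // Must be created on a thread that has a Looper, since Choreographer is
  // thread-bound.
  private SharedVsyncSampler() {
    Choreographer.getInstance().postFrameCallback(this);
  }

  public static SharedVsyncSampler getInstance() {
    return INSTANCE;
  }

  @Override
  public void doFrame(long vsyncTimeNs) {
    sampledVsyncTimeNs = vsyncTimeNs;
    Choreographer.getInstance().postFrameCallback(this);
  }
}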
@@ -236,17 +236,16 @@ public final class VideoFrameReleaseTimeHelper {
} }
/** /**
* Tells the {@link VSyncSampler} that there is a new {@link VideoFrameReleaseTimeHelper} * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is observing
* instance observing the currentSampledVsyncTimeNs value. As a consequence, if necessary, it * {@link #sampledVsyncTimeNs}, and hence that the value should be periodically updated.
* will register itself as a {@code doFrame} callback listener.
*/ */
public void addObserver() { public void addObserver() {
handler.sendEmptyMessage(MSG_ADD_OBSERVER); handler.sendEmptyMessage(MSG_ADD_OBSERVER);
} }
/** /**
* Counterpart of {@code addNewObservingHelper}. This method should be called once the observer * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is no longer observing
* no longer needs to read {@link #sampledVsyncTimeNs} * {@link #sampledVsyncTimeNs}.
*/ */
public void removeObserver() { public void removeObserver() {
handler.sendEmptyMessage(MSG_REMOVE_OBSERVER); handler.sendEmptyMessage(MSG_REMOVE_OBSERVER);
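A brief hedged note on usage: the two notifications above are expected to be paired, with addObserver called when a helper starts reading the sampled vsync time and removeObserver called when it stops. The enable/disable method names and the useDefaultDisplayVsync field in this sketch are assumptions, not the helper's documented API.

// Within VideoFrameReleaseTimeHelper (sketch; names are assumed):
public void enable() {
  if (useDefaultDisplayVsync) {
    vsyncSampler.addObserver();
  }
}

public void disable() {
  if (useDefaultDisplayVsync) {
    vsyncSampler.removeObserver();
  }
}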

View File

@@ -26,12 +26,12 @@ import android.view.Surface;
import android.view.TextureView; import android.view.TextureView;
/** /**
* Interface definition for a callback to be notified of video {@link Renderer} events. * Listener of video {@link Renderer} events.
*/ */
public interface VideoRendererEventListener { public interface VideoRendererEventListener {
/** /**
* Invoked when the renderer is enabled. * Called when the renderer is enabled.
* *
* @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it * @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
* remains enabled. * remains enabled.
@@ -39,7 +39,7 @@ public interface VideoRendererEventListener {
void onVideoEnabled(DecoderCounters counters); void onVideoEnabled(DecoderCounters counters);
/** /**
* Invoked when a decoder is created. * Called when a decoder is created.
* *
* @param decoderName The decoder that was created. * @param decoderName The decoder that was created.
* @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
@@ -50,14 +50,14 @@ public interface VideoRendererEventListener {
long initializationDurationMs); long initializationDurationMs);
/** /**
* Invoked when the format of the media being consumed by the renderer changes. * Called when the format of the media being consumed by the renderer changes.
* *
* @param format The new format. * @param format The new format.
*/ */
void onVideoInputFormatChanged(Format format); void onVideoInputFormatChanged(Format format);
/** /**
* Invoked to report the number of frames dropped by the renderer. Dropped frames are reported * Called to report the number of frames dropped by the renderer. Dropped frames are reported
* whenever the renderer is stopped having dropped frames, and optionally, whenever the count * whenever the renderer is stopped having dropped frames, and optionally, whenever the count
* reaches a specified threshold whilst the renderer is started. * reaches a specified threshold whilst the renderer is started.
* *
@@ -70,7 +70,7 @@ public interface VideoRendererEventListener {
void onDroppedFrames(int count, long elapsedMs); void onDroppedFrames(int count, long elapsedMs);
/** /**
* Invoked each time there's a change in the size of the video being rendered. * Called each time there's a change in the size of the video being rendered.
* *
* @param width The video width in pixels. * @param width The video width in pixels.
* @param height The video height in pixels. * @param height The video height in pixels.
@@ -89,7 +89,7 @@ public interface VideoRendererEventListener {
float pixelWidthHeightRatio); float pixelWidthHeightRatio);
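Since this callback reports an unapplied rotation and a pixel aspect ratio alongside the dimensions, a short illustrative helper shows one way an application might combine them into a display aspect ratio. The rotation handling is an assumption made for this sketch.

// Illustrative only: derive a display aspect ratio from onVideoSizeChanged values.
static float displayAspectRatio(int width, int height, int unappliedRotationDegrees,
    float pixelWidthHeightRatio) {
  boolean rotated = unappliedRotationDegrees == 90 || unappliedRotationDegrees == 270;
  float outputWidth = rotated ? height : width;
  float outputHeight = rotated ? width : height;
  return outputHeight == 0 ? 1f : (outputWidth * pixelWidthHeightRatio) / outputHeight;
}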
/** /**
* Invoked when a frame is rendered to a surface for the first time following that surface * Called when a frame is rendered to a surface for the first time following that surface
* having been set as the target for the renderer. * having been set as the target for the renderer.
* *
* @param surface The surface to which a first frame has been rendered. * @param surface The surface to which a first frame has been rendered.
@@ -97,7 +97,7 @@ public interface VideoRendererEventListener {
void onDrawnToSurface(Surface surface); void onDrawnToSurface(Surface surface);
/** /**
* Invoked when the renderer is disabled. * Called when the renderer is disabled.
* *
* @param counters {@link DecoderCounters} that were updated by the renderer. * @param counters {@link DecoderCounters} that were updated by the renderer.
*/ */
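As a complement, a tiny hypothetical helper that summarizes the DecoderCounters delivered to onVideoDisabled, using only counter fields that appear elsewhere in this change:

// Illustrative only: format a DecoderCounters summary for logging.
static String describeCounters(DecoderCounters counters) {
  return "skipped=" + counters.skippedOutputBufferCount
      + " dropped=" + counters.droppedOutputBufferCount
      + " maxConsecutiveDropped=" + counters.maxConsecutiveDroppedOutputBufferCount;
}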
@@ -111,11 +111,19 @@ public interface VideoRendererEventListener {
private final Handler handler; private final Handler handler;
private final VideoRendererEventListener listener; private final VideoRendererEventListener listener;
/**
* @param handler A handler for dispatching events, or null if creating a dummy instance.
* @param listener The listener to which events should be dispatched, or null if creating a
* dummy instance.
*/
public EventDispatcher(Handler handler, VideoRendererEventListener listener) { public EventDispatcher(Handler handler, VideoRendererEventListener listener) {
this.handler = listener != null ? Assertions.checkNotNull(handler) : null; this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
this.listener = listener; this.listener = listener;
} }
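The Javadoc added above allows both arguments to be null. A minimal hypothetical usage of the two constructor forms (the eventListener variable is a placeholder, and android.os.Handler and android.os.Looper imports are assumed):

// A dispatcher that posts events to the main thread for a real listener.
EventDispatcher eventDispatcher =
    new EventDispatcher(new Handler(Looper.getMainLooper()), eventListener);

// A "dummy" dispatcher whose dispatch methods become no-ops.
EventDispatcher noOpDispatcher = new EventDispatcher(null, null);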
/**
* Invokes {@link VideoRendererEventListener#onVideoEnabled(DecoderCounters)}.
*/
public void enabled(final DecoderCounters decoderCounters) { public void enabled(final DecoderCounters decoderCounters) {
if (listener != null) { if (listener != null) {
handler.post(new Runnable() { handler.post(new Runnable() {
@@ -127,6 +135,9 @@ public interface VideoRendererEventListener {
} }
} }
/**
* Invokes {@link VideoRendererEventListener#onVideoDecoderInitialized(String, long, long)}.
*/
public void decoderInitialized(final String decoderName, public void decoderInitialized(final String decoderName,
final long initializedTimestampMs, final long initializationDurationMs) { final long initializedTimestampMs, final long initializationDurationMs) {
if (listener != null) { if (listener != null) {
@@ -140,6 +151,9 @@ public interface VideoRendererEventListener {
} }
} }
/**
* Invokes {@link VideoRendererEventListener#onVideoInputFormatChanged(Format)}.
*/
public void inputFormatChanged(final Format format) { public void inputFormatChanged(final Format format) {
if (listener != null) { if (listener != null) {
handler.post(new Runnable() { handler.post(new Runnable() {
@@ -151,7 +165,10 @@ public interface VideoRendererEventListener {
} }
} }
public void droppedFrameCount(final int droppedFrameCount, final long elapsedMs) { /**
* Invokes {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
*/
public void droppedFrames(final int droppedFrameCount, final long elapsedMs) {
if (listener != null) { if (listener != null) {
handler.post(new Runnable() { handler.post(new Runnable() {
@Override @Override
@@ -162,6 +179,9 @@ public interface VideoRendererEventListener {
} }
} }
/**
* Invokes {@link VideoRendererEventListener#onVideoSizeChanged(int, int, int, float)}.
*/
public void videoSizeChanged(final int width, final int height, public void videoSizeChanged(final int width, final int height,
final int unappliedRotationDegrees, final float pixelWidthHeightRatio) { final int unappliedRotationDegrees, final float pixelWidthHeightRatio) {
if (listener != null) { if (listener != null) {
@@ -175,6 +195,9 @@ public interface VideoRendererEventListener {
} }
} }
/**
* Invokes {@link VideoRendererEventListener#onDrawnToSurface(Surface)}.
*/
public void drawnToSurface(final Surface surface) { public void drawnToSurface(final Surface surface) {
if (listener != null) { if (listener != null) {
handler.post(new Runnable() { handler.post(new Runnable() {
@@ -186,6 +209,9 @@ public interface VideoRendererEventListener {
} }
} }
/**
* Invokes {@link VideoRendererEventListener#onVideoDisabled(DecoderCounters)}.
*/
public void disabled(final DecoderCounters counters) { public void disabled(final DecoderCounters counters) {
if (listener != null) { if (listener != null) {
handler.post(new Runnable() { handler.post(new Runnable() {