Fix up a bunch more Javadoc.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=128565942

This commit is contained in:
parent 3cb5862621
commit ac59bde2be
@@ -56,7 +56,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {

private final boolean scaleToFit;
private final long allowedJoiningTimeMs;
private final int maxDroppedFrameCountToNotify;
private final int maxDroppedFramesToNotify;
private final EventDispatcher eventDispatcher;
private final FormatHolder formatHolder;

@@ -81,7 +81,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
private int previousHeight;

private long droppedFrameAccumulationStartTimeMs;
private int droppedFrameCount;
private int droppedFrames;
private int consecutiveDroppedFrameCount;

/**
@@ -100,16 +100,16 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
* invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
*/
public LibvpxVideoRenderer(boolean scaleToFit, long allowedJoiningTimeMs,
Handler eventHandler, VideoRendererEventListener eventListener,
int maxDroppedFrameCountToNotify) {
int maxDroppedFramesToNotify) {
super(C.TRACK_TYPE_VIDEO);
this.scaleToFit = scaleToFit;
this.allowedJoiningTimeMs = allowedJoiningTimeMs;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
joiningDeadlineMs = -1;
previousWidth = -1;
previousHeight = -1;
@@ -169,7 +169,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
long codecInitializedTimestamp = SystemClock.elapsedRealtime();
eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
codecInitializedTimestamp - codecInitializingTimestamp);
decoderCounters.codecInitCount++;
decoderCounters.decoderInitCount++;
}
TraceUtil.beginSection("drainAndFeed");
while (drainOutputBuffer(positionUs)) {}
@@ -220,13 +220,13 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
|| (nextOutputBuffer != null && !nextOutputBuffer.isEndOfStream()
&& nextOutputBuffer.timeUs < positionUs)) {
decoderCounters.droppedOutputBufferCount++;
droppedFrameCount++;
droppedFrames++;
consecutiveDroppedFrameCount++;
decoderCounters.maxConsecutiveDroppedOutputBufferCount = Math.max(
consecutiveDroppedFrameCount,
decoderCounters.maxConsecutiveDroppedOutputBufferCount);
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
maybeNotifyDroppedFrameCount();
if (droppedFrames == maxDroppedFramesToNotify) {
maybeNotifyDroppedFrames();
}
outputBuffer.release();
outputBuffer = null;
@@ -374,14 +374,14 @@ public final class LibvpxVideoRenderer extends BaseRenderer {

@Override
protected void onStarted() {
droppedFrameCount = 0;
droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
}

@Override
protected void onStopped() {
joiningDeadlineMs = -1;
maybeNotifyDroppedFrameCount();
maybeNotifyDroppedFrames();
}

@Override
@@ -401,7 +401,7 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
if (decoder != null) {
decoder.release();
decoder = null;
decoderCounters.codecReleaseCount++;
decoderCounters.decoderReleaseCount++;
}
}

@@ -474,12 +474,12 @@ public final class LibvpxVideoRenderer extends BaseRenderer {
}
}

private void maybeNotifyDroppedFrameCount() {
if (droppedFrameCount > 0) {
private void maybeNotifyDroppedFrames() {
if (droppedFrames > 0) {
long now = SystemClock.elapsedRealtime();
long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
eventDispatcher.droppedFrameCount(droppedFrameCount, elapsedMs);
droppedFrameCount = 0;
eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = now;
}
}
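The hunks above rename the constructor's maxDroppedFrameCountToNotify parameter to maxDroppedFramesToNotify. A minimal usage sketch, not part of this commit (the joining time and threshold values are illustrative, and ExoPlayer imports are omitted because file paths are not shown in this diff):

static LibvpxVideoRenderer buildVpxRenderer() {
  // Event delivery is skipped here, which the Javadoc above permits via null arguments.
  return new LibvpxVideoRenderer(true /* scaleToFit */, 5000 /* allowedJoiningTimeMs */,
      null /* eventHandler */, null /* eventListener */, 50 /* maxDroppedFramesToNotify */);
}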
@@ -97,7 +97,7 @@ import com.google.android.exoplayer2.source.Timeline;
public interface ExoPlayer {

/**
* Interface definition for a callback to be notified of changes in player state.
* Listener of changes in player state.
*/
interface EventListener {
@@ -214,10 +214,11 @@ public final class Ac3Util {
}

/**
* Like {@link #parseEAc3SyncframeAudioSampleCount(byte[])} but reads from a byte buffer. The
* buffer position is not modified.
* Like {@link #parseEAc3SyncframeAudioSampleCount(byte[])} but reads from a {@link ByteBuffer}.
* The buffer's position is not modified.
*
* @see #parseEAc3SyncframeAudioSampleCount(byte[])
* @param buffer The {@link ByteBuffer} from which to read.
* @return The number of audio samples represented by the syncframe.
*/
public static int parseEAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 366 subsection E.1.2.2.
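Because the reworded Javadoc above stresses that the ByteBuffer variant leaves the buffer's position untouched, a caller can peek at a pending syncframe and still hand the same buffer on. A minimal sketch, not part of this commit (the sample rate is assumed to be known from the stream's format):

static long eac3SyncframeDurationUs(ByteBuffer syncframe, int sampleRateHz) {
  // The buffer's position is not modified, so the bytes can still be consumed afterwards.
  int sampleCount = Ac3Util.parseEAc3SyncframeAudioSampleCount(syncframe);
  return (sampleCount * 1000000L) / sampleRateHz;
}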
@@ -26,7 +26,7 @@ import android.media.AudioManager;
import java.util.Arrays;

/**
* Represents the set of audio formats a device is capable of playing back.
* Represents the set of audio formats that a device is capable of playing.
*/
@TargetApi(21)
public final class AudioCapabilities {
@@ -38,11 +38,10 @@ public final class AudioCapabilities {
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);

/**
* Gets the current audio capabilities. Note that to be notified when audio capabilities change,
* you can create an instance of {@link AudioCapabilitiesReceiver} and register a listener.
* Returns the current audio capabilities for the device.
*
* @param context Context for receiving the initial broadcast.
* @return Current audio capabilities for the device.
* @param context A context for obtaining the current audio capabilities.
* @return The current audio capabilities for the device.
*/
@SuppressWarnings("InlinedApi")
public static AudioCapabilities getCapabilities(Context context) {
@@ -90,7 +89,9 @@ public final class AudioCapabilities {
return Arrays.binarySearch(supportedEncodings, encoding) >= 0;
}

/** Returns the maximum number of channels the device can play at the same time. */
/**
* Returns the maximum number of channels the device can play at the same time.
*/
public int getMaxChannelCount() {
return maxChannelCount;
}
@@ -25,8 +25,8 @@ import android.content.IntentFilter;
import android.media.AudioManager;

/**
* Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
* (or resume) receiving notifications, and {@link #unregister} to stop.
* Receives broadcast events indicating changes to the device's audio capabilities, notifying a
* {@link Listener} when audio capability changes occur.
*/
public final class AudioCapabilitiesReceiver {

@@ -38,7 +38,7 @@ public final class AudioCapabilitiesReceiver {
/**
* Called when the audio capabilities change.
*
* @param audioCapabilities Current audio capabilities for the device.
* @param audioCapabilities The current audio capabilities for the device.
*/
void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);

@@ -51,10 +51,8 @@ public final class AudioCapabilitiesReceiver {
/* package */ AudioCapabilities audioCapabilities;

/**
* Constructs a new audio capabilities receiver.
*
* @param context Context for registering to receive broadcasts.
* @param listener Listener to notify when audio capabilities change.
* @param context A context for registering the receiver.
* @param listener The listener to notify when audio capabilities change.
*/
public AudioCapabilitiesReceiver(Context context, Listener listener) {
this.context = Assertions.checkNotNull(context);
@@ -63,11 +61,11 @@ public final class AudioCapabilitiesReceiver {
}

/**
* Registers to notify the listener when audio capabilities change. The current capabilities will
* be returned. It is important to call {@link #unregister} so that the listener can be garbage
* collected.
* Registers the receiver, meaning it will notify the listener when audio capability changes
* occur. The current audio capabilities will be returned. It is important to call
* {@link #unregister} when the receiver is no longer required.
*
* @return Current audio capabilities for the device.
* @return The current audio capabilities for the device.
*/
@SuppressWarnings("InlinedApi")
public AudioCapabilities register() {
@@ -78,7 +76,8 @@ public final class AudioCapabilitiesReceiver {
}

/**
* Unregisters to stop notifying the listener when audio capabilities change.
* Unregisters the receiver, meaning it will no longer notify the listener when audio capability
* changes occur.
*/
public void unregister() {
if (receiver != null) {
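A minimal sketch of the register/unregister lifecycle described in the rewritten Javadoc above, tied to an Activity. The Activity subclass itself is illustrative and not part of this commit; imports are omitted because file paths are not shown in this diff.

public final class CapabilityAwareActivity extends Activity
    implements AudioCapabilitiesReceiver.Listener {

  private AudioCapabilitiesReceiver audioCapabilitiesReceiver;

  @Override
  protected void onStart() {
    super.onStart();
    audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(this, this);
    // register() returns the current capabilities immediately.
    onAudioCapabilitiesChanged(audioCapabilitiesReceiver.register());
  }

  @Override
  protected void onStop() {
    // Unregister when the receiver is no longer required.
    audioCapabilitiesReceiver.unregister();
    super.onStop();
  }

  @Override
  public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
    int maxChannels = audioCapabilities.getMaxChannelCount();
    // React to the change, e.g. rebuild the player's audio renderers.
  }
}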
@@ -20,6 +20,9 @@ package com.google.android.exoplayer2.audio;
*/
public abstract class AudioDecoderException extends Exception {

/**
* @param detailMessage The detail message for this exception.
*/
public AudioDecoderException(String detailMessage) {
super(detailMessage);
}
@@ -24,12 +24,12 @@ import android.os.Handler;
import android.os.SystemClock;

/**
* Interface definition for a callback to be notified of audio {@link Renderer} events.
* Listener of audio {@link Renderer} events.
*/
public interface AudioRendererEventListener {

/**
* Invoked when the renderer is enabled.
* Called when the renderer is enabled.
*
* @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
* remains enabled.
@@ -37,14 +37,14 @@ public interface AudioRendererEventListener {
void onAudioEnabled(DecoderCounters counters);

/**
* Invoked when the audio session is set.
* Called when the audio session is set.
*
* @param audioSessionId The audio session id.
*/
void onAudioSessionId(int audioSessionId);

/**
* Invoked when a decoder is created.
* Called when a decoder is created.
*
* @param decoderName The decoder that was created.
* @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
@@ -55,14 +55,14 @@ public interface AudioRendererEventListener {
long initializationDurationMs);

/**
* Invoked when the format of the media being consumed by the renderer changes.
* Called when the format of the media being consumed by the renderer changes.
*
* @param format The new format.
*/
void onAudioInputFormatChanged(Format format);

/**
* Invoked when an {@link AudioTrack} underrun occurs.
* Called when an {@link AudioTrack} underrun occurs.
*
* @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
@@ -73,7 +73,7 @@ public interface AudioRendererEventListener {
void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);

/**
* Invoked when the renderer is disabled.
* Called when the renderer is disabled.
*
* @param counters {@link DecoderCounters} that were updated by the renderer.
*/
@@ -87,11 +87,19 @@ public interface AudioRendererEventListener {
private final Handler handler;
private final AudioRendererEventListener listener;

/**
* @param handler A handler for dispatching events, or null if creating a dummy instance.
* @param listener The listener to which events should be dispatched, or null if creating a
* dummy instance.
*/
public EventDispatcher(Handler handler, AudioRendererEventListener listener) {
this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
this.listener = listener;
}

/**
* Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}.
*/
public void enabled(final DecoderCounters decoderCounters) {
if (listener != null) {
handler.post(new Runnable() {
@@ -103,6 +111,9 @@ public interface AudioRendererEventListener {
}
}

/**
* Invokes {@link AudioRendererEventListener#onAudioDecoderInitialized(String, long, long)}.
*/
public void decoderInitialized(final String decoderName,
final long initializedTimestampMs, final long initializationDurationMs) {
if (listener != null) {
@@ -116,6 +127,9 @@ public interface AudioRendererEventListener {
}
}

/**
* Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}.
*/
public void inputFormatChanged(final Format format) {
if (listener != null) {
handler.post(new Runnable() {
@@ -127,6 +141,9 @@ public interface AudioRendererEventListener {
}
}

/**
* Invokes {@link AudioRendererEventListener#onAudioTrackUnderrun(int, long, long)}.
*/
public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs,
final long elapsedSinceLastFeedMs) {
if (listener != null) {
@@ -139,6 +156,9 @@ public interface AudioRendererEventListener {
}
}

/**
* Invokes {@link AudioRendererEventListener#onAudioDisabled(DecoderCounters)}.
*/
public void disabled(final DecoderCounters counters) {
if (listener != null) {
handler.post(new Runnable() {
@@ -151,6 +171,9 @@ public interface AudioRendererEventListener {
}
}

/**
* Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}.
*/
public void audioSessionId(final int audioSessionId) {
if (listener != null) {
handler.post(new Runnable() {
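The listener callbacks above were reworded from "Invoked when..." to "Called when...". A minimal logging implementation, assuming the six callbacks visible in this diff make up the whole interface (not part of this commit; the log tag is illustrative and imports are omitted):

final class LoggingAudioListener implements AudioRendererEventListener {
  private static final String TAG = "AudioEvents"; // illustrative tag

  @Override public void onAudioEnabled(DecoderCounters counters) {
    Log.d(TAG, "audio renderer enabled");
  }
  @Override public void onAudioSessionId(int audioSessionId) {
    Log.d(TAG, "audio session id: " + audioSessionId);
  }
  @Override public void onAudioDecoderInitialized(String decoderName,
      long initializedTimestampMs, long initializationDurationMs) {
    Log.d(TAG, decoderName + " initialized in " + initializationDurationMs + "ms");
  }
  @Override public void onAudioInputFormatChanged(Format format) {
    Log.d(TAG, "audio input format changed");
  }
  @Override public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
      long elapsedSinceLastFeedMs) {
    Log.w(TAG, "underrun: " + elapsedSinceLastFeedMs + "ms since last feed");
  }
  @Override public void onAudioDisabled(DecoderCounters counters) {
    Log.d(TAG, "audio renderer disabled");
  }
}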
@@ -35,28 +35,28 @@ import java.nio.ByteBuffer;
* Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
* playback position smoothing, non-blocking writes and reconfiguration.
* <p>
* Before starting playback, specify the input audio format by calling one of the {@link #configure}
* methods and {@link #initialize} the instance, optionally specifying an audio session.
* Before starting playback, specify the input format by calling
* {@link #configure(String, int, int, int, int)}. Next call {@link #initialize(int)}, optionally
* specifying an audio session.
* <p>
* Call {@link #handleBuffer(ByteBuffer, long)} to write data to play back, and
* {@link #handleDiscontinuity()} when a buffer is skipped. Call {@link #play()} to start playing
* back written data.
* Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
* when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
* <p>
* Call {@link #configure} again whenever the input format changes. If {@link #isInitialized()}
* returns false after calling {@link #configure}, it is necessary to re-{@link #initialize} the
* instance before writing more data.
* Call {@link #configure(String, int, int, int, int)} whenever the input format changes. If
* {@link #isInitialized()} returns {@code false} after the call, it is necessary to call
* {@link #initialize(int)} before writing more data.
* <p>
* The underlying framework audio track is created by {@link #initialize} and released
* asynchronously by {@link #reset} (and {@link #configure}, unless the format is unchanged).
* Reinitialization blocks until releasing the old audio track completes. It is safe to
* re-{@link #initialize} the instance after calling {@link #reset()}, without reconfiguration.
* The underlying {@link android.media.AudioTrack} is created by {@link #initialize(int)} and
* released by {@link #reset()} (and {@link #configure(String, int, int, int, int)} unless the input
* format is unchanged). It is safe to call {@link #initialize(int)} after calling {@link #reset()}
* without reconfiguration.
* <p>
* Call {@link #release()} when the instance will no longer be used.
* Call {@link #release()} when the instance is no longer required.
*/
public final class AudioTrack {

/**
* Thrown when a failure occurs instantiating an {@link android.media.AudioTrack}.
* Thrown when a failure occurs initializing an {@link android.media.AudioTrack}.
*/
public static final class InitializationException extends Exception {
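The rewritten class Javadoc above spells out the configure/initialize/handleBuffer/play lifecycle. A minimal sketch of that sequence for 16-bit stereo PCM (not part of this commit; format values are illustrative, and the trailing 0 passed to configure is assumed to select a suitable default buffer size, as the renderers later in this commit do):

void playPcm(AudioTrack audioTrack, ByteBuffer pcmData, long presentationTimeUs)
    throws AudioTrack.InitializationException, AudioTrack.WriteException {
  // 1. Specify the input format: 16-bit stereo PCM at 44100 Hz, default buffer size (assumed).
  audioTrack.configure(MimeTypes.AUDIO_RAW, 2, 44100, C.ENCODING_PCM_16BIT, 0);
  // 2. Initialize, letting the track create a new audio session.
  if (!audioTrack.isInitialized()) {
    audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
  }
  // 3. Start playback and feed data. Writes are non-blocking, so a buffer may need to be
  //    offered again until it has been fully consumed.
  audioTrack.play();
  audioTrack.handleBuffer(pcmData, presentationTimeUs);
  // 4. reset() releases the underlying track asynchronously; release() when no longer required.
  audioTrack.reset();
  audioTrack.release();
}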
@@ -65,8 +65,14 @@ public final class AudioTrack {
*/
public final int audioTrackState;

public InitializationException(
int audioTrackState, int sampleRate, int channelConfig, int bufferSize) {
/**
* @param audioTrackState The state as reported by {@link android.media.AudioTrack#getState()}.
* @param sampleRate The requested sample rate in Hz.
* @param channelConfig The requested channel configuration.
* @param bufferSize The requested buffer size in bytes.
*/
public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
int bufferSize) {
super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
+ channelConfig + ", " + bufferSize + ")");
this.audioTrackState = audioTrackState;
@@ -80,10 +86,14 @@ public final class AudioTrack {
public static final class WriteException extends Exception {

/**
* The value returned from {@link android.media.AudioTrack#write(byte[], int, int)}.
* An error value returned from {@link android.media.AudioTrack#write(byte[], int, int)}.
*/
public final int errorCode;

/**
* @param errorCode An error value returned from
* {@link android.media.AudioTrack#write(byte[], int, int)}.
*/
public WriteException(int errorCode) {
super("AudioTrack write failed: " + errorCode);
this.errorCode = errorCode;
@@ -97,8 +107,11 @@ public final class AudioTrack {
*/
public static final class InvalidAudioTrackTimestampException extends RuntimeException {

public InvalidAudioTrackTimestampException(String message) {
super(message);
/**
* @param detailMessage The detail message for this exception.
*/
public InvalidAudioTrackTimestampException(String detailMessage) {
super(detailMessage);
}

}
@@ -189,7 +202,7 @@ public final class AudioTrack {
private final AudioTrackUtil audioTrackUtil;

/**
* Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
* Used to keep the audio session active on pre-V21 builds (see {@link #initialize(int)}).
*/
private android.media.AudioTrack keepSessionIdAudioTrack;

@@ -228,9 +241,7 @@ public final class AudioTrack {
private boolean useResampledBuffer;

/**
* Creates an audio track using the specified audio capabilities and stream type.
*
* @param audioCapabilities The current audio playback capabilities.
* @param audioCapabilities The current audio capabilities.
* @param streamType The type of audio stream for the underlying {@link android.media.AudioTrack}.
*/
public AudioTrack(AudioCapabilities audioCapabilities, int streamType) {
@@ -324,20 +335,6 @@ public final class AudioTrack {
return currentPositionUs;
}

/**
* Configures (or reconfigures) the audio track, inferring a suitable buffer size automatically.
*
* @param mimeType The mime type.
* @param channelCount The number of channels.
* @param sampleRate The sample rate in Hz.
* @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
* {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
* {@link C#ENCODING_PCM_32BIT}.
*/
public void configure(String mimeType, int channelCount, int sampleRate, int pcmEncoding) {
configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
}

/**
* Configures (or reconfigures) the audio track.
*
@@ -435,15 +432,6 @@ public final class AudioTrack {
bufferSizeUs = passthrough ? C.UNSET_TIME_US : framesToDurationUs(pcmBytesToFrames(bufferSize));
}

/**
* Initializes the audio track for writing new buffers using {@link #handleBuffer}.
*
* @return The audio track session identifier.
*/
public int initialize() throws InitializationException {
return initialize(SESSION_ID_NOT_SET);
}

/**
* Initializes the audio track for writing new buffers using {@link #handleBuffer}.
*
@@ -491,7 +479,6 @@ public final class AudioTrack {

audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
setAudioTrackVolume();

return sessionId;
}

@@ -705,6 +692,8 @@ public final class AudioTrack {

/**
* Sets the playback volume.
*
* @param volume A volume in the range [0.0, 1.0].
*/
public void setVolume(float volume) {
if (this.volume != volume) {
@@ -734,9 +723,11 @@ public final class AudioTrack {
}

/**
* Releases the underlying audio track asynchronously. Calling {@link #initialize} will block
* until the audio track has been released, so it is safe to initialize immediately after
* resetting. The audio session may remain active until the instance is {@link #release}d.
* Releases the underlying audio track asynchronously.
* <p>
* Calling {@link #initialize(int)} will block until the audio track has been released, so it is
* safe to initialize immediately after a reset. The audio session may remain active until
* {@link #release()} is called.
*/
public void reset() {
if (isInitialized()) {
@@ -46,13 +46,9 @@ public final class DtsUtil {
384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, 2048, 2304, 2560, 2688, 2816,
2823, 2944, 3072, 3840, 4096, 6144, 7680};

private static final ParsableBitArray SCRATCH_BITS = new ParsableBitArray();

/**
* Returns the DTS format given {@code data} containing the DTS frame according to ETSI TS 102 114
* subsections 5.3/5.4.
* <p>
* This method may only be called from one thread at a time.
*
* @param frame The DTS frame to parse.
* @param trackId The track identifier to set on the format, or null.
@@ -62,8 +58,7 @@ public final class DtsUtil {
*/
public static Format parseDtsFormat(byte[] frame, String trackId, String language,
DrmInitData drmInitData) {
ParsableBitArray frameBits = SCRATCH_BITS;
frameBits.reset(frame);
ParsableBitArray frameBits = new ParsableBitArray(frame);
frameBits.skipBits(4 * 8 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
int amode = frameBits.readBits(6);
int channelCount = CHANNELS_BY_AMODE[amode];
@@ -91,14 +86,17 @@ public final class DtsUtil {
}

/**
* Like {@link #parseDtsAudioSampleCount(byte[])} but reads from a byte buffer. The buffer
* position is not modified.
* Like {@link #parseDtsAudioSampleCount(byte[])} but reads from a {@link ByteBuffer}. The
* buffer's position is not modified.
*
* @param buffer The {@link ByteBuffer} from which to read.
* @return The number of audio samples represented by the syncframe.
*/
public static int parseDtsAudioSampleCount(ByteBuffer data) {
public static int parseDtsAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 114 subsection 5.4.1.
int position = data.position();
int nblks = ((data.get(position + 4) & 0x01) << 6)
| ((data.get(position + 5) & 0xFC) >> 2);
int position = buffer.position();
int nblks = ((buffer.get(position + 4) & 0x01) << 6)
| ((buffer.get(position + 5) & 0xFC) >> 2);
return (nblks + 1) * 32;
}
@@ -41,7 +41,7 @@ import android.os.SystemClock;
import java.nio.ByteBuffer;

/**
* Decodes and renders audio using {@link MediaCodec} and {@link android.media.AudioTrack}.
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
*/
@TargetApi(16)
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
@@ -226,18 +226,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) {
boolean passthrough = passthroughMediaFormat != null;
String mimeType = passthrough
? passthroughMediaFormat.getString(android.media.MediaFormat.KEY_MIME)
String mimeType = passthrough ? passthroughMediaFormat.getString(MediaFormat.KEY_MIME)
: MimeTypes.AUDIO_RAW;
android.media.MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
int channelCount = format.getInteger(android.media.MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = format.getInteger(android.media.MediaFormat.KEY_SAMPLE_RATE);
audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding);
MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
}

/**
* Invoked when the audio session id becomes known. Once the id is known it will not change
* (and hence this method will not be invoked again) unless the renderer is disabled and then
* Called when the audio session id becomes known. Once the id is known it will not change (and
* hence this method will not be invoked again) unless the renderer is disabled and then
* subsequently re-enabled.
* <p>
* The default implementation is a no-op. One reason for overriding this method would be to
@@ -333,12 +332,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (!audioTrack.isInitialized()) {
// Initialize the AudioTrack now.
try {
if (audioSessionId != AudioTrack.SESSION_ID_NOT_SET) {
audioTrack.initialize(audioSessionId);
} else {
audioSessionId = audioTrack.initialize();
if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
eventDispatcher.audioSessionId(audioSessionId);
onAudioSessionId(audioSessionId);
} else {
audioTrack.initialize(audioSessionId);
}
audioTrackHasData = false;
} catch (AudioTrack.InitializationException e) {
@@ -119,7 +119,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
long codecInitializedTimestamp = SystemClock.elapsedRealtime();
eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
codecInitializedTimestamp - codecInitializingTimestamp);
decoderCounters.codecInitCount++;
decoderCounters.decoderInitCount++;
} catch (AudioDecoderException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
@@ -138,6 +138,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
decoderCounters.ensureUpdated();
}

/**
* Creates a decoder for the given format.
*
* @param format The format for which a decoder is required.
* @return The decoder.
* @throws AudioDecoderException If an error occurred creating a suitable decoder.
*/
protected abstract SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer,
? extends AudioDecoderException> createDecoder(Format format) throws AudioDecoderException;

@@ -179,13 +186,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (!audioTrack.isInitialized()) {
Format outputFormat = getOutputFormat();
audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
outputFormat.sampleRate, outputFormat.pcmEncoding);
if (audioSessionId != AudioTrack.SESSION_ID_NOT_SET) {
audioTrack.initialize(audioSessionId);
} else {
audioSessionId = audioTrack.initialize();
outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
eventDispatcher.audioSessionId(audioSessionId);
onAudioSessionId(audioSessionId);
} else {
audioTrack.initialize(audioSessionId);
}
audioTrackHasData = false;
if (getState() == STATE_STARTED) {
@@ -338,7 +345,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (decoder != null) {
decoder.release();
decoder = null;
decoderCounters.codecReleaseCount++;
decoderCounters.decoderReleaseCount++;
}
audioTrack.release();
} finally {
@@ -16,22 +16,22 @@
package com.google.android.exoplayer2.decoder;

/**
* Maintains codec event counts, for debugging purposes only.
* Maintains decoder event counts, for debugging purposes only.
* <p>
* Counters should be written from the playback thread only. Counters may be read from any thread.
* To ensure that the counter values are correctly reflected between threads, users of this class
* should invoke {@link #ensureUpdated()} prior to reading and after writing.
* To ensure that the counter values are made visible across threads, users of this class should
* invoke {@link #ensureUpdated()} prior to reading and after writing.
*/
public final class DecoderCounters {

/**
* The number of times the codec has been initialized.
* The number of times a decoder has been initialized.
*/
public int codecInitCount;
public int decoderInitCount;
/**
* The number of times the codec has been released.
* The number of times a decoder has been released.
*/
public int codecReleaseCount;
public int decoderReleaseCount;
/**
* The number of queued input buffers.
*/
@@ -76,8 +76,8 @@ public final class DecoderCounters {
* @param other The {@link DecoderCounters} to merge into this instance.
*/
public void merge(DecoderCounters other) {
codecInitCount += other.codecInitCount;
codecReleaseCount += other.codecReleaseCount;
decoderInitCount += other.decoderInitCount;
decoderReleaseCount += other.decoderReleaseCount;
inputBufferCount += other.inputBufferCount;
renderedOutputBufferCount += other.renderedOutputBufferCount;
skippedOutputBufferCount += other.skippedOutputBufferCount;
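A minimal sketch of the threading contract described in the rewritten Javadoc above: call ensureUpdated() before reading the counters from a thread other than the playback thread (not part of this commit; only fields visible in this diff are used):

static String describeCounters(DecoderCounters counters) {
  // Make writes performed on the playback thread visible to this thread before reading.
  counters.ensureUpdated();
  return "init:" + counters.decoderInitCount
      + " release:" + counters.decoderReleaseCount
      + " rendered:" + counters.renderedOutputBufferCount
      + " skipped:" + counters.skippedOutputBufferCount
      + " dropped:" + counters.droppedOutputBufferCount;
}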
@@ -47,18 +47,17 @@ import java.util.UUID;
public class StreamingDrmSessionManager implements DrmSessionManager, DrmSession {

/**
* Interface definition for a callback to be notified of {@link StreamingDrmSessionManager}
* events.
* Listener of {@link StreamingDrmSessionManager} events.
*/
public interface EventListener {

/**
* Invoked each time keys are loaded.
* Called each time keys are loaded.
*/
void onDrmKeysLoaded();

/**
* Invoked when a drm error occurs.
* Called when a drm error occurs.
*
* @param e The corresponding exception.
*/
@@ -363,7 +363,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
? (SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS) : -1;
inputIndex = -1;
outputIndex = -1;
decoderCounters.codecInitCount++;
decoderCounters.decoderInitCount++;
}

private void throwDecoderInitError(DecoderInitializationException e)
@@ -436,7 +436,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
codecReceivedEos = false;
codecReconfigurationState = RECONFIGURATION_STATE_NONE;
codecReinitializationState = REINITIALIZATION_STATE_NONE;
decoderCounters.codecReleaseCount++;
decoderCounters.decoderReleaseCount++;
try {
codec.stop();
} finally {
@@ -16,7 +16,7 @@
package com.google.android.exoplayer2.metadata;

/**
* Parses objects of type <T> from binary data.
* Decodes objects of type <T> from binary data.
*
* @param <T> The type of the metadata.
*/
@@ -21,15 +21,15 @@ package com.google.android.exoplayer2.metadata;
public class MetadataDecoderException extends Exception {

/**
* @param message The detail message.
* @param message The detail message for this exception.
*/
public MetadataDecoderException(String message) {
super(message);
}

/**
* @param message The detail message.
* @param cause The cause.
* @param message The detail message for this exception.
* @param cause The cause of this exception.
*/
public MetadataDecoderException(String message, Throwable cause) {
super(message, cause);
@@ -66,12 +66,12 @@ public final class ExtractorMediaSource implements MediaPeriod, MediaSource,
UpstreamFormatChangedListener {

/**
* Interface definition for a callback to be notified of {@link ExtractorMediaSource} events.
* Listener of {@link ExtractorMediaSource} events.
*/
public interface EventListener {

/**
* Invoked when an error occurs loading media data.
* Called when an error occurs loading media data.
*
* @param error The load error.
*/
@@ -41,12 +41,12 @@ public final class SingleSampleMediaSource implements MediaPeriod, MediaSource,
Loader.Callback<SingleSampleMediaSource.SourceLoadable> {

/**
* Interface definition for a callback to be notified of {@link SingleSampleMediaSource} events.
* Listener of {@link SingleSampleMediaSource} events.
*/
public interface EventListener {

/**
* Invoked when an error occurs loading media data.
* Called when an error occurs loading media data.
*
* @param sourceId The id of the reporting {@link SingleSampleMediaSource}.
* @param e The cause of the failure.
@@ -18,15 +18,15 @@ package com.google.android.exoplayer2.text;
import com.google.android.exoplayer2.decoder.Decoder;

/**
* Parses {@link Subtitle}s from {@link SubtitleInputBuffer}s.
* Decodes {@link Subtitle}s from {@link SubtitleInputBuffer}s.
*/
public interface SubtitleDecoder extends
Decoder<SubtitleInputBuffer, SubtitleOutputBuffer, SubtitleDecoderException> {

/**
* Informs the parser of the current playback position.
* Informs the decoder of the current playback position.
* <p>
* Must be called prior to each attempt to dequeue output buffers from the parser.
* Must be called prior to each attempt to dequeue output buffers from the decoder.
*
* @param positionUs The current playback position in microseconds.
*/
@@ -21,15 +21,15 @@ package com.google.android.exoplayer2.text;
public class SubtitleDecoderException extends Exception {

/**
* @param message The detail message.
* @param message The detail message for this exception.
*/
public SubtitleDecoderException(String message) {
super(message);
}

/**
* @param message The detail message.
* @param cause The cause.
* @param message The detail message for this exception.
* @param cause The cause of this exception.
*/
public SubtitleDecoderException(String message, Throwable cause) {
super(message, cause);
@@ -40,12 +40,12 @@ import java.util.concurrent.CopyOnWriteArraySet;
public abstract class MappingTrackSelector extends TrackSelector {

/**
* Interface definition for a callback to be notified of {@link MappingTrackSelector} events.
* Listener of {@link MappingTrackSelector} events.
*/
public interface EventListener {

/**
* Invoked when the track information has changed.
* Called when the track information has changed.
*
* @param trackInfo Contains the new track and track selection information.
*/
@@ -47,9 +47,8 @@ public final class DebugTextViewHelper implements Runnable, ExoPlayer.EventListe
}

/**
* Starts periodic updates of the {@link TextView}.
* <p>
* Should be called from the application's main thread.
* Starts periodic updates of the {@link TextView}. Must be called from the application's main
* thread.
*/
public void start() {
if (started) {
@@ -61,9 +60,8 @@ public final class DebugTextViewHelper implements Runnable, ExoPlayer.EventListe
}

/**
* Stops periodic updates of the {@link TextView}.
* <p>
* Should be called from the application's main thread.
* Stops periodic updates of the {@link TextView}. Must be called from the application's main
* thread.
*/
public void stop() {
if (!started) {
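A minimal sketch of driving start() and stop() from an Activity's lifecycle so both calls happen on the application's main thread, as the reworded Javadoc above requires. The debugViewHelper field and its construction are assumed and not part of this commit.

@Override
protected void onResume() {
  super.onResume();
  debugViewHelper.start(); // main thread
}

@Override
protected void onPause() {
  debugViewHelper.stop(); // main thread
  super.onPause();
}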
@@ -36,11 +36,11 @@ import android.util.DisplayMetrics;
import android.util.Log;

/**
* Draws subtitle {@link Cue}s.
* Paints subtitle {@link Cue}s.
*/
/* package */ final class SubtitleCuePainter {
/* package */ final class SubtitlePainter {

private static final String TAG = "SubtitleCuePainter";
private static final String TAG = "SubtitlePainter";

/**
* Ratio of inner padding to font size.
@@ -91,7 +91,7 @@ import android.util.Log;
private int textTop;
private int textPaddingX;

public SubtitleCuePainter(Context context) {
public SubtitlePainter(Context context) {
int[] viewAttr = {android.R.attr.lineSpacingExtra, android.R.attr.lineSpacingMultiplier};
TypedArray styledAttributes = context.obtainStyledAttributes(null, viewAttr, 0, 0);
spacingAdd = styledAttributes.getDimensionPixelSize(0, 0);
@@ -29,7 +29,7 @@ import java.util.ArrayList;
import java.util.List;

/**
* A view for rendering rich-formatted captions.
* A view for displaying subtitle {@link Cue}s.
*/
public final class SubtitleView extends View {

@@ -52,7 +52,7 @@ public final class SubtitleView extends View {
private static final int FRACTIONAL_IGNORE_PADDING = 1;
private static final int ABSOLUTE = 2;

private final List<SubtitleCuePainter> painters;
private final List<SubtitlePainter> painters;

private List<Cue> cues;
private int textSizeType;
@@ -88,7 +88,7 @@ public final class SubtitleView extends View {
// Ensure we have sufficient painters.
int cueCount = (cues == null) ? 0 : cues.size();
while (painters.size() < cueCount) {
painters.add(new SubtitleCuePainter(getContext()));
painters.add(new SubtitlePainter(getContext()));
}
// Invalidate to trigger drawing.
invalidate();
@@ -25,12 +25,12 @@ import java.util.Set;
public interface Cache {

/**
* Interface definition for a callback to be notified of {@link Cache} events.
* Listener of {@link Cache} events.
*/
interface Listener {

/**
* Invoked when a {@link CacheSpan} is added to the cache.
* Called when a {@link CacheSpan} is added to the cache.
*
* @param cache The source of the event.
* @param span The added {@link CacheSpan}.
@@ -38,7 +38,7 @@ public interface Cache {
void onSpanAdded(Cache cache, CacheSpan span);

/**
* Invoked when a {@link CacheSpan} is removed from the cache.
* Called when a {@link CacheSpan} is removed from the cache.
*
* @param cache The source of the event.
* @param span The removed {@link CacheSpan}.
@@ -46,7 +46,7 @@ public interface Cache {
void onSpanRemoved(Cache cache, CacheSpan span);

/**
* Invoked when an existing {@link CacheSpan} is accessed, causing it to be replaced. The new
* Called when an existing {@link CacheSpan} is accessed, causing it to be replaced. The new
* {@link CacheSpan} is guaranteed to represent the same data as the one it replaces, however
* {@link CacheSpan#file} and {@link CacheSpan#lastAccessTimestamp} may have changed.
* <p>
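A minimal sketch of a Cache.Listener that tracks the set of cached spans (not part of this commit). The first two callback names are taken from the hunks above; the third callback's name is not visible in this diff, so onSpanTouched is an assumption here.

final class SpanTrackingListener implements Cache.Listener {
  private final Set<CacheSpan> spans = new HashSet<>();

  @Override public void onSpanAdded(Cache cache, CacheSpan span) {
    spans.add(span);
  }

  @Override public void onSpanRemoved(Cache cache, CacheSpan span) {
    spans.remove(span);
  }

  @Override public void onSpanTouched(Cache cache, CacheSpan oldSpan, CacheSpan newSpan) {
    // The new span represents the same data; only its file/lastAccessTimestamp may differ.
    spans.remove(oldSpan);
    spans.add(newSpan);
  }
}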
@@ -37,12 +37,12 @@ import java.io.InterruptedIOException;
public final class CacheDataSource implements DataSource {

/**
* Interface definition for a callback to be notified of {@link CacheDataSource} events.
* Listener of {@link CacheDataSource} events.
*/
public interface EventListener {

/**
* Invoked when bytes have been read from the cache.
* Called when bytes have been read from the cache.
*
* @param cacheSizeBytes Current cache size in bytes.
* @param cachedBytesRead Total bytes read from the cache since this method was last invoked.
@@ -59,7 +59,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final EventDispatcher eventDispatcher;
private final long allowedJoiningTimeMs;
private final int videoScalingMode;
private final int maxDroppedFrameCountToNotify;
private final int maxDroppedFramesToNotify;
private final boolean deviceNeedsAutoFrcWorkaround;

private int adaptiveMaxWidth;
@@ -70,7 +70,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private boolean renderedFirstFrame;
private long joiningDeadlineMs;
private long droppedFrameAccumulationStartTimeMs;
private int droppedFrameCount;
private int droppedFrames;
private int consecutiveDroppedFrameCount;

private int pendingRotationDegrees;
@@ -145,17 +145,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
* invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
*/
public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector,
int videoScalingMode, long allowedJoiningTimeMs, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler,
VideoRendererEventListener eventListener, int maxDroppedFrameCountToNotify) {
VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) {
super(C.TRACK_TYPE_VIDEO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
this.videoScalingMode = videoScalingMode;
this.allowedJoiningTimeMs = allowedJoiningTimeMs;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context);
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
deviceNeedsAutoFrcWorkaround = deviceNeedsAutoFrcWorkaround();
@@ -268,14 +268,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
protected void onStarted() {
super.onStarted();
droppedFrameCount = 0;
droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
}

@Override
protected void onStopped() {
joiningDeadlineMs = -1;
maybeNotifyDroppedFrameCount();
maybeNotifyDroppedFrames();
super.onStopped();
}

@@ -306,10 +306,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
}

/**
* @param surface The surface to set.
* @throws ExoPlaybackException
*/
private void setSurface(Surface surface) throws ExoPlaybackException {
if (this.surface == surface) {
return;
@@ -350,13 +346,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
: newFormat.rotationDegrees;
}

/**
* @return True if the first frame has been rendered (playback has not necessarily begun).
*/
protected final boolean haveRenderedFirstFrame() {
return renderedFirstFrame;
}

@Override
protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) {
boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
@@ -464,28 +453,28 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return false;
}

protected void skipOutputBuffer(MediaCodec codec, int bufferIndex) {
private void skipOutputBuffer(MediaCodec codec, int bufferIndex) {
TraceUtil.beginSection("skipVideoBuffer");
codec.releaseOutputBuffer(bufferIndex, false);
TraceUtil.endSection();
decoderCounters.skippedOutputBufferCount++;
}

protected void dropOutputBuffer(MediaCodec codec, int bufferIndex) {
private void dropOutputBuffer(MediaCodec codec, int bufferIndex) {
TraceUtil.beginSection("dropVideoBuffer");
codec.releaseOutputBuffer(bufferIndex, false);
TraceUtil.endSection();
decoderCounters.droppedOutputBufferCount++;
droppedFrameCount++;
droppedFrames++;
consecutiveDroppedFrameCount++;
decoderCounters.maxConsecutiveDroppedOutputBufferCount = Math.max(consecutiveDroppedFrameCount,
decoderCounters.maxConsecutiveDroppedOutputBufferCount);
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
maybeNotifyDroppedFrameCount();
if (droppedFrames == maxDroppedFramesToNotify) {
maybeNotifyDroppedFrames();
}
}

protected void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
private void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
maybeNotifyVideoSizeChanged();
TraceUtil.beginSection("releaseOutputBuffer");
codec.releaseOutputBuffer(bufferIndex, true);
@@ -497,7 +486,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}

@TargetApi(21)
protected void renderOutputBufferV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
private void renderOutputBufferV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
maybeNotifyVideoSizeChanged();
TraceUtil.beginSection("releaseOutputBuffer");
codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
@@ -588,12 +577,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
}

private void maybeNotifyDroppedFrameCount() {
if (droppedFrameCount > 0) {
private void maybeNotifyDroppedFrames() {
if (droppedFrames > 0) {
long now = SystemClock.elapsedRealtime();
long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
eventDispatcher.droppedFrameCount(droppedFrameCount, elapsedMs);
droppedFrameCount = 0;
eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
droppedFrames = 0;
droppedFrameAccumulationStartTimeMs = now;
}
}
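As with LibvpxVideoRenderer earlier, the constructor parameter here is renamed to maxDroppedFramesToNotify. A minimal construction sketch (not part of this commit; values are illustrative and ExoPlayer imports are omitted):

static MediaCodecVideoRenderer buildVideoRenderer(Context context, Handler eventHandler,
    VideoRendererEventListener eventListener) {
  return new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000 /* allowedJoiningTimeMs */,
      null /* drmSessionManager */, false /* playClearSamplesWithoutKeys */,
      eventHandler, eventListener, 50 /* maxDroppedFramesToNotify */);
}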
@@ -51,16 +51,16 @@ public final class VideoFrameReleaseTimeHelper {
private long frameCount;

/**
* Constructs an instance that smoothes frame release but does not snap release to the default
* display's vsync signal.
* Constructs an instance that smoothes frame release timestamps but does not align them with
* the default display's vsync signal.
*/
public VideoFrameReleaseTimeHelper() {
this(-1, false);
}

/**
* Constructs an instance that smoothes frame release and snaps release to the default display's
* vsync signal.
* Constructs an instance that smoothes frame release timestamps and aligns them with the default
* display's vsync signal.
*
* @param context A context from which information about the default display can be retrieved.
*/
@@ -102,12 +102,12 @@ public final class VideoFrameReleaseTimeHelper {
}

/**
* Called to make a fine-grained adjustment to a frame release time.
* Adjusts a frame release timestamp.
*
* @param framePresentationTimeUs The frame's media presentation time, in microseconds.
* @param framePresentationTimeUs The frame's presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as
* @return The adjusted frame release timestamp, in nanoseconds and in the same time base as
* {@link System#nanoTime()}.
*/
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
@@ -205,9 +205,9 @@ public final class VideoFrameReleaseTimeHelper {
}

/**
* Manages the lifecycle of a single {@link Choreographer} to be shared among all
* {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a bug fixed in platform
* API version 23 that causes resource leakage. See [Internal: b/12455729].
* Samples display vsync timestamps. A single instance using a single {@link Choreographer} is
* shared by all {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a resource
* leak in the platform on API levels prior to 23. See [Internal: b/12455729].
*/
private static final class VSyncSampler implements FrameCallback, Handler.Callback {

@@ -236,17 +236,16 @@ public final class VideoFrameReleaseTimeHelper {
}

/**
* Tells the {@link VSyncSampler} that there is a new {@link VideoFrameReleaseTimeHelper}
* instance observing the currentSampledVsyncTimeNs value. As a consequence, if necessary, it
* will register itself as a {@code doFrame} callback listener.
* Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is observing
* {@link #sampledVsyncTimeNs}, and hence that the value should be periodically updated.
*/
public void addObserver() {
handler.sendEmptyMessage(MSG_ADD_OBSERVER);
}

/**
* Counterpart of {@code addNewObservingHelper}. This method should be called once the observer
* no longer needs to read {@link #sampledVsyncTimeNs}
* Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is no longer observing
* {@link #sampledVsyncTimeNs}.
*/
public void removeObserver() {
handler.sendEmptyMessage(MSG_REMOVE_OBSERVER);
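A minimal sketch of using adjustReleaseTime, as re-documented above, to align a codec output buffer release with vsync on API 21+. The earlyUs computation and the codec plumbing are assumed to happen upstream and are not part of this commit.

@TargetApi(21)
static void renderAtVsync(VideoFrameReleaseTimeHelper frameReleaseTimeHelper, MediaCodec codec,
    int bufferIndex, long framePresentationTimeUs, long earlyUs) {
  // earlyUs: how far ahead of the playback position this frame is (assumed computed upstream).
  long unadjustedReleaseTimeNs = System.nanoTime() + (earlyUs * 1000);
  long releaseTimeNs =
      frameReleaseTimeHelper.adjustReleaseTime(framePresentationTimeUs, unadjustedReleaseTimeNs);
  // Release the buffer at the adjusted, vsync-aligned time.
  codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
}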
@@ -26,12 +26,12 @@ import android.view.Surface;
import android.view.TextureView;

/**
* Interface definition for a callback to be notified of video {@link Renderer} events.
* Listener of video {@link Renderer} events.
*/
public interface VideoRendererEventListener {

/**
* Invoked when the renderer is enabled.
* Called when the renderer is enabled.
*
* @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
* remains enabled.
@@ -39,7 +39,7 @@ public interface VideoRendererEventListener {
void onVideoEnabled(DecoderCounters counters);

/**
* Invoked when a decoder is created.
* Called when a decoder is created.
*
* @param decoderName The decoder that was created.
* @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
@@ -50,14 +50,14 @@ public interface VideoRendererEventListener {
long initializationDurationMs);

/**
* Invoked when the format of the media being consumed by the renderer changes.
* Called when the format of the media being consumed by the renderer changes.
*
* @param format The new format.
*/
void onVideoInputFormatChanged(Format format);

/**
* Invoked to report the number of frames dropped by the renderer. Dropped frames are reported
* Called to report the number of frames dropped by the renderer. Dropped frames are reported
* whenever the renderer is stopped having dropped frames, and optionally, whenever the count
* reaches a specified threshold whilst the renderer is started.
*
@@ -70,7 +70,7 @@ public interface VideoRendererEventListener {
void onDroppedFrames(int count, long elapsedMs);

/**
* Invoked each time there's a change in the size of the video being rendered.
* Called each time there's a change in the size of the video being rendered.
*
* @param width The video width in pixels.
* @param height The video height in pixels.
@@ -89,7 +89,7 @@ public interface VideoRendererEventListener {
float pixelWidthHeightRatio);

/**
* Invoked when a frame is rendered to a surface for the first time following that surface
* Called when a frame is rendered to a surface for the first time following that surface
* having been set as the target for the renderer.
*
* @param surface The surface to which a first frame has been rendered.
@@ -97,7 +97,7 @@ public interface VideoRendererEventListener {
void onDrawnToSurface(Surface surface);

/**
* Invoked when the renderer is disabled.
* Called when the renderer is disabled.
*
* @param counters {@link DecoderCounters} that were updated by the renderer.
*/
@@ -111,11 +111,19 @@ public interface VideoRendererEventListener {
private final Handler handler;
private final VideoRendererEventListener listener;

/**
* @param handler A handler for dispatching events, or null if creating a dummy instance.
* @param listener The listener to which events should be dispatched, or null if creating a
* dummy instance.
*/
public EventDispatcher(Handler handler, VideoRendererEventListener listener) {
this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
this.listener = listener;
}

/**
* Invokes {@link VideoRendererEventListener#onVideoEnabled(DecoderCounters)}.
*/
public void enabled(final DecoderCounters decoderCounters) {
if (listener != null) {
handler.post(new Runnable() {
@@ -127,6 +135,9 @@ public interface VideoRendererEventListener {
}
}

/**
* Invokes {@link VideoRendererEventListener#onVideoDecoderInitialized(String, long, long)}.
*/
public void decoderInitialized(final String decoderName,
final long initializedTimestampMs, final long initializationDurationMs) {
if (listener != null) {
@@ -140,6 +151,9 @@ public interface VideoRendererEventListener {
}
}

/**
* Invokes {@link VideoRendererEventListener#onVideoInputFormatChanged(Format)}.
*/
public void inputFormatChanged(final Format format) {
if (listener != null) {
handler.post(new Runnable() {
@@ -151,7 +165,10 @@ public interface VideoRendererEventListener {
}
}

public void droppedFrameCount(final int droppedFrameCount, final long elapsedMs) {
/**
* Invokes {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
*/
public void droppedFrames(final int droppedFrameCount, final long elapsedMs) {
if (listener != null) {
handler.post(new Runnable() {
@Override
@@ -162,6 +179,9 @@ public interface VideoRendererEventListener {
}
}

/**
* Invokes {@link VideoRendererEventListener#onVideoSizeChanged(int, int, int, float)}.
*/
public void videoSizeChanged(final int width, final int height,
final int unappliedRotationDegrees, final float pixelWidthHeightRatio) {
if (listener != null) {
@@ -175,6 +195,9 @@ public interface VideoRendererEventListener {
}
}

/**
* Invokes {@link VideoRendererEventListener#onDrawnToSurface(Surface)}.
*/
public void drawnToSurface(final Surface surface) {
if (listener != null) {
handler.post(new Runnable() {
@@ -186,6 +209,9 @@ public interface VideoRendererEventListener {
}
}

/**
* Invokes {@link VideoRendererEventListener#onVideoDisabled(DecoderCounters)}.
*/
public void disabled(final DecoderCounters counters) {
if (listener != null) {
handler.post(new Runnable() {
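A minimal logging implementation of the listener whose callbacks were reworded above, assuming the seven callbacks visible in this diff make up the whole interface (not part of this commit; the log tag is illustrative and imports are omitted):

final class LoggingVideoListener implements VideoRendererEventListener {
  private static final String TAG = "VideoEvents"; // illustrative tag

  @Override public void onVideoEnabled(DecoderCounters counters) {
    Log.d(TAG, "video renderer enabled");
  }
  @Override public void onVideoDecoderInitialized(String decoderName,
      long initializedTimestampMs, long initializationDurationMs) {
    Log.d(TAG, decoderName + " initialized in " + initializationDurationMs + "ms");
  }
  @Override public void onVideoInputFormatChanged(Format format) {
    Log.d(TAG, "video input format changed");
  }
  @Override public void onDroppedFrames(int count, long elapsedMs) {
    Log.w(TAG, count + " frames dropped over " + elapsedMs + "ms");
  }
  @Override public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
      float pixelWidthHeightRatio) {
    Log.d(TAG, "video size: " + width + "x" + height);
  }
  @Override public void onDrawnToSurface(Surface surface) {
    Log.d(TAG, "first frame drawn to surface");
  }
  @Override public void onVideoDisabled(DecoderCounters counters) {
    Log.d(TAG, "video renderer disabled");
  }
}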