Fix some nullness warnings in audio classes

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=206021873
This commit is contained in:
andrewlewis 2018-07-25 11:06:28 -07:00 committed by Oliver Woodman
parent d810352f2c
commit 5da703a588
4 changed files with 29 additions and 26 deletions

View File

@ -39,12 +39,9 @@ public final class AudioAttributes {
*/
public static final class Builder {
@C.AudioContentType
private int contentType;
@C.AudioFlags
private int flags;
@C.AudioUsage
private int usage;
private @C.AudioContentType int contentType;
private @C.AudioFlags int flags;
private @C.AudioUsage int usage;
/**
* Creates a new builder for {@link AudioAttributes}.
@ -91,14 +88,11 @@ public final class AudioAttributes {
}
@C.AudioContentType
public final int contentType;
@C.AudioFlags
public final int flags;
@C.AudioUsage
public final int usage;
public final @C.AudioContentType int contentType;
public final @C.AudioFlags int flags;
public final @C.AudioUsage int usage;
private android.media.AudioAttributes audioAttributesV21;
private @Nullable android.media.AudioAttributes audioAttributesV21;
private AudioAttributes(@C.AudioContentType int contentType, @C.AudioFlags int flags,
@C.AudioUsage int usage) {

View File

@ -50,7 +50,7 @@ public final class AudioCapabilities {
}
@SuppressLint("InlinedApi")
/* package */ static AudioCapabilities getCapabilities(Intent intent) {
/* package */ static AudioCapabilities getCapabilities(@Nullable Intent intent) {
if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
return DEFAULT_AUDIO_CAPABILITIES;
}

View File

@ -20,6 +20,7 @@ import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
@ -45,9 +46,9 @@ public final class AudioCapabilitiesReceiver {
private final Context context;
private final Listener listener;
private final BroadcastReceiver receiver;
private final @Nullable BroadcastReceiver receiver;
/* package */ AudioCapabilities audioCapabilities;
/* package */ @Nullable AudioCapabilities audioCapabilities;
/**
* @param context A context for registering the receiver.

View File

@ -15,10 +15,13 @@
*/
package com.google.android.exoplayer2.audio;
import static org.checkerframework.checker.nullness.NullnessUtil.castNonNull;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.SystemClock;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
@ -128,10 +131,10 @@ import java.lang.reflect.Method;
private final Listener listener;
private final long[] playheadOffsets;
private AudioTrack audioTrack;
private @Nullable AudioTrack audioTrack;
private int outputPcmFrameSize;
private int bufferSize;
private AudioTimestampPoller audioTimestampPoller;
private @Nullable AudioTimestampPoller audioTimestampPoller;
private int outputSampleRate;
private boolean needsPassthroughWorkarounds;
private long bufferSizeUs;
@ -139,7 +142,7 @@ import java.lang.reflect.Method;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
private Method getLatencyMethod;
private @Nullable Method getLatencyMethod;
private long latencyUs;
private boolean hasData;
@ -205,13 +208,14 @@ import java.lang.reflect.Method;
}
public long getCurrentPositionUs(boolean sourceEnded) {
if (audioTrack.getPlayState() == PLAYSTATE_PLAYING) {
if (Assertions.checkNotNull(this.audioTrack).getPlayState() == PLAYSTATE_PLAYING) {
maybeSampleSyncParams();
}
// If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
// Otherwise, derive a smoothed position by sampling the track's frame position.
long systemTimeUs = System.nanoTime() / 1000;
AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
if (audioTimestampPoller.hasTimestamp()) {
// Calculate the speed-adjusted position using the timestamp (which may be in the future).
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
@ -241,12 +245,12 @@ import java.lang.reflect.Method;
/** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
public void start() {
audioTimestampPoller.reset();
Assertions.checkNotNull(audioTimestampPoller).reset();
}
/** Returns whether the audio track is in the playing state. */
public boolean isPlaying() {
return audioTrack.getPlayState() == PLAYSTATE_PLAYING;
return Assertions.checkNotNull(audioTrack).getPlayState() == PLAYSTATE_PLAYING;
}
/**
@ -257,7 +261,7 @@ import java.lang.reflect.Method;
* @return Whether the caller can write data to the track.
*/
public boolean mayHandleBuffer(long writtenFrames) {
@PlayState int playState = audioTrack.getPlayState();
@PlayState int playState = Assertions.checkNotNull(audioTrack).getPlayState();
if (needsPassthroughWorkarounds) {
// An AC-3 audio track continues to play data written while it is paused. Stop writing so its
// buffer empties. See [Internal: b/18899620].
@ -339,7 +343,7 @@ import java.lang.reflect.Method;
if (stopTimestampUs == C.TIME_UNSET) {
// The audio track is going to be paused, so reset the timestamp poller to ensure it doesn't
// supply an advancing position.
audioTimestampPoller.reset();
Assertions.checkNotNull(audioTimestampPoller).reset();
return true;
}
// We've handled the end of the stream already, so there's no need to pause the track.
@ -388,6 +392,7 @@ import java.lang.reflect.Method;
}
private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) {
AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) {
return;
}
@ -423,7 +428,9 @@ import java.lang.reflect.Method;
// Compute the audio track latency, excluding the latency due to the buffer (leaving
// latency due to the mixer and audio hardware driver).
latencyUs =
(Integer) getLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L - bufferSizeUs;
castNonNull((Integer) getLatencyMethod.invoke(Assertions.checkNotNull(audioTrack)))
* 1000L
- bufferSizeUs;
// Sanity check that the latency is non-negative.
latencyUs = Math.max(latencyUs, 0);
// Sanity check that the latency isn't too large.
@ -457,7 +464,7 @@ import java.lang.reflect.Method;
*/
private boolean forceHasPendingData() {
return needsPassthroughWorkarounds
&& audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED
&& Assertions.checkNotNull(audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED
&& getPlaybackHeadPosition() == 0;
}
@ -483,6 +490,7 @@ import java.lang.reflect.Method;
* @return The playback head position, in frames.
*/
private long getPlaybackHeadPosition() {
AudioTrack audioTrack = Assertions.checkNotNull(this.audioTrack);
if (stopTimestampUs != C.TIME_UNSET) {
// Simulate the playback head position up to the total number of frames submitted.
long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs;