*** Original commit ***

Rollforward of commit 5612ac50a332e425dc130c3c13a139b9e6fce9ec.

*** Reason for rollforward ***

Rollforward after making sure the handler is created
from the playback thread and not from an app thread.
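
For context, the pattern this refers to is visible in the DefaultAudioSink hunk further
down (registerStreamEventCallback): the stream-event Handler is created lazily from code
that already runs on the playback thread, so its callbacks are delivered there. A minimal
illustrative sketch of that pattern, with simplified names and not the actual ExoPlayer code:

import android.os.Handler;
import androidx.annotation.Nullable;

/** Sketch only: lazily creates a Handler bound to the thread that first calls into it. */
final class PlaybackThreadCallbacks {

  @Nullable private Handler handler;

  /**
   * Must be called from the playback thread. The no-argument Handler constructor binds to the
   * calling thread's Looper, so creating the handler here, rather than in a constructor that may
   * run on an app thread, ensures callbacks are delivered on the playback thread.
   */
  public Handler getOrCreateHandler() {
    if (handler == null) {
      handler = new Handler();
    }
    return handler;
  }
}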

*** Original change description ***

Rollback of e1beb1d194

*** Original commit ***

Expose experimental offload scheduling

Add a new scheduling mode that stops ExoPlayer's main loop
when the audio offload buffer is full and resumes it...

***

PiperOrigin-RevId: 316914147
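
For reference, the feature described in the quoted commit was used roughly as follows. This is
an illustrative sketch based on the Javadoc that appears in the diff below; it assumes the
standard DefaultRenderersFactory(Context) and SimpleExoPlayer.Builder(Context, RenderersFactory)
entry points rather than showing any code from this change:

import android.content.Context;
import com.google.android.exoplayer2.DefaultRenderersFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;

final class OffloadSchedulingExample {

  /** Builds a player with audio offload enabled in the renderers factory. */
  static SimpleExoPlayer buildOffloadPlayer(Context context) {
    DefaultRenderersFactory renderersFactory =
        new DefaultRenderersFactory(context).setEnableAudioOffload(true);
    return new SimpleExoPlayer.Builder(context, renderersFactory).build();
  }

  /**
   * Enables the experimental scheduling only while nothing is shown to the user (for example
   * while the app is in the background), since player callbacks may be heavily delayed and the
   * app should not interact with the player while it is on.
   */
  static void onAppVisibilityChanged(SimpleExoPlayer player, boolean inForeground) {
    player.experimental_enableOffloadScheduling(!inForeground);
  }
}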
Authored by olly on 2020-06-17 18:21:05 +01:00; committed by Oliver Woodman
parent a5bc91f09b
commit ffa4ad0e77
12 changed files with 7 additions and 242 deletions


@@ -164,7 +164,6 @@
 * No longer use a `MediaCodec` in audio passthrough mode.
 * Check `DefaultAudioSink` supports passthrough, in addition to checking
 the `AudioCapabilities`
-* Add an experimental scheduling mode to save power in offload.
 ([#7404](https://github.com/google/ExoPlayer/issues/7404)).
 * Adjust input timestamps in `MediaCodecRenderer` to account for the
 Codec2 MP3 decoder having lower timestamps on the output side.


@@ -219,20 +219,12 @@ public class DefaultRenderersFactory implements RenderersFactory {
 }
 /**
-* Sets whether audio should be played using the offload path.
-*
-* <p>Audio offload disables ExoPlayer audio processing, but significantly reduces the energy
-* consumption of the playback when {@link
-* ExoPlayer#experimental_enableOffloadScheduling(boolean)} is enabled.
-*
-* <p>Most Android devices can only support one offload {@link android.media.AudioTrack} at a time
-* and can invalidate it at any time. Thus an app can never be guaranteed that it will be able to
-* play in offload.
+* Sets whether audio should be played using the offload path. Audio offload disables audio
+* processors (for example speed adjustment).
 *
 * <p>The default value is {@code false}.
 *
-* @param enableOffload Whether to enable use of audio offload for supported formats, if
-* available.
+* @param enableOffload If audio offload should be used.
 * @return This factory, for convenience.
 */
 public DefaultRenderersFactory setEnableAudioOffload(boolean enableOffload) {
@@ -431,8 +423,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
 * before output. May be empty.
 * @param eventHandler A handler to use when invoking event listeners and outputs.
 * @param eventListener An event listener.
-* @param enableOffload Whether to enable use of audio offload for supported formats, if
-* available.
+* @param enableOffload If the renderer should use audio offload for all supported formats.
 * @param out An array to which the built renderers should be appended.
 */
 protected void buildAudioRenderers(


@@ -20,8 +20,6 @@ import android.os.Looper;
 import androidx.annotation.Nullable;
 import androidx.annotation.VisibleForTesting;
 import com.google.android.exoplayer2.analytics.AnalyticsCollector;
-import com.google.android.exoplayer2.audio.AudioCapabilities;
-import com.google.android.exoplayer2.audio.DefaultAudioSink;
 import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
 import com.google.android.exoplayer2.metadata.MetadataRenderer;
 import com.google.android.exoplayer2.source.ClippingMediaSource;
@@ -599,39 +597,4 @@
 * @see #setPauseAtEndOfMediaItems(boolean)
 */
 boolean getPauseAtEndOfMediaItems();
-/**
-* Enables audio offload scheduling, which runs ExoPlayer's main loop as rarely as possible when
-* playing an audio stream using audio offload.
-*
-* <p>Only use this scheduling mode if the player is not displaying anything to the user. For
-* example when the application is in the background, or the screen is off. The player state
-* (including position) is rarely updated (between 10s and 1min).
-*
-* <p>While offload scheduling is enabled, player events may be delivered severely delayed and
-* apps should not interact with the player. When returning to the foreground, disable offload
-* scheduling before interacting with the player
-*
-* <p>This mode should save significant power when the phone is playing offload audio with the
-* screen off.
-*
-* <p>This mode only has an effect when playing an audio track in offload mode, which requires all
-* the following:
-*
-* <ul>
-* <li>audio offload rendering is enabled in {@link
-* DefaultRenderersFactory#setEnableAudioOffload} or the equivalent option passed to {@link
-* com.google.android.exoplayer2.audio.DefaultAudioSink#DefaultAudioSink(AudioCapabilities,
-* DefaultAudioSink.AudioProcessorChain, boolean, boolean)}.
-* <li>an audio track is playing in a format which the device supports offloading (for example
-* MP3 or AAC).
-* <li>The {@link com.google.android.exoplayer2.audio.AudioSink} is playing with an offload
-* {@link android.media.AudioTrack}.
-* </ul>
-*
-* <p>This method is experimental, and will be renamed or removed in a future release.
-*
-* @param enableOffloadScheduling Whether to enable offload scheduling.
-*/
-void experimental_enableOffloadScheduling(boolean enableOffloadScheduling);
 }


@@ -202,11 +202,6 @@ import java.util.concurrent.TimeoutException;
 internalPlayer.experimental_throwWhenStuckBuffering();
 }
-@Override
-public void experimental_enableOffloadScheduling(boolean enableOffloadScheduling) {
-internalPlayer.experimental_enableOffloadScheduling(enableOffloadScheduling);
-}
 @Override
 @Nullable
 public AudioComponent getAudioComponent() {


@@ -94,15 +94,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 private static final int ACTIVE_INTERVAL_MS = 10;
 private static final int IDLE_INTERVAL_MS = 1000;
-/**
-* Duration under which pausing the main DO_SOME_WORK loop is not expected to yield significant
-* power saving.
-*
-* <p>This value is probably too high, power measurements are needed adjust it, but as renderer
-* sleep is currently only implemented for audio offload, which uses buffer much bigger than 2s,
-* this does not matter for now.
-*/
-private static final long MIN_RENDERER_SLEEP_DURATION_MS = 2000;
 private final Renderer[] renderers;
 private final RendererCapabilities[] rendererCapabilities;
@@ -136,8 +127,6 @@
 @Player.RepeatMode private int repeatMode;
 private boolean shuffleModeEnabled;
 private boolean foregroundMode;
-private boolean requestForRendererSleep;
-private boolean offloadSchedulingEnabled;
 private int enabledRendererCount;
 @Nullable private SeekPosition pendingInitialSeekPosition;
@@ -210,13 +199,6 @@
 throwWhenStuckBuffering = true;
 }
-public void experimental_enableOffloadScheduling(boolean enableOffloadScheduling) {
-offloadSchedulingEnabled = enableOffloadScheduling;
-if (!enableOffloadScheduling) {
-handler.sendEmptyMessage(MSG_DO_SOME_WORK);
-}
-}
 public void prepare() {
 handler.obtainMessage(MSG_PREPARE).sendToTarget();
 }
@@ -903,13 +885,12 @@
 if ((shouldPlayWhenReady() && playbackInfo.playbackState == Player.STATE_READY)
 || playbackInfo.playbackState == Player.STATE_BUFFERING) {
-maybeScheduleWakeup(operationStartTimeMs, ACTIVE_INTERVAL_MS);
+scheduleNextWork(operationStartTimeMs, ACTIVE_INTERVAL_MS);
 } else if (enabledRendererCount != 0 && playbackInfo.playbackState != Player.STATE_ENDED) {
 scheduleNextWork(operationStartTimeMs, IDLE_INTERVAL_MS);
 } else {
 handler.removeMessages(MSG_DO_SOME_WORK);
 }
-requestForRendererSleep = false; // A sleep request is only valid for the current doSomeWork.
 TraceUtil.endSection();
 }
@@ -919,14 +900,6 @@
 handler.sendEmptyMessageAtTime(MSG_DO_SOME_WORK, thisOperationStartTimeMs + intervalMs);
 }
-private void maybeScheduleWakeup(long operationStartTimeMs, long intervalMs) {
-if (offloadSchedulingEnabled && requestForRendererSleep) {
-return;
-}
-scheduleNextWork(operationStartTimeMs, intervalMs);
-}
 private void seekToInternal(SeekPosition seekPosition) throws ExoPlaybackException {
 playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1);
@@ -2095,24 +2068,6 @@
 joining,
 mayRenderStartOfStream,
 periodHolder.getRendererOffset());
-renderer.handleMessage(
-Renderer.MSG_SET_WAKEUP_LISTENER,
-new Renderer.WakeupListener() {
-@Override
-public void onSleep(long wakeupDeadlineMs) {
-// Do not sleep if the expected sleep time is not long enough to save significant power.
-if (wakeupDeadlineMs >= MIN_RENDERER_SLEEP_DURATION_MS) {
-requestForRendererSleep = true;
-}
-}
-@Override
-public void onWakeup() {
-handler.sendEmptyMessage(MSG_DO_SOME_WORK);
-}
-});
 mediaClock.onRendererEnabled(renderer);
 // Start the renderer if playing.
 if (playing) {


@@ -46,30 +46,6 @@ import java.lang.annotation.RetentionPolicy;
 */
 public interface Renderer extends PlayerMessage.Target {
-/**
-* Some renderers can signal when {@link #render(long, long)} should be called.
-*
-* <p>That allows the player to sleep until the next wakeup, instead of calling {@link
-* #render(long, long)} in a tight loop. The aim of this interrupt based scheduling is to save
-* power.
-*/
-interface WakeupListener {
-/**
-* The renderer no longer needs to render until the next wakeup.
-*
-* @param wakeupDeadlineMs Maximum time in milliseconds until {@link #onWakeup()} will be
-* called.
-*/
-void onSleep(long wakeupDeadlineMs);
-/**
-* The renderer needs to render some frames. The client should call {@link #render(long, long)}
-* at its earliest convenience.
-*/
-void onWakeup();
-}
 /**
 * The type of a message that can be passed to a video renderer via {@link
 * ExoPlayer#createMessage(Target)}. The message payload should be the target {@link Surface}, or
@@ -161,14 +137,6 @@ public interface Renderer extends PlayerMessage.Target {
 * representing the audio session ID that will be attached to the underlying audio track.
 */
 int MSG_SET_AUDIO_SESSION_ID = 102;
-/**
-* A type of a message that can be passed to a {@link Renderer} via {@link
-* ExoPlayer#createMessage(Target)}, to inform the renderer that it can schedule waking up another
-* component.
-*
-* <p>The message payload must be a {@link WakeupListener} instance.
-*/
-int MSG_SET_WAKEUP_LISTENER = 103;
 /**
 * Applications or extensions may define custom {@code MSG_*} constants that can be passed to
 * renderers. These custom constants must be greater than or equal to this value.


@@ -633,11 +633,6 @@ public class SimpleExoPlayer extends BasePlayer
 C.TRACK_TYPE_AUDIO, Renderer.MSG_SET_SKIP_SILENCE_ENABLED, skipSilenceEnabled);
 }
-@Override
-public void experimental_enableOffloadScheduling(boolean enableOffloadScheduling) {
-player.experimental_enableOffloadScheduling(enableOffloadScheduling);
-}
 @Override
 @Nullable
 public AudioComponent getAudioComponent() {


@@ -90,17 +90,6 @@ public interface AudioSink {
 * @param skipSilenceEnabled Whether skipping silences is enabled.
 */
 void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled);
-/** Called when the offload buffer has been partially emptied. */
-default void onOffloadBufferEmptying() {}
-/**
-* Called when the offload buffer has been filled completely.
-*
-* @param bufferEmptyingDeadlineMs Maximum time in milliseconds until {@link
-* #onOffloadBufferEmptying()} will be called.
-*/
-default void onOffloadBufferFull(long bufferEmptyingDeadlineMs) {}
 }
 /**


@@ -335,11 +335,6 @@ import java.lang.reflect.Method;
 return bufferSize - bytesPending;
 }
-/** Returns the duration of audio that is buffered but unplayed. */
-public long getPendingBufferDurationMs(long writtenFrames) {
-return C.usToMs(framesToDurationUs(writtenFrames - getPlaybackHeadPosition()));
-}
 /** Returns whether the track is in an invalid state and must be recreated. */
 public boolean isStalled(long writtenFrames) {
 return forceResetWorkaroundTimeMs != C.TIME_UNSET


@@ -20,7 +20,6 @@ import android.media.AudioFormat;
 import android.media.AudioManager;
 import android.media.AudioTrack;
 import android.os.ConditionVariable;
-import android.os.Handler;
 import android.os.SystemClock;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
@@ -275,7 +274,6 @@ public final class DefaultAudioSink implements AudioSink {
 private final AudioTrackPositionTracker audioTrackPositionTracker;
 private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
 private final boolean enableOffload;
-@MonotonicNonNull private StreamEventCallback offloadStreamEventCallback;
 @Nullable private Listener listener;
 /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize(long)}). */
@@ -306,7 +304,7 @@
 @Nullable private ByteBuffer inputBuffer;
 private int inputBufferAccessUnitCount;
 @Nullable private ByteBuffer outputBuffer;
-@MonotonicNonNull private byte[] preV21OutputBuffer;
+private byte[] preV21OutputBuffer;
 private int preV21OutputBufferOffset;
 private int drainingAudioProcessorIndex;
 private boolean handledEndOfStream;
@@ -368,10 +366,7 @@
 * be available when float output is in use.
 * @param enableOffload Whether audio offloading is enabled. If an audio format can be both played
 * with offload and encoded audio passthrough, it will be played in offload. Audio offload is
-* supported starting with API 29 ({@link android.os.Build.VERSION_CODES#Q}). Most Android
-* devices can only support one offload {@link android.media.AudioTrack} at a time and can
-* invalidate it at any time. Thus an app can never be guaranteed that it will be able to play
-* in offload.
+* supported starting with API 29 ({@link android.os.Build.VERSION_CODES#Q}).
 */
 public DefaultAudioSink(
 @Nullable AudioCapabilities audioCapabilities,
@@ -409,7 +404,6 @@
 activeAudioProcessors = new AudioProcessor[0];
 outputBuffers = new ByteBuffer[0];
 mediaPositionParametersCheckpoints = new ArrayDeque<>();
-offloadStreamEventCallback = Util.SDK_INT >= 29 ? new StreamEventCallback() : null;
 }
 // AudioSink implementation.
@@ -569,9 +563,6 @@
 audioTrack =
 Assertions.checkNotNull(configuration)
 .buildAudioTrack(tunneling, audioAttributes, audioSessionId);
-if (isOffloadedPlayback(audioTrack)) {
-registerStreamEventCallback(audioTrack);
-}
 int audioSessionId = audioTrack.getAudioSessionId();
 if (enablePreV21AudioSessionWorkaround) {
 if (Util.SDK_INT < 21) {
@@ -753,16 +744,6 @@
 return false;
 }
-@RequiresApi(29)
-private void registerStreamEventCallback(AudioTrack audioTrack) {
-if (offloadStreamEventCallback == null) {
-// Must be lazily initialized to receive stream event callbacks on the current (playback)
-// thread as the constructor is not called in the playback thread.
-offloadStreamEventCallback = new StreamEventCallback();
-}
-offloadStreamEventCallback.register(audioTrack);
-}
 private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
 int count = activeAudioProcessors.length;
 int index = count;
@@ -841,15 +822,6 @@
 throw new WriteException(bytesWritten);
 }
-if (playing
-&& listener != null
-&& bytesWritten < bytesRemaining
-&& isOffloadedPlayback(audioTrack)) {
-long pendingDurationMs =
-audioTrackPositionTracker.getPendingBufferDurationMs(writtenEncodedFrames);
-listener.onOffloadBufferFull(pendingDurationMs);
-}
 if (configuration.isInputPcm) {
 writtenPcmBytes += bytesWritten;
 }
@@ -1068,9 +1040,6 @@
 if (audioTrackPositionTracker.isPlaying()) {
 audioTrack.pause();
 }
-if (isOffloadedPlayback(audioTrack)) {
-Assertions.checkNotNull(offloadStreamEventCallback).unregister(audioTrack);
-}
 // AudioTrack.release can take some time, so we call it on a background thread.
 final AudioTrack toRelease = audioTrack;
 audioTrack = null;
@@ -1260,36 +1229,6 @@
 audioFormat, audioAttributes.getAudioAttributesV21());
 }
-private static boolean isOffloadedPlayback(AudioTrack audioTrack) {
-return Util.SDK_INT >= 29 && audioTrack.isOffloadedPlayback();
-}
-@RequiresApi(29)
-private final class StreamEventCallback extends AudioTrack.StreamEventCallback {
-private final Handler handler;
-public StreamEventCallback() {
-handler = new Handler();
-}
-@Override
-public void onDataRequest(AudioTrack track, int size) {
-Assertions.checkState(track == DefaultAudioSink.this.audioTrack);
-if (listener != null) {
-listener.onOffloadBufferEmptying();
-}
-}
-public void register(AudioTrack audioTrack) {
-audioTrack.registerStreamEventCallback(handler::post, this);
-}
-public void unregister(AudioTrack audioTrack) {
-audioTrack.unregisterStreamEventCallback(this);
-handler.removeCallbacksAndMessages(/* token= */ null);
-}
-}
 private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
 int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
 int channelConfig = AudioFormat.CHANNEL_OUT_MONO;


@@ -92,8 +92,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
 private boolean allowFirstBufferPositionDiscontinuity;
 private boolean allowPositionDiscontinuity;
-@Nullable private WakeupListener wakeupListener;
 /**
 * @param context A context.
 * @param mediaCodecSelector A decoder selector.
@@ -698,9 +696,6 @@
 case MSG_SET_AUDIO_SESSION_ID:
 audioSink.setAudioSessionId((Integer) message);
 break;
-case MSG_SET_WAKEUP_LISTENER:
-this.wakeupListener = (WakeupListener) message;
-break;
 default:
 super.handleMessage(messageType, message);
 break;
@@ -880,19 +875,5 @@
 eventDispatcher.skipSilenceEnabledChanged(skipSilenceEnabled);
 onAudioTrackSkipSilenceEnabledChanged(skipSilenceEnabled);
 }
-@Override
-public void onOffloadBufferEmptying() {
-if (wakeupListener != null) {
-wakeupListener.onWakeup();
-}
-}
-@Override
-public void onOffloadBufferFull(long bufferEmptyingDeadlineMs) {
-if (wakeupListener != null) {
-wakeupListener.onSleep(bufferEmptyingDeadlineMs);
-}
-}
 }
 }


@@ -465,9 +465,4 @@ public abstract class StubExoPlayer extends BasePlayer implements ExoPlayer {
 public boolean getPauseAtEndOfMediaItems() {
 throw new UnsupportedOperationException();
 }
-@Override
-public void experimental_enableOffloadScheduling(boolean enableOffloadScheduling) {
-throw new UnsupportedOperationException();
-}
 }