Use Clock in audio renderers and sinks

This makes tests more realistic because the time values these components report match the rest of the simulated test time.

It also prevents flakiness in (yet to be written) tests that may not correctly advance the playback time in the position tracker.

PiperOrigin-RevId: 546011842
tonihei 2023-07-06 17:31:17 +01:00 committed by Rohit Singh
parent a9be9caf40
commit d4c5414eff
7 changed files with 38 additions and 18 deletions
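The intended test setup, as a minimal sketch (not part of this commit; it assumes the FakeClock and TestExoPlayerBuilder utilities from androidx.media3.test.utils): the test player is built with a FakeClock, the player hands that clock to its renderers, and with this change the audio renderers forward it to the AudioSink, so every time read in the diffs below runs on simulated time.

import android.content.Context;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.test.utils.FakeClock;
import androidx.media3.test.utils.TestExoPlayerBuilder;

final class SimulatedClockPlayerFactory {
  private SimulatedClockPlayerFactory() {}

  // Builds a test player whose renderers (and, after this change, their AudioSink)
  // all observe the same auto-advancing fake clock instead of the system clock.
  static ExoPlayer create(Context context) {
    return new TestExoPlayerBuilder(context)
        .setClock(new FakeClock(/* isAutoAdvancing= */ true))
        .build();
  }
}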

AudioSink.java

@ -29,6 +29,7 @@ import androidx.media3.common.Format;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.PlaybackParameters;
import androidx.media3.common.Player;
+ import androidx.media3.common.util.Clock;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.analytics.PlayerId;
@ -342,6 +343,13 @@ public interface AudioSink {
*/
default void setPlayerId(@Nullable PlayerId playerId) {}
+ /**
+ * Sets the {@link Clock} to use for timing in this audio sink.
+ *
+ * @param clock The {@link Clock}.
+ */
+ default void setClock(Clock clock) {}
/**
* Returns whether the sink supports a given {@link Format}.
*
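Because setClock() has a no-op default, existing AudioSink implementations keep compiling unchanged; a sink that wants clock-driven timing overrides it and keeps Clock.DEFAULT as the fallback, which is what DefaultAudioSink's position tracker does further down. A minimal sketch of that pattern (the class name is illustrative, and the remaining AudioSink methods are omitted):

import androidx.media3.common.util.Clock;
import androidx.media3.exoplayer.audio.AudioSink;

abstract class ClockAwareAudioSink implements AudioSink {
  // Production behavior is unchanged: Clock.DEFAULT delegates to the real system clocks.
  private Clock clock = Clock.DEFAULT;

  @Override
  public void setClock(Clock clock) {
    this.clock = clock;
  }

  // All elapsed-time reads go through the injected clock so tests can control them.
  protected final long elapsedRealtimeMs() {
    return clock.elapsedRealtime();
  }
}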

AudioTrackPositionTracker.java

@ -17,16 +17,15 @@ package androidx.media3.exoplayer.audio;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.castNonNull;
+ import static androidx.media3.common.util.Util.msToUs;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
- import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
- import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Util;
@ -214,7 +213,7 @@ import java.lang.reflect.Method;
private long sumRawPlaybackHeadPosition;
- private Clock clock = Clock.DEFAULT;
+ private Clock clock;
/**
* Creates a new audio track position tracker.
@ -231,6 +230,7 @@ import java.lang.reflect.Method;
}
}
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
+ clock = Clock.DEFAULT;
}
/**
@ -344,7 +344,7 @@ import java.lang.reflect.Method;
Util.getPlayoutDurationForMediaDuration(
mediaDurationSinceLastPositionUs, audioTrackPlaybackSpeed);
long playoutStartSystemTimeMs =
- System.currentTimeMillis() - Util.usToMs(playoutDurationSinceLastPositionUs);
+ clock.currentTimeMillis() - Util.usToMs(playoutDurationSinceLastPositionUs);
listener.onPositionAdvancing(playoutStartSystemTimeMs);
}
@ -418,7 +418,7 @@ import java.lang.reflect.Method;
public boolean isStalled(long writtenFrames) {
return forceResetWorkaroundTimeMs != C.TIME_UNSET
&& writtenFrames > 0
- && SystemClock.elapsedRealtime() - forceResetWorkaroundTimeMs
+ && clock.elapsedRealtime() - forceResetWorkaroundTimeMs
>= FORCE_RESET_WORKAROUND_TIMEOUT_MS;
}
@ -430,7 +430,7 @@ import java.lang.reflect.Method;
*/
public void handleEndOfStream(long writtenFrames) {
stopPlaybackHeadPosition = getPlaybackHeadPosition();
- stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
+ stopTimestampUs = msToUs(clock.elapsedRealtime());
endPlaybackHeadPosition = writtenFrames;
}
@ -481,12 +481,11 @@ import java.lang.reflect.Method;
}
/**
- * Set clock used for {@code nanoTime()} requests.
+ * Sets the {@link Clock}.
*
- * @param clock The clock to be used for {@code nanoTime()} requests.
+ * @param clock The {@link Clock}.
*/
- @VisibleForTesting
- /* package */ void setClock(Clock clock) {
+ public void setClock(Clock clock) {
this.clock = clock;
}
@ -629,10 +628,10 @@ import java.lang.reflect.Method;
* @return The playback head position, in frames.
*/
private long getPlaybackHeadPosition() {
- long currentTimeMs = SystemClock.elapsedRealtime();
+ long currentTimeMs = clock.elapsedRealtime();
if (stopTimestampUs != C.TIME_UNSET) {
// Simulate the playback head position up to the total number of frames submitted.
- long elapsedTimeSinceStopUs = (currentTimeMs * 1000) - stopTimestampUs;
+ long elapsedTimeSinceStopUs = msToUs(currentTimeMs) - stopTimestampUs;
long mediaTimeSinceStopUs =
Util.getMediaDurationForPlayoutDuration(elapsedTimeSinceStopUs, audioTrackPlaybackSpeed);
long framesSinceStop = durationUsToFrames(mediaTimeSinceStopUs);
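With Clock.DEFAULT the tracker behaves exactly as before (clock.elapsedRealtime() is SystemClock.elapsedRealtime(), and msToUs(x) is x * 1000 for these values); the difference is that a test clock now controls every read. A small sketch of the determinism this buys, assuming FakeClock from androidx.media3.test.utils (only its constructor, advanceTime and elapsedRealtime are used):

import androidx.media3.test.utils.FakeClock;

final class FakeClockDemo {
  private FakeClockDemo() {}

  static void demo() {
    FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 0);
    long before = fakeClock.elapsedRealtime(); // 0
    fakeClock.advanceTime(/* timeDiffMs= */ 500);
    long after = fakeClock.elapsedRealtime(); // 500
    // A position tracker holding this clock via setClock() sees exactly the simulated
    // 500 ms, so isStalled(), handleEndOfStream() and getPlaybackHeadPosition() become
    // reproducible regardless of how long the test actually takes to run.
    assert after - before == 500;
  }
}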

DecoderAudioRenderer.java

@ -599,6 +599,7 @@ public abstract class DecoderAudioRenderer<
audioSink.disableTunneling();
}
audioSink.setPlayerId(getPlayerId());
+ audioSink.setClock(getClock());
}
@Override

DefaultAudioSink.java

@ -591,6 +591,11 @@ public final class DefaultAudioSink implements AudioSink {
this.playerId = playerId;
}
+ @Override
+ public void setClock(Clock clock) {
+ audioTrackPositionTracker.setClock(clock);
+ }
@Override
public boolean supportsFormat(Format format) {
return getFormatSupport(format) != SINK_FORMAT_UNSUPPORTED;

ForwardingAudioSink.java

@ -22,6 +22,7 @@ import androidx.media3.common.AudioAttributes;
import androidx.media3.common.AuxEffectInfo;
import androidx.media3.common.Format;
import androidx.media3.common.PlaybackParameters;
+ import androidx.media3.common.util.Clock;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.analytics.PlayerId;
import java.nio.ByteBuffer;
@ -46,6 +47,11 @@ public class ForwardingAudioSink implements AudioSink {
sink.setPlayerId(playerId);
}
+ @Override
+ public void setClock(Clock clock) {
+ sink.setClock(clock);
+ }
@Override
public boolean supportsFormat(Format format) {
return sink.supportsFormat(format);
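ForwardingAudioSink only needs to pass the call through, but the override point is also convenient in tests: a wrapping sink can record the clock it receives to verify the renderer-to-sink wiring. An illustrative helper, not part of this commit:

import androidx.annotation.Nullable;
import androidx.media3.common.util.Clock;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.ForwardingAudioSink;

final class ClockCapturingAudioSink extends ForwardingAudioSink {
  @Nullable private Clock capturedClock;

  ClockCapturingAudioSink(AudioSink delegate) {
    super(delegate);
  }

  @Override
  public void setClock(Clock clock) {
    capturedClock = clock;
    super.setClock(clock);
  }

  // A test can assert that this returns the same Clock instance it gave the player.
  @Nullable
  Clock getCapturedClock() {
    return capturedClock;
  }
}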

MediaCodecAudioRenderer.java

@ -611,6 +611,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSink.disableTunneling();
}
audioSink.setPlayerId(getPlayerId());
+ audioSink.setClock(getClock());
}
@Override

MediaCodecRenderer.java

@ -827,7 +827,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
while (bypassRender(positionUs, elapsedRealtimeUs)) {}
TraceUtil.endSection();
} else if (codec != null) {
- long renderStartTimeMs = SystemClock.elapsedRealtime();
+ long renderStartTimeMs = getClock().elapsedRealtime();
TraceUtil.beginSection("drainAndFeed");
while (drainOutputBuffer(positionUs, elapsedRealtimeUs)
&& shouldContinueRendering(renderStartTimeMs)) {}
@ -1139,7 +1139,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
codecOperatingRate = CODEC_OPERATING_RATE_UNSET;
}
onReadyToInitializeCodec(inputFormat);
- codecInitializingTimestamp = SystemClock.elapsedRealtime();
+ codecInitializingTimestamp = getClock().elapsedRealtime();
MediaCodecAdapter.Configuration configuration =
getMediaCodecConfiguration(codecInfo, inputFormat, crypto, codecOperatingRate);
if (Util.SDK_INT >= 31) {
@ -1151,7 +1151,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
} finally {
TraceUtil.endSection();
}
- codecInitializedTimestamp = SystemClock.elapsedRealtime();
+ codecInitializedTimestamp = getClock().elapsedRealtime();
if (!codecInfo.isFormatSupported(inputFormat)) {
Log.w(
@ -1187,7 +1187,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
}
if (getState() == STATE_STARTED) {
- codecHotswapDeadlineMs = SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS;
+ codecHotswapDeadlineMs = getClock().elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS;
}
decoderCounters.decoderInitCount++;
@ -1197,7 +1197,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
private boolean shouldContinueRendering(long renderStartTimeMs) {
return renderTimeLimitMs == C.TIME_UNSET
- || SystemClock.elapsedRealtime() - renderStartTimeMs < renderTimeLimitMs;
+ || getClock().elapsedRealtime() - renderStartTimeMs < renderTimeLimitMs;
}
private boolean hasOutputBuffer() {
@ -1709,7 +1709,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
&& (isSourceReady()
|| hasOutputBuffer()
|| (codecHotswapDeadlineMs != C.TIME_UNSET
- && SystemClock.elapsedRealtime() < codecHotswapDeadlineMs));
+ && getClock().elapsedRealtime() < codecHotswapDeadlineMs));
}
/** Returns the current playback speed, as set by {@link #setPlaybackSpeed}. */
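The MediaCodecRenderer call sites above all follow one pattern: take a timestamp from getClock(), then compare later elapsed-realtime reads against it (render time limit, codec init duration, hotswap deadline). A condensed sketch of that pattern against an injected Clock (the RenderBudget class is illustrative, not part of the change):

import androidx.media3.common.util.Clock;

final class RenderBudget {
  private final Clock clock;
  private final long limitMs;
  private long startTimeMs;

  RenderBudget(Clock clock, long limitMs) {
    this.clock = clock;
    this.limitMs = limitMs;
  }

  void start() {
    startTimeMs = clock.elapsedRealtime();
  }

  // Same shape as shouldContinueRendering(): keep draining/feeding the codec while the
  // elapsed (possibly simulated) time since start() is within the limit.
  boolean shouldContinue() {
    return clock.elapsedRealtime() - startTimeMs < limitMs;
  }
}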