Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)

Compare commits: 9 commits, c8a34ec846 ... d133300627
Commits in this range:
- d133300627
- a9c0349214
- 0dd43b0183
- 0517cea4d2
- 9e80d6d263
- c0e518df97
- a1ed0d4ff6
- e710179fee
- d315d90f7a
@@ -12,6 +12,13 @@
   * Fix issue where media item transition fails due to recoverable renderer
     error during initialization of the next media item
     ([#2229](https://github.com/androidx/media/issues/2229)).
+  * Add `ExoPlayer.setScrubbingModeEnabled(boolean)` method. This optimizes
+    the player for many frequent seeks (for example, from a user dragging a
+    scrubber bar around).
+  * `AdPlaybackState.withAdDurationsUs(long[][])` can be used after ad
+    groups have been removed. The user still needs to pass in an array of
+    durations for removed ad groups which can be empty or null
+    ([#2267](https://github.com/androidx/media/issues/2267)).
 * Transformer:
   * Filling an initial gap (added via `addGap()`) with silent audio now
     requires explicitly setting `setForceAudioTrack(true)` in
@@ -52,6 +59,8 @@
 * IMA extension:
 * Session:
 * UI:
+  * Enable `PlayerSurface` to work with `ExoPlayer.setVideoEffects` and
+    `CompositionPlayer`.
 * Downloads:
   * Add partial download support for progressive streams. Apps can prepare a
     progressive stream with `DownloadHelper`, and request a
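A minimal sketch of how an app might adopt the scrubbing-mode entry above: enable the mode when the user starts dragging a seek bar, seek as the position changes, and disable it on release. The seek-bar listener, names, and position math are illustrative assumptions rather than part of the release notes, and `setScrubbingModeEnabled` is marked `@UnstableApi` in this change.

```java
import android.widget.SeekBar;
import androidx.media3.common.C;
import androidx.media3.exoplayer.ExoPlayer;

/** Illustrative wiring of ExoPlayer's scrubbing mode to a seek bar drag. */
final class ScrubbingSeekBarListener implements SeekBar.OnSeekBarChangeListener {

  private final ExoPlayer player;

  ScrubbingSeekBarListener(ExoPlayer player) {
    this.player = player;
  }

  @Override
  public void onStartTrackingTouch(SeekBar seekBar) {
    // The user started dragging: optimize the player for the burst of seeks that follows.
    player.setScrubbingModeEnabled(true);
  }

  @Override
  public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
    long durationMs = player.getDuration();
    if (fromUser && durationMs != C.TIME_UNSET) {
      // Map the bar position onto the media duration and seek there.
      player.seekTo(durationMs * progress / seekBar.getMax());
    }
  }

  @Override
  public void onStopTrackingTouch(SeekBar seekBar) {
    // The drag ended: return to normal playback scheduling.
    player.setScrubbingModeEnabled(false);
  }
}
```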
api.txt (3 changed lines)
@@ -885,6 +885,7 @@ package androidx.media3.common {
     field public static final int MEDIA_ITEM_TRANSITION_REASON_REPEAT = 0; // 0x0
     field public static final int MEDIA_ITEM_TRANSITION_REASON_SEEK = 2; // 0x2
     field public static final int PLAYBACK_SUPPRESSION_REASON_NONE = 0; // 0x0
+    field public static final int PLAYBACK_SUPPRESSION_REASON_SCRUBBING = 4; // 0x4
     field public static final int PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS = 1; // 0x1
     field public static final int PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT = 3; // 0x3
     field @Deprecated public static final int PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE = 2; // 0x2
@@ -969,7 +970,7 @@ package androidx.media3.common {
   @IntDef({androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS, androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY, androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_REMOTE, androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM, androidx.media3.common.Player.PLAY_WHEN_READY_CHANGE_REASON_SUPPRESSED_TOO_LONG}) @java.lang.annotation.Documented @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) @java.lang.annotation.Target({java.lang.annotation.ElementType.FIELD, java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.PARAMETER, java.lang.annotation.ElementType.LOCAL_VARIABLE, java.lang.annotation.ElementType.TYPE_USE}) public static @interface Player.PlayWhenReadyChangeReason {
   }
 
-  @IntDef({androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_NONE, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT}) @java.lang.annotation.Documented @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) @java.lang.annotation.Target({java.lang.annotation.ElementType.FIELD, java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.PARAMETER, java.lang.annotation.ElementType.LOCAL_VARIABLE, java.lang.annotation.ElementType.TYPE_USE}) public static @interface Player.PlaybackSuppressionReason {
+  @IntDef({androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_NONE, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT, androidx.media3.common.Player.PLAYBACK_SUPPRESSION_REASON_SCRUBBING}) @java.lang.annotation.Documented @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) @java.lang.annotation.Target({java.lang.annotation.ElementType.FIELD, java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.PARAMETER, java.lang.annotation.ElementType.LOCAL_VARIABLE, java.lang.annotation.ElementType.TYPE_USE}) public static @interface Player.PlaybackSuppressionReason {
   }
 
   public static final class Player.PositionInfo {
@@ -1072,14 +1072,23 @@ public final class AdPlaybackState {
   /**
    * Returns an instance with the specified ad durations, in microseconds.
    *
-   * <p>Must only be used if {@link #removedAdGroupCount} is 0.
+   * <p>The number of arrays of durations ({@code adDurations.length}) must always be equal to
+   * {@link #adGroupCount}. This is required even on an instance created with {@link
+   * #withRemovedAdGroupCount(int)}. The array of durations at the index of a removed ad group can
+   * be null or empty.
+   *
+   * @throws IllegalArgumentException if {@code adDurations.length != adGroupCount}.
    */
   @CheckResult
   public AdPlaybackState withAdDurationsUs(long[][] adDurationUs) {
-    checkState(removedAdGroupCount == 0);
+    checkArgument(adDurationUs.length == adGroupCount);
     AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length);
-    for (int adGroupIndex = 0; adGroupIndex < adGroupCount; adGroupIndex++) {
-      adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdDurationsUs(adDurationUs[adGroupIndex]);
+    for (int correctedAdGroupIndex = 0;
+        correctedAdGroupIndex < adGroupCount - removedAdGroupCount;
+        correctedAdGroupIndex++) {
+      adGroups[correctedAdGroupIndex] =
+          adGroups[correctedAdGroupIndex].withAdDurationsUs(
+              adDurationUs[removedAdGroupCount + correctedAdGroupIndex]);
     }
     return new AdPlaybackState(
         adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount);
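To make the updated contract above concrete, here is a small hedged sketch of calling `withAdDurationsUs` on a state that already has a removed ad group: the durations array still has one entry per ad group (including removed ones), and the entry for the removed group may be empty or null. The ad group times and durations are made-up values for illustration.

```java
import androidx.media3.common.AdPlaybackState;

final class AdDurationsAfterRemovalExample {

  static AdPlaybackState updateDurations() {
    // Two ad groups at 10s and 20s into the content; the first one is then marked as removed.
    AdPlaybackState state =
        new AdPlaybackState("adsId", /* adGroupTimesUs...= */ 10_000_000L, 20_000_000L)
            .withAdCount(/* adGroupIndex= */ 0, 1)
            .withAdCount(/* adGroupIndex= */ 1, 1)
            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);

    // adGroupCount is still 2, so withAdDurationsUs expects two arrays; the entry for the
    // removed group (index 0) can be empty (or null), per the updated Javadoc.
    long[][] adDurationsUs = {new long[] {}, new long[] {15_000_000L}};
    return state.withAdDurationsUs(adDurationsUs);
  }
}
```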
@@ -321,7 +321,7 @@ public abstract class BasePlayer implements Player {
     long duration = getDuration();
     return position == C.TIME_UNSET || duration == C.TIME_UNSET
         ? 0
-        : duration == 0 ? 100 : Util.constrainValue((int) ((position * 100) / duration), 0, 100);
+        : duration == 0 ? 100 : Util.constrainValue(Util.percentInt(position, duration), 0, 100);
   }
 
   /**
@@ -1284,11 +1284,17 @@ public interface Player {
   int PLAY_WHEN_READY_CHANGE_REASON_SUPPRESSED_TOO_LONG = 6;
 
   /**
-   * Reason why playback is suppressed even though {@link #getPlayWhenReady()} is {@code true}. One
-   * of {@link #PLAYBACK_SUPPRESSION_REASON_NONE}, {@link
-   * #PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS}, {@link
-   * #PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE} or {@link
-   * #PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT}.
+   * Reason why playback is suppressed even though {@link #getPlayWhenReady()} is {@code true}.
+   *
+   * <p>One of:
+   *
+   * <ul>
+   *   <li>{@link #PLAYBACK_SUPPRESSION_REASON_NONE}
+   *   <li>{@link #PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS}
+   *   <li>{@link #PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE}
+   *   <li>{@link #PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT}
+   *   <li>{@link #PLAYBACK_SUPPRESSION_REASON_SCRUBBING}
+   * </ul>
    */
   // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility
   // with Kotlin usages from before TYPE_USE was added.
@@ -1300,7 +1306,8 @@ public interface Player {
     PLAYBACK_SUPPRESSION_REASON_NONE,
     PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS,
     PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_ROUTE,
-    PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT
+    PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT,
+    PLAYBACK_SUPPRESSION_REASON_SCRUBBING
   })
   @interface PlaybackSuppressionReason {}
 
@@ -1321,6 +1328,9 @@ public interface Player {
    */
   int PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT = 3;
 
+  /** Playback is suppressed because the player is currently scrubbing. */
+  int PLAYBACK_SUPPRESSION_REASON_SCRUBBING = 4;
+
   /**
    * Repeat modes for playback. One of {@link #REPEAT_MODE_OFF}, {@link #REPEAT_MODE_ONE} or {@link
    * #REPEAT_MODE_ALL}.
@@ -1224,15 +1224,26 @@ public final class Util {
   }
 
   /**
-   * Returns the percentage of numerator divided by denominator. Note that this may return {@link
-   * Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY} or {@link Float#NaN} if the
-   * denominator is zero.
-   *
-   * @param numerator The numerator.
-   * @param denominator The denominator.
+   * Returns the integer percentage of {@code numerator} divided by {@code denominator}. This uses
+   * integer arithmetic (round down).
    */
   @UnstableApi
-  public static float percent(long numerator, long denominator) {
+  public static int percentInt(long numerator, long denominator) {
+    long numeratorTimes100 = LongMath.saturatedMultiply(numerator, 100);
+    long result =
+        numeratorTimes100 != Long.MAX_VALUE && numeratorTimes100 != Long.MIN_VALUE
+            ? numeratorTimes100 / denominator
+            : (numerator / (denominator / 100));
+    return Ints.checkedCast(result);
+  }
+
+  /**
+   * Returns the floating point percentage of {@code numerator} divided by {@code denominator}. Note
+   * that this may return {@link Float#POSITIVE_INFINITY}, {@link Float#NEGATIVE_INFINITY} or {@link
+   * Float#NaN} if the denominator is zero.
+   */
+  @UnstableApi
+  public static float percentFloat(long numerator, long denominator) {
     if (denominator != 0 && numerator == denominator) {
       return 100f;
     }
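The split into `percentInt` and `percentFloat` above exists because the old inline `(position * 100) / duration` pattern (still visible in the `BasePlayer` hunk earlier) overflows `long` for very large positions. A short hedged illustration of the difference, assuming the new `Util` methods exactly as introduced in this diff:

```java
import androidx.media3.common.util.Util;

final class PercentExample {

  static void demo() {
    long position = Long.MAX_VALUE / 4;
    long duration = Long.MAX_VALUE / 2;

    // The old inline pattern overflows: position * 100 wraps around, so the result is garbage.
    int overflowed = (int) ((position * 100) / duration);

    // percentInt saturates the multiplication and falls back to numerator / (denominator / 100),
    // so the expected integer percentage (50) is returned even for values near Long.MAX_VALUE.
    int safe = Util.percentInt(position, duration);

    // percentFloat keeps the old floating-point behaviour, including +/-Infinity and NaN
    // when the denominator is zero.
    float ratio = Util.percentFloat(position, duration);

    System.out.println(overflowed + " vs " + safe + " (" + ratio + "%)");
  }
}
```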
@@ -1091,4 +1091,110 @@ public class AdPlaybackStateTest {
     assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle()).ids[1]).isNull();
     assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle())).isEqualTo(adGroup);
   }
+
+  @Test
+  public void setDurationsUs_withRemovedAdGroups_updatedCorrectlyAndSafely() {
+    AdPlaybackState adPlaybackState =
+        new AdPlaybackState("adsId")
+            .withLivePostrollPlaceholderAppended(false)
+            .withNewAdGroup(/* adGroupIndex= */ 0, 10_000)
+            .withAdCount(/* adGroupIndex= */ 0, 1)
+            .withAvailableAdMediaItem(
+                /* adGroupIndex= */ 0,
+                /* adIndexInAdGroup= */ 0,
+                MediaItem.fromUri("http://example.com/0-0"))
+            .withNewAdGroup(/* adGroupIndex= */ 1, 11_000)
+            .withAdCount(/* adGroupIndex= */ 1, 2)
+            .withAvailableAdMediaItem(
+                /* adGroupIndex= */ 1,
+                /* adIndexInAdGroup= */ 0,
+                MediaItem.fromUri("http://example.com/1-0"))
+            .withAvailableAdMediaItem(
+                /* adGroupIndex= */ 1,
+                /* adIndexInAdGroup= */ 1,
+                MediaItem.fromUri("http://example.com/1-1"))
+            .withNewAdGroup(/* adGroupIndex= */ 2, 12_000)
+            .withAdCount(/* adGroupIndex= */ 2, 1)
+            .withAvailableAdMediaItem(
+                /* adGroupIndex= */ 2,
+                /* adIndexInAdGroup= */ 0,
+                MediaItem.fromUri("http://example.com/2-0"));
+    long[][] adDurationsUs = {
+      new long[] {10L}, new long[] {20L, 21L}, new long[] {30L}, new long[] {C.TIME_END_OF_SOURCE}
+    };
+
+    adPlaybackState =
+        adPlaybackState
+            .withAdDurationsUs(adDurationsUs)
+            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);
+
+    assertThat(adPlaybackState.adGroupCount).isEqualTo(4);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).durationsUs).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).count).isEqualTo(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).states).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).isPlaceholder).isFalse();
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).mediaItems).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).ids).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
+        .asList()
+        .containsExactly(20L, 21L)
+        .inOrder();
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
+        .asList()
+        .containsExactly(30L);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
+        .asList()
+        .containsExactly(C.TIME_END_OF_SOURCE);
+
+    adDurationsUs[1][0] = 120L;
+    adDurationsUs[1][1] = 121L;
+    adPlaybackState = adPlaybackState.withAdDurationsUs(adDurationsUs);
+
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
+        .asList()
+        .containsExactly(120L, 121L)
+        .inOrder();
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
+        .asList()
+        .containsExactly(30L);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
+        .asList()
+        .containsExactly(C.TIME_END_OF_SOURCE);
+
+    adDurationsUs[0] = null;
+    adDurationsUs[1] = null;
+    adDurationsUs[2][0] = C.TIME_UNSET;
+    adPlaybackState =
+        adPlaybackState
+            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 2)
+            .withAdDurationsUs(adDurationsUs);
+
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
+        .asList()
+        .containsExactly(C.TIME_UNSET);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
+        .asList()
+        .containsExactly(C.TIME_END_OF_SOURCE);
+
+    adDurationsUs[2] = null;
+    adDurationsUs[3][0] = 0L;
+    adPlaybackState =
+        adPlaybackState
+            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 3)
+            .withAdDurationsUs(adDurationsUs);
+
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs).hasLength(0);
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
+        .asList()
+        .containsExactly(0L);
+
+    adDurationsUs[3] = null;
+    adPlaybackState =
+        adPlaybackState
+            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 4)
+            .withAdDurationsUs(adDurationsUs);
+
+    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs).hasLength(0);
+  }
 }
@@ -27,7 +27,7 @@ import static androidx.media3.common.util.Util.maxValue;
 import static androidx.media3.common.util.Util.minValue;
 import static androidx.media3.common.util.Util.parseXsDateTime;
 import static androidx.media3.common.util.Util.parseXsDuration;
-import static androidx.media3.common.util.Util.percent;
+import static androidx.media3.common.util.Util.percentFloat;
 import static androidx.media3.common.util.Util.unescapeFileName;
 import static androidx.media3.test.utils.TestUtil.buildTestData;
 import static androidx.media3.test.utils.TestUtil.buildTestString;
@@ -149,31 +149,53 @@ public class UtilTest {
   }
 
   @Test
-  public void percent_numeratorEqualToDenominator_returnsOneHundred() {
+  public void percentInt_smallValues() {
+    assertThat(Util.percentInt(3, 9)).isEqualTo(33);
+    assertThat(Util.percentInt(3, 3)).isEqualTo(100);
+  }
+
+  @Test
+  public void percentInt_smallNegativeValues() {
+    assertThat(Util.percentInt(-3, -9)).isEqualTo(33);
+    assertThat(Util.percentInt(-3, -3)).isEqualTo(100);
+  }
+
+  @Test
+  public void percentInt_largeValuesDontOverflow() {
+    assertThat(Util.percentInt(Long.MAX_VALUE / 4, Long.MAX_VALUE / 2)).isEqualTo(50);
+  }
+
+  @Test
+  public void percentInt_largeNegativeValuesDontOverflow() {
+    assertThat(Util.percentInt(Long.MIN_VALUE / 4, Long.MIN_VALUE / 2)).isEqualTo(50);
+  }
+
+  @Test
+  public void percentFloat_numeratorEqualToDenominator_returnsOneHundred() {
     // With numerator and denominator both being 812345L, the percentage calculated in another way
     // (numerator * 100f / denominator) will be 99.99999f. We then use this value to verify that
     // this doesn't happen for Util.percent() method.
-    assertThat(percent(812345L, 812345L)).isEqualTo(100f);
+    assertThat(percentFloat(812345L, 812345L)).isEqualTo(100f);
   }
 
   @Test
-  public void percent_numeratorNotEqualToDenominator_returnsCorrectValue() {
-    assertThat(percent(500L, 2000L)).isEqualTo(25f);
+  public void percentFloat_numeratorNotEqualToDenominator_returnsCorrectValue() {
+    assertThat(percentFloat(500L, 2000L)).isEqualTo(25f);
   }
 
   @Test
-  public void percent_positiveNumeratorAndZeroDenominator_returnsPositiveInfinity() {
-    assertThat(percent(1L, 0L)).isPositiveInfinity();
+  public void percentFloat_positiveNumeratorAndZeroDenominator_returnsPositiveInfinity() {
+    assertThat(percentFloat(1L, 0L)).isPositiveInfinity();
   }
 
   @Test
-  public void percent_negativeNumeratorAndZeroDenominator_returnsNegativeInfinity() {
-    assertThat(percent(-1L, 0L)).isNegativeInfinity();
+  public void percentFloat_negativeNumeratorAndZeroDenominator_returnsNegativeInfinity() {
+    assertThat(percentFloat(-1L, 0L)).isNegativeInfinity();
   }
 
   @Test
-  public void percent_numeratorAndDenominatorAreBothZero_returnsNaN() {
-    assertThat(percent(0L, 0L)).isNaN();
+  public void percentFloat_numeratorAndDenominatorAreBothZero_returnsNaN() {
+    assertThat(percentFloat(0L, 0L)).isNaN();
   }
 
   @Test
@@ -27,6 +27,7 @@ import androidx.media3.common.ParserException;
 import androidx.media3.test.utils.TestUtil;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SdkSuppress;
 import com.google.common.io.Files;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -48,6 +49,7 @@ import org.junit.runner.RunWith;
  * <p>This test needs to run as an androidTest because robolectric's BitmapFactory is not fully
  * functional.
  */
+@SdkSuppress(minSdkVersion = 22) // This test OOMs on the API 21 emulator used in CI.
 @RunWith(AndroidJUnit4.class)
 public class DataSourceBitmapLoaderTest {
 
@@ -21,6 +21,7 @@ import androidx.media3.test.utils.DataSourceContractTest;
 import androidx.media3.test.utils.TestUtil;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SdkSuppress;
 import com.google.common.collect.ImmutableList;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -30,6 +31,8 @@ import org.junit.runner.RunWith;
  * {@link DataSource} contract tests for {@link FileDescriptorDataSource} using {@link
  * AssetFileDescriptor}.
  */
+// TODO: b/407727748 - Run this on lower API levels when it's no longer flaky.
+@SdkSuppress(minSdkVersion = 29)
 @RunWith(AndroidJUnit4.class)
 public class FileDescriptorDataSourceUsingAssetFileDescriptorContractTest
     extends DataSourceContractTest {
@@ -485,6 +485,9 @@ public interface ExoPlayer extends Player {
    * <p>If enabled, ExoPlayer's playback loop will run as rarely as possible by scheduling work
    * for when {@link Renderer} progress can be made.
    *
+   * <p>If a custom {@link AudioSink} is used then it must correctly implement {@link
+   * AudioSink#getAudioTrackBufferSizeUs()} to enable dynamic scheduling for audio playback.
+   *
    * <p>This method is experimental, and will be renamed or removed in a future release.
    *
    * @param dynamicSchedulingEnabled Whether to enable dynamic scheduling.
@@ -1449,6 +1452,20 @@ public interface ExoPlayer extends Player {
   @UnstableApi
   boolean getSkipSilenceEnabled();
 
+  /**
+   * Sets whether to optimize the player for scrubbing (many frequent seeks).
+   *
+   * <p>The player may consume more resources in this mode, so it should only be used for short
+   * periods of time in response to user interaction (e.g. dragging on a progress bar UI element).
+   *
+   * <p>During scrubbing mode playback is {@linkplain Player#getPlaybackSuppressionReason()
+   * suppressed} with {@link Player#PLAYBACK_SUPPRESSION_REASON_SCRUBBING}.
+   *
+   * @param scrubbingModeEnabled Whether scrubbing mode should be enabled.
+   */
+  @UnstableApi
+  void setScrubbingModeEnabled(boolean scrubbingModeEnabled);
+
   /**
    * Sets a {@link List} of {@linkplain Effect video effects} that will be applied to each video
    * frame.
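Per the new Javadoc above, playback is suppressed with `PLAYBACK_SUPPRESSION_REASON_SCRUBBING` while scrubbing mode is enabled, which apps can observe through the existing listener callback. A hedged sketch of such an observer follows; the log tag and messages are illustrative:

```java
import androidx.media3.common.Player;
import androidx.media3.common.util.Log;

/** Illustrative listener that logs when playback is suppressed because of scrubbing. */
final class ScrubbingSuppressionLogger implements Player.Listener {

  private static final String TAG = "ScrubbingSuppression";

  @Override
  public void onPlaybackSuppressionReasonChanged(
      @Player.PlaybackSuppressionReason int playbackSuppressionReason) {
    if (playbackSuppressionReason == Player.PLAYBACK_SUPPRESSION_REASON_SCRUBBING) {
      Log.d(TAG, "Playback suppressed while the user is scrubbing");
    } else if (playbackSuppressionReason == Player.PLAYBACK_SUPPRESSION_REASON_NONE) {
      Log.d(TAG, "Playback no longer suppressed");
    }
  }
}
```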
@@ -35,7 +35,6 @@ import static androidx.media3.exoplayer.Renderer.MSG_SET_PRIORITY;
 import static androidx.media3.exoplayer.Renderer.MSG_SET_SCALING_MODE;
 import static androidx.media3.exoplayer.Renderer.MSG_SET_SKIP_SILENCE_ENABLED;
 import static androidx.media3.exoplayer.Renderer.MSG_SET_VIDEO_EFFECTS;
-import static androidx.media3.exoplayer.Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER;
 import static androidx.media3.exoplayer.Renderer.MSG_SET_VIDEO_OUTPUT_RESOLUTION;
 import static java.lang.Math.max;
 import static java.lang.Math.min;
@@ -182,6 +181,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
   private @DiscontinuityReason int pendingDiscontinuityReason;
   private boolean pendingDiscontinuity;
   private boolean foregroundMode;
+  private boolean scrubbingModeEnabled;
   private SeekParameters seekParameters;
   private ShuffleOrder shuffleOrder;
   private PreloadConfiguration preloadConfiguration;
@@ -370,7 +370,8 @@
             playbackInfoUpdateListener,
             playerId,
             builder.playbackLooperProvider,
-            preloadConfiguration);
+            preloadConfiguration,
+            frameMetadataListener);
     Looper playbackLooper = internalPlayer.getPlaybackLooper();
 
     volume = 1;
@@ -447,8 +448,6 @@
     sendRendererMessage(
         TRACK_TYPE_VIDEO, MSG_SET_CHANGE_FRAME_RATE_STRATEGY, videoChangeFrameRateStrategy);
     sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_SKIP_SILENCE_ENABLED, skipSilenceEnabled);
-    sendRendererMessage(
-        TRACK_TYPE_VIDEO, MSG_SET_VIDEO_FRAME_METADATA_LISTENER, frameMetadataListener);
     sendRendererMessage(
         TRACK_TYPE_CAMERA_MOTION, MSG_SET_CAMERA_MOTION_LISTENER, frameMetadataListener);
     sendRendererMessage(MSG_SET_PRIORITY, priority);
@@ -1553,6 +1552,17 @@
         listener -> listener.onSkipSilenceEnabledChanged(newSkipSilenceEnabled));
   }
 
+  @Override
+  public void setScrubbingModeEnabled(boolean scrubbingModeEnabled) {
+    verifyApplicationThread();
+    if (scrubbingModeEnabled == this.scrubbingModeEnabled) {
+      return;
+    }
+    this.scrubbingModeEnabled = scrubbingModeEnabled;
+    internalPlayer.setScrubbingModeEnabled(scrubbingModeEnabled);
+    maybeUpdatePlaybackSuppressionReason();
+  }
+
   @Override
   public AnalyticsCollector getAnalyticsCollector() {
     verifyApplicationThread();
@@ -2730,6 +2740,10 @@
     }
   }
 
+  private void maybeUpdatePlaybackSuppressionReason() {
+    updatePlayWhenReady(playbackInfo.playWhenReady, playbackInfo.playWhenReadyChangeReason);
+  }
+
   private void updatePlayWhenReady(
       boolean playWhenReady, @Player.PlayWhenReadyChangeReason int playWhenReadyChangeReason) {
     @PlaybackSuppressionReason
@@ -2761,6 +2775,9 @@
   }
 
   private @PlaybackSuppressionReason int computePlaybackSuppressionReason(boolean playWhenReady) {
+    if (scrubbingModeEnabled) {
+      return Player.PLAYBACK_SUPPRESSION_REASON_SCRUBBING;
+    }
     if (suitableOutputChecker != null
         && !suitableOutputChecker.isSelectedOutputSuitableForPlayback()) {
       return Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT;
@@ -2888,10 +2905,10 @@
     if (isSelectedOutputSuitableForPlayback) {
       if (playbackInfo.playbackSuppressionReason
           == Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT) {
-        updatePlayWhenReady(playbackInfo.playWhenReady, PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST);
+        maybeUpdatePlaybackSuppressionReason();
       }
     } else {
-      updatePlayWhenReady(playbackInfo.playWhenReady, PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST);
+      maybeUpdatePlaybackSuppressionReason();
     }
   }
 
@@ -28,6 +28,7 @@ import static java.lang.Math.max;
 import static java.lang.Math.min;
 
 import android.content.Context;
+import android.media.MediaFormat;
 import android.os.Handler;
 import android.os.Looper;
 import android.os.Message;
@@ -72,7 +73,7 @@ import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
 import androidx.media3.exoplayer.trackselection.TrackSelector;
 import androidx.media3.exoplayer.trackselection.TrackSelectorResult;
 import androidx.media3.exoplayer.upstream.BandwidthMeter;
-import com.google.common.base.Supplier;
+import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
 import com.google.common.collect.ImmutableList;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -89,7 +90,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
         MediaSourceList.MediaSourceListInfoRefreshListener,
         PlaybackParametersListener,
         PlayerMessage.Sender,
-        AudioFocusManager.PlayerControl {
+        AudioFocusManager.PlayerControl,
+        VideoFrameMetadataListener {
 
   private static final String TAG = "ExoPlayerImplInternal";
 
@@ -168,6 +170,9 @@
   private static final int MSG_SET_VOLUME = 32;
   private static final int MSG_AUDIO_FOCUS_PLAYER_COMMAND = 33;
   private static final int MSG_AUDIO_FOCUS_VOLUME_MULTIPLIER = 34;
+  private static final int MSG_SET_VIDEO_FRAME_METADATA_LISTENER = 35;
+  private static final int MSG_SET_SCRUBBING_MODE_ENABLED = 36;
+  private static final int MSG_SEEK_COMPLETED_IN_SCRUBBING_MODE = 37;
 
   private static final long BUFFERING_MAXIMUM_INTERVAL_MS =
       Util.usToMs(Renderer.DEFAULT_DURATION_TO_PROGRESS_US);
@@ -216,9 +221,12 @@
   private final boolean hasSecondaryRenderers;
   private final AudioFocusManager audioFocusManager;
   private SeekParameters seekParameters;
+  private boolean scrubbingModeEnabled;
+  private boolean seekIsPendingWhileScrubbing;
+  @Nullable private SeekPosition queuedSeekWhileScrubbing;
   private PlaybackInfo playbackInfo;
   private PlaybackInfoUpdate playbackInfoUpdate;
-  private boolean released;
+  private boolean releasedOnApplicationThread;
   private boolean pauseAtEndOfWindow;
   private boolean pendingPauseAtEndOfPeriod;
   private boolean isRebuffering;
@@ -265,7 +273,8 @@
       PlaybackInfoUpdateListener playbackInfoUpdateListener,
       PlayerId playerId,
       @Nullable PlaybackLooperProvider playbackLooperProvider,
-      PreloadConfiguration preloadConfiguration) {
+      PreloadConfiguration preloadConfiguration,
+      VideoFrameMetadataListener videoFrameMetadataListener) {
     this.playbackInfoUpdateListener = playbackInfoUpdateListener;
     this.trackSelector = trackSelector;
     this.emptyTrackSelectorResult = emptyTrackSelectorResult;
@@ -340,6 +349,15 @@
     handler = clock.createHandler(this.playbackLooper, this);
 
     audioFocusManager = new AudioFocusManager(context, playbackLooper, /* playerControl= */ this);
+    VideoFrameMetadataListener internalVideoFrameMetadataListener =
+        (presentationTimeUs, releaseTimeNs, format, mediaFormat) -> {
+          videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
+              presentationTimeUs, releaseTimeNs, format, mediaFormat);
+          onVideoFrameAboutToBeRendered(presentationTimeUs, releaseTimeNs, format, mediaFormat);
+        };
+    handler
+        .obtainMessage(MSG_SET_VIDEO_FRAME_METADATA_LISTENER, internalVideoFrameMetadataListener)
+        .sendToTarget();
   }
 
   private MediaPeriodHolder createMediaPeriodHolder(
@@ -405,6 +423,10 @@
     handler.obtainMessage(MSG_SET_SEEK_PARAMETERS, seekParameters).sendToTarget();
   }
 
+  public void setScrubbingModeEnabled(boolean scrubbingModeEnabled) {
+    handler.obtainMessage(MSG_SET_SCRUBBING_MODE_ENABLED, scrubbingModeEnabled).sendToTarget();
+  }
+
   public void stop() {
     handler.obtainMessage(MSG_STOP).sendToTarget();
   }
@@ -483,9 +505,16 @@
     setVolumeInternal(volume);
   }
 
+  private void setVideoFrameMetadataListenerInternal(
+      VideoFrameMetadataListener videoFrameMetadataListener) throws ExoPlaybackException {
+    for (RendererHolder renderer : renderers) {
+      renderer.setVideoFrameMetadataListener(videoFrameMetadataListener);
+    }
+  }
+
   @Override
-  public synchronized void sendMessage(PlayerMessage message) {
-    if (released || !playbackLooper.getThread().isAlive()) {
+  public void sendMessage(PlayerMessage message) {
+    if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
       Log.w(TAG, "Ignoring messages sent after release.");
       message.markAsProcessed(/* isDelivered= */ false);
       return;
@@ -499,8 +528,8 @@
    * @param foregroundMode Whether foreground mode should be enabled.
    * @return Whether the operations succeeded. If false, the operation timed out.
    */
-  public synchronized boolean setForegroundMode(boolean foregroundMode) {
-    if (released || !playbackLooper.getThread().isAlive()) {
+  public boolean setForegroundMode(boolean foregroundMode) {
+    if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
       return true;
     }
     if (foregroundMode) {
@@ -511,7 +540,7 @@
       handler
          .obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedFlag)
          .sendToTarget();
-      waitUninterruptibly(/* condition= */ processedFlag::get, setForegroundModeTimeoutMs);
+      waitUninterruptibly(processedFlag, setForegroundModeTimeoutMs);
       return processedFlag.get();
     }
   }
@@ -527,8 +556,8 @@
    *     C#TIME_UNSET} then the method will not block on the message delivery.
    * @return Whether the operation succeeded. If false, the operation timed out.
    */
-  public synchronized boolean setVideoOutput(@Nullable Object videoOutput, long timeoutMs) {
-    if (released || !playbackLooper.getThread().isAlive()) {
+  public boolean setVideoOutput(@Nullable Object videoOutput, long timeoutMs) {
+    if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
      return true;
    }
    AtomicBoolean processedFlag = new AtomicBoolean();
@@ -536,7 +565,7 @@
        .obtainMessage(MSG_SET_VIDEO_OUTPUT, new Pair<>(videoOutput, processedFlag))
        .sendToTarget();
    if (timeoutMs != C.TIME_UNSET) {
-      waitUninterruptibly(/* condition= */ processedFlag::get, timeoutMs);
+      waitUninterruptibly(processedFlag, timeoutMs);
      return processedFlag.get();
    }
    return true;
@@ -547,13 +576,15 @@
    *
    * @return Whether the release succeeded. If false, the release timed out.
    */
-  public synchronized boolean release() {
-    if (released || !playbackLooper.getThread().isAlive()) {
+  public boolean release() {
+    if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
      return true;
    }
-    handler.sendEmptyMessage(MSG_RELEASE);
-    waitUninterruptibly(/* condition= */ () -> released, releaseTimeoutMs);
-    return released;
+    releasedOnApplicationThread = true;
+    AtomicBoolean processedFlag = new AtomicBoolean();
+    handler.obtainMessage(MSG_RELEASE, processedFlag).sendToTarget();
+    waitUninterruptibly(processedFlag, releaseTimeoutMs);
+    return processedFlag.get();
  }
 
  public Looper getPlaybackLooper() {
@@ -613,6 +644,19 @@
     handler.obtainMessage(MSG_AUDIO_FOCUS_PLAYER_COMMAND, playerCommand, 0).sendToTarget();
   }
 
+  // VideoFrameMetadataListener implementation
+
+  @Override
+  public void onVideoFrameAboutToBeRendered(
+      long presentationTimeUs,
+      long releaseTimeNs,
+      Format format,
+      @Nullable MediaFormat mediaFormat) {
+    if (seekIsPendingWhileScrubbing) {
+      handler.obtainMessage(MSG_SEEK_COMPLETED_IN_SCRUBBING_MODE).sendToTarget();
+    }
+  }
+
   // Handler.Callback implementation.
 
   @SuppressWarnings({"unchecked", "WrongConstant"}) // Casting message payload types and IntDef.
@@ -643,7 +687,14 @@
         doSomeWork();
         break;
       case MSG_SEEK_TO:
-        seekToInternal((SeekPosition) msg.obj);
+        seekToInternal((SeekPosition) msg.obj, /* incrementAcks= */ true);
+        break;
+      case MSG_SEEK_COMPLETED_IN_SCRUBBING_MODE:
+        seekIsPendingWhileScrubbing = false;
+        if (queuedSeekWhileScrubbing != null) {
+          seekToInternal(queuedSeekWhileScrubbing, /* incrementAcks= */ false);
+          queuedSeekWhileScrubbing = null;
+        }
         break;
       case MSG_SET_PLAYBACK_PARAMETERS:
         setPlaybackParametersInternal((PlaybackParameters) msg.obj);
@@ -651,6 +702,9 @@
       case MSG_SET_SEEK_PARAMETERS:
         setSeekParametersInternal((SeekParameters) msg.obj);
         break;
+      case MSG_SET_SCRUBBING_MODE_ENABLED:
+        setScrubbingModeEnabledInternal((Boolean) msg.obj);
+        break;
       case MSG_SET_FOREGROUND_MODE:
         setForegroundModeInternal(
             /* foregroundMode= */ msg.arg1 != 0, /* processedFlag= */ (AtomicBoolean) msg.obj);
@@ -725,8 +779,11 @@
       case MSG_AUDIO_FOCUS_VOLUME_MULTIPLIER:
         handleAudioFocusVolumeMultiplierChange();
         break;
+      case MSG_SET_VIDEO_FRAME_METADATA_LISTENER:
+        setVideoFrameMetadataListenerInternal((VideoFrameMetadataListener) msg.obj);
+        break;
       case MSG_RELEASE:
-        releaseInternal();
+        releaseInternal(/* processedFlag= */ (AtomicBoolean) msg.obj);
         // Return immediately to not send playback info updates after release.
         return true;
       default:
@@ -870,7 +927,7 @@
    * @param condition The condition.
    * @param timeoutMs The time in milliseconds to wait for the condition to become true.
    */
-  private synchronized void waitUninterruptibly(Supplier<Boolean> condition, long timeoutMs) {
+  private synchronized void waitUninterruptibly(AtomicBoolean condition, long timeoutMs) {
    long deadlineMs = clock.elapsedRealtime() + timeoutMs;
    long remainingMs = timeoutMs;
    boolean wasInterrupted = false;
@@ -1486,8 +1543,13 @@
           MSG_DO_SOME_WORK, thisOperationStartTimeMs + wakeUpTimeIntervalMs);
     }
 
-  private void seekToInternal(SeekPosition seekPosition) throws ExoPlaybackException {
-    playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1);
+  private void seekToInternal(SeekPosition seekPosition, boolean incrementAcks)
+      throws ExoPlaybackException {
+    playbackInfoUpdate.incrementPendingOperationAcks(incrementAcks ? 1 : 0);
+    if (seekIsPendingWhileScrubbing) {
+      queuedSeekWhileScrubbing = seekPosition;
+      return;
+    }
 
     MediaPeriodId periodId;
     long periodPositionUs;
@@ -1568,6 +1630,7 @@
         return;
       }
     }
+    seekIsPendingWhileScrubbing = scrubbingModeEnabled;
     newPeriodPositionUs =
         seekToPeriodPosition(
             periodId,
@@ -1698,6 +1761,20 @@
     this.seekParameters = seekParameters;
   }
 
+  private void setScrubbingModeEnabledInternal(boolean scrubbingModeEnabled)
+      throws ExoPlaybackException {
+    this.scrubbingModeEnabled = scrubbingModeEnabled;
+    if (!scrubbingModeEnabled) {
+      seekIsPendingWhileScrubbing = false;
+      handler.removeMessages(MSG_SEEK_COMPLETED_IN_SCRUBBING_MODE);
+      if (queuedSeekWhileScrubbing != null) {
+        // Immediately seek to the latest received scrub position (interrupting a pending seek).
+        seekToInternal(queuedSeekWhileScrubbing, /* incrementAcks= */ false);
+        queuedSeekWhileScrubbing = null;
+      }
+    }
+  }
+
   private void setForegroundModeInternal(
       boolean foregroundMode, @Nullable AtomicBoolean processedFlag) {
     if (this.foregroundMode != foregroundMode) {
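The scrubbing changes in this file amount to a seek-coalescing scheme: while one scrubbing seek is in flight (`seekIsPendingWhileScrubbing`), later seek requests only overwrite `queuedSeekWhileScrubbing`, and the queued position is executed once the in-flight seek renders a frame (`MSG_SEEK_COMPLETED_IN_SCRUBBING_MODE`) or when scrubbing mode is switched off. Below is a standalone, hedged sketch of that pattern, independent of the player internals; names and the printed output are illustrative only.

```java
/** Standalone illustration of the seek-coalescing pattern used by scrubbing mode. */
final class CoalescingSeeker {

  private boolean seekPending;
  private Long queuedPositionMs; // Latest position requested while a seek was pending.

  /** Called for every scrub position the UI produces. */
  void requestSeek(long positionMs) {
    if (seekPending) {
      // A seek is still in flight: remember only the most recent target.
      queuedPositionMs = positionMs;
      return;
    }
    startSeek(positionMs);
  }

  /** Called when the in-flight seek has completed (e.g. a frame was rendered). */
  void onSeekCompleted() {
    seekPending = false;
    if (queuedPositionMs != null) {
      long positionMs = queuedPositionMs;
      queuedPositionMs = null;
      startSeek(positionMs);
    }
  }

  private void startSeek(long positionMs) {
    seekPending = true;
    // In the real player this kicks off the actual seek on the playback thread.
    System.out.println("Seeking to " + positionMs + " ms");
  }
}
```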
@@ -1746,7 +1823,7 @@
       setState(Player.STATE_IDLE);
     }
 
-  private void releaseInternal() {
+  private void releaseInternal(AtomicBoolean processedFlag) {
     try {
       resetInternal(
           /* resetRenderers= */ true,
@@ -1761,7 +1838,7 @@
     } finally {
       playbackLooperProvider.releaseLooper();
       synchronized (this) {
-        released = true;
+        processedFlag.set(true);
        notifyAll();
      }
    }
@@ -1773,6 +1850,8 @@
       boolean releaseMediaSourceList,
       boolean resetError) {
     handler.removeMessages(MSG_DO_SOME_WORK);
+    seekIsPendingWhileScrubbing = false;
+    queuedSeekWhileScrubbing = null;
     pendingRecoverableRendererError = null;
     updateRebufferingState(/* isRebuffering= */ false, /* resetLastRebufferRealtimeMs= */ true);
     mediaClock.stop();
@@ -38,6 +38,7 @@ import androidx.media3.exoplayer.source.SampleStream;
 import androidx.media3.exoplayer.text.TextRenderer;
 import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
 import androidx.media3.exoplayer.trackselection.TrackSelectorResult;
+import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
 import java.io.IOException;
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
@@ -784,6 +785,19 @@ import java.util.Objects;
     }
   }
 
+  public void setVideoFrameMetadataListener(VideoFrameMetadataListener videoFrameMetadataListener)
+      throws ExoPlaybackException {
+    if (getTrackType() != TRACK_TYPE_VIDEO) {
+      return;
+    }
+    primaryRenderer.handleMessage(
+        Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER, videoFrameMetadataListener);
+    if (secondaryRenderer != null) {
+      secondaryRenderer.handleMessage(
+          Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER, videoFrameMetadataListener);
+    }
+  }
+
   /** Sets the volume on the renderer. */
   public void setVolume(float volume) throws ExoPlaybackException {
     if (getTrackType() != TRACK_TYPE_AUDIO) {
@@ -622,6 +622,12 @@ public class SimpleExoPlayer extends BasePlayer implements ExoPlayer {
     player.setSkipSilenceEnabled(skipSilenceEnabled);
   }
 
+  @Override
+  public void setScrubbingModeEnabled(boolean scrubbingModeEnabled) {
+    blockUntilConstructorFinished();
+    player.setScrubbingModeEnabled(scrubbingModeEnabled);
+  }
+
   @Override
   public AnalyticsCollector getAnalyticsCollector() {
     blockUntilConstructorFinished();
@ -593,7 +593,7 @@ public interface AudioSink {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the size of the underlying {@link AudioTrack} buffer in microseconds. If unsupported or
|
* Returns the size of the underlying {@link AudioTrack} buffer in microseconds. If unsupported or
|
||||||
* the {@link AudioTrack} is not initialized then return {@link C#TIME_UNSET};
|
* the {@link AudioTrack} is not initialized then return {@link C#TIME_UNSET}.
|
||||||
*
|
*
|
||||||
* <p>If the {@link AudioTrack} is configured with a compressed encoding, then the returned
|
* <p>If the {@link AudioTrack} is configured with a compressed encoding, then the returned
|
||||||
* duration is an estimated minimum based on the encoding's maximum encoded byte rate.
|
* duration is an estimated minimum based on the encoding's maximum encoded byte rate.
|
||||||
|
@ -170,6 +170,7 @@ public abstract class DecoderAudioRenderer<
|
|||||||
private long largestQueuedPresentationTimeUs;
|
private long largestQueuedPresentationTimeUs;
|
||||||
private long lastBufferInStreamPresentationTimeUs;
|
private long lastBufferInStreamPresentationTimeUs;
|
||||||
private long nextBufferToWritePresentationTimeUs;
|
private long nextBufferToWritePresentationTimeUs;
|
||||||
|
private boolean isRendereringToEndOfStream;
|
||||||
|
|
||||||
public DecoderAudioRenderer() {
|
public DecoderAudioRenderer() {
|
||||||
this(/* eventHandler= */ null, /* eventListener= */ null);
|
this(/* eventHandler= */ null, /* eventListener= */ null);
|
||||||
@ -247,9 +248,14 @@ public abstract class DecoderAudioRenderer<
|
|||||||
if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
|
if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
|
||||||
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
|
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
|
||||||
}
|
}
|
||||||
|
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
|
||||||
|
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
|
||||||
|
// to end of stream.
|
||||||
|
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
|
||||||
|
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
|
||||||
|
}
|
||||||
// Compare written, yet-to-play content duration against the audio track buffer size.
|
// Compare written, yet-to-play content duration against the audio track buffer size.
|
||||||
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
|
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
|
||||||
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
|
|
||||||
long bufferedDurationUs =
|
long bufferedDurationUs =
|
||||||
audioTrackBufferDurationUs != C.TIME_UNSET
|
audioTrackBufferDurationUs != C.TIME_UNSET
|
||||||
? min(audioTrackBufferDurationUs, writtenDurationUs)
|
? min(audioTrackBufferDurationUs, writtenDurationUs)
|
||||||
@ -312,6 +318,7 @@ public abstract class DecoderAudioRenderer<
|
|||||||
try {
|
try {
|
||||||
audioSink.playToEndOfStream();
|
audioSink.playToEndOfStream();
|
||||||
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
|
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
|
||||||
|
isRendereringToEndOfStream = true;
|
||||||
} catch (AudioSink.WriteException e) {
|
} catch (AudioSink.WriteException e) {
|
||||||
throw createRendererException(
|
throw createRendererException(
|
||||||
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
|
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
|
||||||
@ -593,6 +600,7 @@ public abstract class DecoderAudioRenderer<
|
|||||||
outputStreamEnded = true;
|
outputStreamEnded = true;
|
||||||
audioSink.playToEndOfStream();
|
audioSink.playToEndOfStream();
|
||||||
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
|
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
|
||||||
|
isRendereringToEndOfStream = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
private void flushDecoder() throws ExoPlaybackException {
|
private void flushDecoder() throws ExoPlaybackException {
|
||||||
@ -668,6 +676,7 @@ public abstract class DecoderAudioRenderer<
|
|||||||
|
|
||||||
currentPositionUs = positionUs;
|
currentPositionUs = positionUs;
|
||||||
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
isRendereringToEndOfStream = false;
|
||||||
hasPendingReportedSkippedSilence = false;
|
hasPendingReportedSkippedSilence = false;
|
||||||
allowPositionDiscontinuity = true;
|
allowPositionDiscontinuity = true;
|
||||||
inputStreamEnded = false;
|
inputStreamEnded = false;
|
||||||
@ -697,6 +706,7 @@ public abstract class DecoderAudioRenderer<
|
|||||||
setOutputStreamOffsetUs(C.TIME_UNSET);
|
setOutputStreamOffsetUs(C.TIME_UNSET);
|
||||||
hasPendingReportedSkippedSilence = false;
|
hasPendingReportedSkippedSilence = false;
|
||||||
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
isRendereringToEndOfStream = false;
|
||||||
try {
|
try {
|
||||||
setSourceDrmSession(null);
|
setSourceDrmSession(null);
|
||||||
releaseDecoder();
|
releaseDecoder();
|
||||||
|
@ -126,6 +126,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
private int rendererPriority;
|
private int rendererPriority;
|
||||||
private boolean isStarted;
|
private boolean isStarted;
|
||||||
private long nextBufferToWritePresentationTimeUs;
|
private long nextBufferToWritePresentationTimeUs;
|
||||||
|
private boolean isRendereringToEndOfStream;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param context A context.
|
* @param context A context.
|
||||||
@ -523,9 +524,15 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
return super.getDurationToProgressUs(
|
return super.getDurationToProgressUs(
|
||||||
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
|
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
|
||||||
}
|
}
|
||||||
|
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
|
||||||
|
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
|
||||||
|
// to end of stream.
|
||||||
|
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
|
||||||
|
return super.getDurationToProgressUs(
|
||||||
|
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
|
||||||
|
}
|
||||||
// Compare written, yet-to-play content duration against the audio track buffer size.
|
// Compare written, yet-to-play content duration against the audio track buffer size.
|
||||||
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
|
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
|
||||||
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
|
|
||||||
long bufferedDurationUs =
|
long bufferedDurationUs =
|
||||||
audioTrackBufferDurationUs != C.TIME_UNSET
|
audioTrackBufferDurationUs != C.TIME_UNSET
|
||||||
? min(audioTrackBufferDurationUs, writtenDurationUs)
|
? min(audioTrackBufferDurationUs, writtenDurationUs)
|
||||||
@ -687,6 +694,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
|
|
||||||
currentPositionUs = positionUs;
|
currentPositionUs = positionUs;
|
||||||
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
isRendereringToEndOfStream = false;
|
||||||
hasPendingReportedSkippedSilence = false;
|
hasPendingReportedSkippedSilence = false;
|
||||||
allowPositionDiscontinuity = true;
|
allowPositionDiscontinuity = true;
|
||||||
}
|
}
|
||||||
@ -711,6 +719,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
audioSinkNeedsReset = true;
|
audioSinkNeedsReset = true;
|
||||||
inputFormat = null;
|
inputFormat = null;
|
||||||
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
isRendereringToEndOfStream = false;
|
||||||
try {
|
try {
|
||||||
audioSink.flush();
|
audioSink.flush();
|
||||||
} finally {
|
} finally {
|
||||||
@ -726,6 +735,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
protected void onReset() {
|
protected void onReset() {
|
||||||
hasPendingReportedSkippedSilence = false;
|
hasPendingReportedSkippedSilence = false;
|
||||||
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
isRendereringToEndOfStream = false;
|
||||||
try {
|
try {
|
||||||
super.onReset();
|
super.onReset();
|
||||||
} finally {
|
} finally {
|
||||||
@ -865,6 +875,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|
|||||||
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
|
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
|
||||||
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
|
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
|
||||||
}
|
}
|
||||||
|
isRendereringToEndOfStream = true;
|
||||||
} catch (AudioSink.WriteException e) {
|
} catch (AudioSink.WriteException e) {
|
||||||
throw createRendererException(
|
throw createRendererException(
|
||||||
e,
|
e,
|
||||||
|
@ -17,7 +17,7 @@ package androidx.media3.exoplayer.offline;
|
|||||||
|
|
||||||
import static androidx.annotation.VisibleForTesting.PRIVATE;
|
import static androidx.annotation.VisibleForTesting.PRIVATE;
|
||||||
import static androidx.media3.common.util.Assertions.checkNotNull;
|
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||||
import static androidx.media3.common.util.Util.percent;
|
import static androidx.media3.common.util.Util.percentFloat;
|
||||||
|
|
||||||
import androidx.annotation.Nullable;
|
import androidx.annotation.Nullable;
|
||||||
import androidx.annotation.VisibleForTesting;
|
import androidx.annotation.VisibleForTesting;
|
||||||
@ -217,7 +217,7 @@ public final class ProgressiveDownloader implements Downloader {
|
|||||||
float percentDownloaded =
|
float percentDownloaded =
|
||||||
contentLength == C.LENGTH_UNSET || contentLength == 0
|
contentLength == C.LENGTH_UNSET || contentLength == 0
|
||||||
? C.PERCENTAGE_UNSET
|
? C.PERCENTAGE_UNSET
|
||||||
: percent(bytesCached, contentLength);
|
: percentFloat(bytesCached, contentLength);
|
||||||
checkNotNull(progressListener).onProgress(contentLength, bytesCached, percentDownloaded);
|
checkNotNull(progressListener).onProgress(contentLength, bytesCached, percentDownloaded);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
package androidx.media3.exoplayer.offline;
|
package androidx.media3.exoplayer.offline;
|
||||||
|
|
||||||
import static androidx.media3.common.util.Assertions.checkNotNull;
|
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||||
import static androidx.media3.common.util.Util.percent;
|
import static androidx.media3.common.util.Util.percentFloat;
|
||||||
|
|
||||||
import android.net.Uri;
|
import android.net.Uri;
|
||||||
import androidx.annotation.Nullable;
|
import androidx.annotation.Nullable;
|
||||||
@ -537,9 +537,9 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
|
|||||||
|
|
||||||
private float getPercentDownloaded() {
|
private float getPercentDownloaded() {
|
||||||
if (contentLength != C.LENGTH_UNSET && contentLength != 0) {
|
if (contentLength != C.LENGTH_UNSET && contentLength != 0) {
|
||||||
return percent(bytesDownloaded, contentLength);
|
return percentFloat(bytesDownloaded, contentLength);
|
||||||
} else if (totalSegments != 0) {
|
} else if (totalSegments != 0) {
|
||||||
return percent(segmentsDownloaded, totalSegments);
|
return percentFloat(segmentsDownloaded, totalSegments);
|
||||||
} else {
|
} else {
|
||||||
return C.PERCENTAGE_UNSET;
|
return C.PERCENTAGE_UNSET;
|
||||||
}
|
}
|
||||||
|
@ -741,6 +741,8 @@ public class EventLogger implements AnalyticsListener {
|
|||||||
return "TRANSIENT_AUDIO_FOCUS_LOSS";
|
return "TRANSIENT_AUDIO_FOCUS_LOSS";
|
||||||
case Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT:
|
case Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT:
|
||||||
return "UNSUITABLE_AUDIO_OUTPUT";
|
return "UNSUITABLE_AUDIO_OUTPUT";
|
||||||
|
case Player.PLAYBACK_SUPPRESSION_REASON_SCRUBBING:
|
||||||
|
return "SCRUBBING";
|
||||||
default:
|
default:
|
||||||
return "?";
|
return "?";
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,151 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2025 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package androidx.media3.exoplayer;
|
||||||
|
|
||||||
|
import static androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition.DEFAULT_WINDOW_DURATION_US;
|
||||||
|
import static androidx.media3.test.utils.robolectric.TestPlayerRunHelper.advance;
|
||||||
|
import static com.google.common.truth.Truth.assertThat;
|
||||||
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
|
import static org.mockito.ArgumentMatchers.anyLong;
|
||||||
|
import static org.mockito.ArgumentMatchers.eq;
|
||||||
|
import static org.mockito.Mockito.atLeastOnce;
|
||||||
|
import static org.mockito.Mockito.mock;
|
||||||
|
import static org.mockito.Mockito.verify;
|
||||||
|
|
||||||
|
import android.graphics.SurfaceTexture;
|
||||||
|
import android.view.Surface;
|
||||||
|
import androidx.media3.common.C;
|
||||||
|
import androidx.media3.common.Player;
|
||||||
|
import androidx.media3.common.Player.PositionInfo;
|
||||||
|
import androidx.media3.common.Timeline;
|
||||||
|
import androidx.media3.exoplayer.drm.DrmSessionManager;
|
||||||
|
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
|
||||||
|
import androidx.media3.test.utils.ExoPlayerTestRunner;
|
||||||
|
import androidx.media3.test.utils.FakeMediaPeriod.TrackDataFactory;
|
||||||
|
import androidx.media3.test.utils.FakeMediaSource;
|
||||||
|
import androidx.media3.test.utils.FakeRenderer;
|
||||||
|
import androidx.media3.test.utils.FakeTimeline;
|
||||||
|
import androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition;
|
||||||
|
import androidx.media3.test.utils.TestExoPlayerBuilder;
|
||||||
|
import androidx.test.core.app.ApplicationProvider;
|
||||||
|
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
import org.mockito.ArgumentCaptor;
|
||||||
|
|
||||||
|
/** Tests for {@linkplain ExoPlayer#setScrubbingModeEnabled(boolean) scrubbing mode}. */
|
||||||
|
@RunWith(AndroidJUnit4.class)
|
||||||
|
public final class ExoPlayerScrubbingTest {
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void scrubbingMode_suppressesPlayback() throws Exception {
|
||||||
|
Timeline timeline = new FakeTimeline();
|
||||||
|
FakeRenderer renderer = new FakeRenderer(C.TRACK_TYPE_VIDEO);
|
||||||
|
ExoPlayer player =
|
||||||
|
new TestExoPlayerBuilder(ApplicationProvider.getApplicationContext())
|
||||||
|
.setRenderers(renderer)
|
||||||
|
.build();
|
||||||
|
Player.Listener mockListener = mock(Player.Listener.class);
|
||||||
|
player.addListener(mockListener);
|
||||||
|
|
||||||
|
player.setMediaSource(new FakeMediaSource(timeline, ExoPlayerTestRunner.VIDEO_FORMAT));
|
||||||
|
player.prepare();
|
||||||
|
player.play();
|
||||||
|
|
||||||
|
advance(player).untilPosition(0, 2000);
|
||||||
|
|
||||||
|
player.setScrubbingModeEnabled(true);
|
||||||
|
verify(mockListener)
|
||||||
|
.onPlaybackSuppressionReasonChanged(Player.PLAYBACK_SUPPRESSION_REASON_SCRUBBING);
|
||||||
|
|
||||||
|
player.setScrubbingModeEnabled(false);
|
||||||
|
verify(mockListener)
|
||||||
|
.onPlaybackSuppressionReasonChanged(Player.PLAYBACK_SUPPRESSION_REASON_NONE);
|
||||||
|
|
||||||
|
player.release();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void scrubbingMode_pendingSeekIsNotPreempted() throws Exception {
|
||||||
|
Timeline timeline =
|
||||||
|
new FakeTimeline(
|
||||||
|
new TimelineWindowDefinition.Builder().setWindowPositionInFirstPeriodUs(0).build());
|
||||||
|
ExoPlayer player =
|
||||||
|
new TestExoPlayerBuilder(ApplicationProvider.getApplicationContext()).build();
|
||||||
|
Surface surface = new Surface(new SurfaceTexture(/* texName= */ 1));
|
||||||
|
player.setVideoSurface(surface);
|
||||||
|
Player.Listener mockListener = mock(Player.Listener.class);
|
||||||
|
player.addListener(mockListener);
|
||||||
|
|
||||||
|
player.setMediaSource(
|
||||||
|
new FakeMediaSource(
|
||||||
|
timeline,
|
||||||
|
DrmSessionManager.DRM_UNSUPPORTED,
|
||||||
|
TrackDataFactory.samplesWithRateDurationAndKeyframeInterval(
|
||||||
|
/* initialSampleTimeUs= */ 0,
|
||||||
|
/* sampleRate= */ 30,
|
||||||
|
/* durationUs= */ DEFAULT_WINDOW_DURATION_US,
|
||||||
|
/* keyFrameInterval= */ 60),
|
||||||
|
ExoPlayerTestRunner.VIDEO_FORMAT));
|
||||||
|
player.prepare();
|
||||||
|
player.play();
|
||||||
|
|
||||||
|
advance(player).untilPosition(0, 1000);
|
||||||
|
|
||||||
|
VideoFrameMetadataListener mockVideoFrameMetadataListener =
|
||||||
|
mock(VideoFrameMetadataListener.class);
|
||||||
|
player.setVideoFrameMetadataListener(mockVideoFrameMetadataListener);
|
||||||
|
player.setScrubbingModeEnabled(true);
|
||||||
|
advance(player).untilPendingCommandsAreFullyHandled();
|
||||||
|
player.seekTo(2500);
|
||||||
|
player.seekTo(3000);
|
||||||
|
player.seekTo(3500);
|
||||||
|
// Allow the 2500 and 3500 seeks to complete (the 3000 seek should be dropped).
|
||||||
|
advance(player).untilPendingCommandsAreFullyHandled();
|
||||||
|
|
||||||
|
player.seekTo(4000);
|
||||||
|
player.seekTo(4500);
|
||||||
|
// Disabling scrubbing mode should immediately execute the last received seek (pre-empting a
|
||||||
|
// previous one), so we expect the 4500 seek to be resolved and the 4000 seek to be dropped.
|
||||||
|
player.setScrubbingModeEnabled(false);
|
||||||
|
advance(player).untilPendingCommandsAreFullyHandled();
|
||||||
|
player.clearVideoFrameMetadataListener(mockVideoFrameMetadataListener);
|
||||||
|
|
||||||
|
advance(player).untilState(Player.STATE_ENDED);
|
||||||
|
player.release();
|
||||||
|
surface.release();
|
||||||
|
|
||||||
|
ArgumentCaptor<Long> presentationTimeUsCaptor = ArgumentCaptor.forClass(Long.class);
|
||||||
|
verify(mockVideoFrameMetadataListener, atLeastOnce())
|
||||||
|
.onVideoFrameAboutToBeRendered(presentationTimeUsCaptor.capture(), anyLong(), any(), any());
|
||||||
|
|
||||||
|
assertThat(presentationTimeUsCaptor.getAllValues())
|
||||||
|
.containsExactly(2_500_000L, 3_500_000L, 4_500_000L)
|
||||||
|
.inOrder();
|
||||||
|
|
||||||
|
// Confirm that even though we dropped some intermediate seeks, every seek request still
|
||||||
|
// resulted in a position discontinuity callback.
|
||||||
|
ArgumentCaptor<PositionInfo> newPositionCaptor = ArgumentCaptor.forClass(PositionInfo.class);
|
||||||
|
verify(mockListener, atLeastOnce())
|
||||||
|
.onPositionDiscontinuity(
|
||||||
|
/* oldPosition= */ any(),
|
||||||
|
newPositionCaptor.capture(),
|
||||||
|
eq(Player.DISCONTINUITY_REASON_SEEK));
|
||||||
|
assertThat(newPositionCaptor.getAllValues().stream().map(p -> p.positionMs))
|
||||||
|
.containsExactly(2500L, 3000L, 3500L, 4000L, 4500L)
|
||||||
|
.inOrder();
|
||||||
|
}
|
||||||
|
}
|
@ -226,178 +226,6 @@ public class DecoderAudioRendererTest {
|
|||||||
inOrderAudioSink.verify(mockAudioSink, times(2)).handleBuffer(any(), anyLong(), anyInt());
|
inOrderAudioSink.verify(mockAudioSink, times(2)).handleBuffer(any(), anyLong(), anyInt());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
|
||||||
public void getDurationToProgressUs_usingWrittenDurationUs_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
|
||||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
|
||||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
|
||||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
|
||||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
|
||||||
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
|
||||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ FORMAT,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
audioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {FORMAT},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ true,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
|
||||||
when(mockAudioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
audioRenderer.start();
|
|
||||||
while (latchDecode.getCount() != 0) {
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
}
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
long durationToProgressUs =
|
|
||||||
audioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(75_000L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void
|
|
||||||
getDurationToProgressUs_usingWrittenDurationUsWithDoublePlaybackSpeed_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
|
||||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
|
||||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
PlaybackParameters playbackParametersWithDoubleSpeed =
|
|
||||||
new PlaybackParameters(/* speed= */ 2.0f);
|
|
||||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
|
|
||||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
|
||||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
|
||||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
|
||||||
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
|
||||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ FORMAT,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
|
||||||
when(mockAudioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
audioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {FORMAT},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ true,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
audioRenderer.start();
|
|
||||||
while (latchDecode.getCount() != 0) {
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
}
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
long durationToProgressUs =
|
|
||||||
audioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(37_500L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void
|
|
||||||
getDurationToProgressUs_usingWrittenDurationUsWithPlaybackAdvancement_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
|
||||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
|
||||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
|
||||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
|
||||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
|
||||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
|
||||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
|
||||||
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
|
||||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, fakeClock);
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ FORMAT,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
|
||||||
when(mockAudioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
audioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {FORMAT},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ true,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
audioRenderer.start();
|
|
||||||
long rendererPositionElapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
|
|
||||||
while (latchDecode.getCount() != 0) {
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
|
||||||
}
|
|
||||||
audioRenderer.render(/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
|
||||||
|
|
||||||
// Simulate playback progressing between render() and getDurationToProgressUs call
|
|
||||||
fakeClock.advanceTime(/* timeDiffMs= */ 10);
|
|
||||||
long durationToProgressUs =
|
|
||||||
audioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(65_000L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
||||||
throws Exception {
|
throws Exception {
|
||||||
@ -571,8 +399,64 @@ public class DecoderAudioRendererTest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_afterReadToEndOfStream_returnsCalculatedDuration()
|
public void
|
||||||
throws Exception {
|
getDurationToProgressUs_usingAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
|
||||||
|
throws Exception {
|
||||||
|
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||||
|
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||||
|
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||||
|
CountDownLatch latchDecode = new CountDownLatch(4);
|
||||||
|
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
||||||
|
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
||||||
|
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
||||||
|
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||||
|
FakeSampleStream fakeSampleStream =
|
||||||
|
new FakeSampleStream(
|
||||||
|
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||||
|
/* mediaSourceEventDispatcher= */ null,
|
||||||
|
DrmSessionManager.DRM_UNSUPPORTED,
|
||||||
|
new DrmSessionEventListener.EventDispatcher(),
|
||||||
|
/* initialFormat= */ FORMAT,
|
||||||
|
ImmutableList.of(
|
||||||
|
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
END_OF_STREAM_ITEM));
|
||||||
|
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||||
|
audioRenderer.enable(
|
||||||
|
RendererConfiguration.DEFAULT,
|
||||||
|
new Format[] {FORMAT},
|
||||||
|
fakeSampleStream,
|
||||||
|
/* positionUs= */ 0,
|
||||||
|
/* joining= */ false,
|
||||||
|
/* mayRenderStartOfStream= */ true,
|
||||||
|
/* startPositionUs= */ 0,
|
||||||
|
/* offsetUs= */ 0,
|
||||||
|
new MediaSource.MediaPeriodId(new Object()));
|
||||||
|
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
||||||
|
when(mockAudioSink.handleBuffer(
|
||||||
|
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
|
||||||
|
.thenReturn(false);
|
||||||
|
audioRenderer.start();
|
||||||
|
while (latchDecode.getCount() != 0) {
|
||||||
|
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
}
|
||||||
|
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
long durationToProgressUs =
|
||||||
|
audioRenderer.getDurationToProgressUs(
|
||||||
|
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
assertThat(durationToProgressUs).isEqualTo(10_000L);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void
|
||||||
|
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterProcessEndOfStream_returnsCalculatedDuration()
|
||||||
|
throws Exception {
|
||||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||||
@ -628,6 +512,65 @@ public class DecoderAudioRendererTest {
|
|||||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void
|
||||||
|
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
|
||||||
|
throws Exception {
|
||||||
|
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||||
|
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||||
|
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||||
|
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||||
|
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
|
||||||
|
ForwardingAudioSink forwardingAudioSink =
|
||||||
|
new ForwardingAudioSink(mockAudioSink) {
|
||||||
|
@Override
|
||||||
|
public void playToEndOfStream() throws WriteException {
|
||||||
|
super.playToEndOfStream();
|
||||||
|
hasCalledPlayToEndOfStream.set(true);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
audioRenderer = createAudioRenderer(forwardingAudioSink);
|
||||||
|
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||||
|
FakeSampleStream fakeSampleStream =
|
||||||
|
new FakeSampleStream(
|
||||||
|
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||||
|
/* mediaSourceEventDispatcher= */ null,
|
||||||
|
DrmSessionManager.DRM_UNSUPPORTED,
|
||||||
|
new DrmSessionEventListener.EventDispatcher(),
|
||||||
|
/* initialFormat= */ FORMAT,
|
||||||
|
ImmutableList.of(
|
||||||
|
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
END_OF_STREAM_ITEM));
|
||||||
|
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||||
|
audioRenderer.enable(
|
||||||
|
RendererConfiguration.DEFAULT,
|
||||||
|
new Format[] {FORMAT},
|
||||||
|
fakeSampleStream,
|
||||||
|
/* positionUs= */ 0,
|
||||||
|
/* joining= */ false,
|
||||||
|
/* mayRenderStartOfStream= */ true,
|
||||||
|
/* startPositionUs= */ 0,
|
||||||
|
/* offsetUs= */ 0,
|
||||||
|
new MediaSource.MediaPeriodId(new Object()));
|
||||||
|
audioRenderer.start();
|
||||||
|
audioRenderer.setCurrentStreamFinal();
|
||||||
|
while (!hasCalledPlayToEndOfStream.get()) {
|
||||||
|
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
}
|
||||||
|
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
long durationToProgressUs =
|
||||||
|
audioRenderer.getDurationToProgressUs(
|
||||||
|
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
|
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
|
||||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||||
|
@ -734,177 +734,6 @@ public class MediaCodecAudioRendererTest {
|
|||||||
verify(audioSink).setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
|
verify(audioSink).setOffloadDelayPadding(/* delayInFrames= */ 312, /* paddingInFrames= */ 132);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
|
||||||
public void getDurationToProgressUs_usingWrittenDurationUs_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ AUDIO_AAC,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
mediaCodecAudioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {AUDIO_AAC},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ false,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150_000 us sample.
|
|
||||||
when(audioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
|
||||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
mediaCodecAudioRenderer.start();
|
|
||||||
for (int i = 0; i < 10; i++) {
|
|
||||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
|
||||||
}
|
|
||||||
|
|
||||||
long durationToProgressUs =
|
|
||||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(75_000L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void
|
|
||||||
getDurationToProgressUs_usingWrittenDurationUsWithDoublePlaybackSpeed_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ AUDIO_AAC,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
PlaybackParameters playbackParametersWithDoubleSpeed =
|
|
||||||
new PlaybackParameters(/* speed= */ 2.0f);
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
mediaCodecAudioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {AUDIO_AAC},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ false,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150_000 us sample.
|
|
||||||
when(audioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
when(audioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
|
|
||||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
mediaCodecAudioRenderer.start();
|
|
||||||
for (int i = 0; i < 10; i++) {
|
|
||||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
|
||||||
}
|
|
||||||
|
|
||||||
long durationToProgressUs =
|
|
||||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(37_500L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void
|
|
||||||
getDurationToProgressUs_usingWrittenDurationUsWithPlaybackAdvancement_returnsCalculatedDuration()
|
|
||||||
throws Exception {
|
|
||||||
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
|
||||||
mediaCodecAudioRenderer =
|
|
||||||
new MediaCodecAudioRenderer(
|
|
||||||
ApplicationProvider.getApplicationContext(),
|
|
||||||
new DefaultMediaCodecAdapterFactory(
|
|
||||||
ApplicationProvider.getApplicationContext(),
|
|
||||||
() -> {
|
|
||||||
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
|
|
||||||
return callbackThread;
|
|
||||||
},
|
|
||||||
() -> {
|
|
||||||
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
|
|
||||||
return queueingThread;
|
|
||||||
}),
|
|
||||||
mediaCodecSelector,
|
|
||||||
/* enableDecoderFallback= */ false,
|
|
||||||
/* eventHandler= */ new Handler(Looper.getMainLooper()),
|
|
||||||
audioRendererEventListener,
|
|
||||||
audioSink);
|
|
||||||
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, fakeClock);
|
|
||||||
FakeSampleStream fakeSampleStream =
|
|
||||||
new FakeSampleStream(
|
|
||||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
|
||||||
/* mediaSourceEventDispatcher= */ null,
|
|
||||||
DrmSessionManager.DRM_UNSUPPORTED,
|
|
||||||
new DrmSessionEventListener.EventDispatcher(),
|
|
||||||
/* initialFormat= */ AUDIO_AAC,
|
|
||||||
ImmutableList.of(
|
|
||||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
|
||||||
END_OF_STREAM_ITEM));
|
|
||||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
|
||||||
mediaCodecAudioRenderer.enable(
|
|
||||||
RendererConfiguration.DEFAULT,
|
|
||||||
new Format[] {AUDIO_AAC},
|
|
||||||
fakeSampleStream,
|
|
||||||
/* positionUs= */ 0,
|
|
||||||
/* joining= */ false,
|
|
||||||
/* mayRenderStartOfStream= */ false,
|
|
||||||
/* startPositionUs= */ 0,
|
|
||||||
/* offsetUs= */ 0,
|
|
||||||
new MediaSource.MediaPeriodId(new Object()));
|
|
||||||
// Represents audio sink buffers being full when trying to write 150_000 us sample.
|
|
||||||
when(audioSink.handleBuffer(
|
|
||||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
|
|
||||||
.thenReturn(false);
|
|
||||||
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
|
||||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
|
||||||
mediaCodecAudioRenderer.start();
|
|
||||||
for (int i = 0; i < 10; i++) {
|
|
||||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, fakeClock.elapsedRealtime() * 1000);
|
|
||||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Simulate playback progressing between render() and getDurationToProgressUs call
|
|
||||||
long rendererPositionElapsedRealtimeUs = fakeClock.elapsedRealtime() * 1000;
|
|
||||||
fakeClock.advanceTime(/* timeDiffMs= */ 10);
|
|
||||||
long durationToProgressUs =
|
|
||||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
|
||||||
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
|
||||||
|
|
||||||
assertThat(durationToProgressUs).isEqualTo(65_000L);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
||||||
throws Exception {
|
throws Exception {
|
||||||
@ -1077,8 +906,58 @@ public class MediaCodecAudioRendererTest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_afterRenderToEndOfStream_returnsCalculatedDuration()
|
public void
|
||||||
throws Exception {
|
getDurationToProgressUs_withAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
|
||||||
|
throws Exception {
|
||||||
|
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||||
|
FakeSampleStream fakeSampleStream =
|
||||||
|
new FakeSampleStream(
|
||||||
|
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||||
|
/* mediaSourceEventDispatcher= */ null,
|
||||||
|
DrmSessionManager.DRM_UNSUPPORTED,
|
||||||
|
new DrmSessionEventListener.EventDispatcher(),
|
||||||
|
/* initialFormat= */ AUDIO_AAC,
|
||||||
|
ImmutableList.of(
|
||||||
|
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
END_OF_STREAM_ITEM));
|
||||||
|
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||||
|
mediaCodecAudioRenderer.enable(
|
||||||
|
RendererConfiguration.DEFAULT,
|
||||||
|
new Format[] {AUDIO_AAC},
|
||||||
|
fakeSampleStream,
|
||||||
|
/* positionUs= */ 0,
|
||||||
|
/* joining= */ false,
|
||||||
|
/* mayRenderStartOfStream= */ false,
|
||||||
|
/* startPositionUs= */ 0,
|
||||||
|
/* offsetUs= */ 0,
|
||||||
|
new MediaSource.MediaPeriodId(new Object()));
|
||||||
|
// Represents audio sink buffers being full when trying to write 150_000 us sample.
|
||||||
|
when(audioSink.handleBuffer(
|
||||||
|
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
|
||||||
|
.thenReturn(false);
|
||||||
|
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||||
|
mediaCodecAudioRenderer.start();
|
||||||
|
for (int i = 0; i < 10; i++) {
|
||||||
|
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
||||||
|
}
|
||||||
|
|
||||||
|
long durationToProgressUs =
|
||||||
|
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||||
|
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
assertThat(durationToProgressUs).isEqualTo(10_000L);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void
|
||||||
|
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterRenderToEndOfStream_returnsCalculatedDuration()
|
||||||
|
throws Exception {
|
||||||
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
|
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
|
||||||
mediaCodecAudioRenderer =
|
mediaCodecAudioRenderer =
|
||||||
new MediaCodecAudioRenderer(
|
new MediaCodecAudioRenderer(
|
||||||
@ -1147,6 +1026,78 @@ public class MediaCodecAudioRendererTest {
|
|||||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void
|
||||||
|
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
|
||||||
|
throws Exception {
|
||||||
|
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
|
||||||
|
mediaCodecAudioRenderer =
|
||||||
|
new MediaCodecAudioRenderer(
|
||||||
|
ApplicationProvider.getApplicationContext(),
|
||||||
|
new DefaultMediaCodecAdapterFactory(
|
||||||
|
ApplicationProvider.getApplicationContext(),
|
||||||
|
() -> {
|
||||||
|
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
|
||||||
|
return callbackThread;
|
||||||
|
},
|
||||||
|
() -> {
|
||||||
|
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
|
||||||
|
return queueingThread;
|
||||||
|
}),
|
||||||
|
mediaCodecSelector,
|
||||||
|
/* enableDecoderFallback= */ false,
|
||||||
|
new Handler(Looper.getMainLooper()),
|
||||||
|
audioRendererEventListener,
|
||||||
|
audioSink) {
|
||||||
|
@Override
|
||||||
|
protected void renderToEndOfStream() throws ExoPlaybackException {
|
||||||
|
super.renderToEndOfStream();
|
||||||
|
hasCalledRenderToEndOfStream.set(true);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||||
|
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||||
|
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||||
|
FakeSampleStream fakeSampleStream =
|
||||||
|
new FakeSampleStream(
|
||||||
|
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||||
|
/* mediaSourceEventDispatcher= */ null,
|
||||||
|
DrmSessionManager.DRM_UNSUPPORTED,
|
||||||
|
new DrmSessionEventListener.EventDispatcher(),
|
||||||
|
/* initialFormat= */ AUDIO_AAC,
|
||||||
|
ImmutableList.of(
|
||||||
|
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||||
|
END_OF_STREAM_ITEM));
|
||||||
|
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||||
|
mediaCodecAudioRenderer.enable(
|
||||||
|
RendererConfiguration.DEFAULT,
|
||||||
|
new Format[] {AUDIO_AAC},
|
||||||
|
fakeSampleStream,
|
||||||
|
/* positionUs= */ 0,
|
||||||
|
/* joining= */ false,
|
||||||
|
/* mayRenderStartOfStream= */ false,
|
||||||
|
/* startPositionUs= */ 0,
|
||||||
|
/* offsetUs= */ 0,
|
||||||
|
new MediaSource.MediaPeriodId(new Object()));
|
||||||
|
mediaCodecAudioRenderer.start();
|
||||||
|
mediaCodecAudioRenderer.setCurrentStreamFinal();
|
||||||
|
while (!hasCalledRenderToEndOfStream.get()) {
|
||||||
|
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
||||||
|
}
|
||||||
|
|
||||||
|
long durationToProgressUs =
|
||||||
|
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||||
|
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||||
|
|
||||||
|
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||||
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
|
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
|
||||||
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
||||||
|
@ -757,12 +757,6 @@ public final class Mp4Extractor implements Extractor, SeekMap {
|
|||||||
roleFlags |=
|
roleFlags |=
|
||||||
firstVideoTrackIndex == C.INDEX_UNSET ? C.ROLE_FLAG_MAIN : C.ROLE_FLAG_ALTERNATE;
|
firstVideoTrackIndex == C.INDEX_UNSET ? C.ROLE_FLAG_MAIN : C.ROLE_FLAG_ALTERNATE;
|
||||||
}
|
}
|
||||||
if (track.format.frameRate == Format.NO_VALUE
|
|
||||||
&& trackDurationUs > 0
|
|
||||||
&& trackSampleTable.sampleCount > 0) {
|
|
||||||
float frameRate = trackSampleTable.sampleCount / (trackDurationUs / 1000000f);
|
|
||||||
formatBuilder.setFrameRate(frameRate);
|
|
||||||
}
|
|
||||||
if (readingAuxiliaryTracks) {
|
if (readingAuxiliaryTracks) {
|
||||||
roleFlags |= C.ROLE_FLAG_AUXILIARY;
|
roleFlags |= C.ROLE_FLAG_AUXILIARY;
|
||||||
formatBuilder.setAuxiliaryTrackType(auxiliaryTrackTypesForAuxiliaryTracks.get(i));
|
formatBuilder.setAuxiliaryTrackType(auxiliaryTrackTypesForAuxiliaryTracks.get(i));
|
||||||
|
@ -181,7 +181,7 @@ import java.util.List;
|
|||||||
? 0
|
? 0
|
||||||
: durationMs == 0
|
: durationMs == 0
|
||||||
? 100
|
? 100
|
||||||
: Util.constrainValue((int) ((bufferedPositionMs * 100) / durationMs), 0, 100);
|
: Util.constrainValue(Util.percentInt(bufferedPositionMs, durationMs), 0, 100);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -215,6 +215,11 @@ public class StubExoPlayer extends StubPlayer implements ExoPlayer {
|
|||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setScrubbingModeEnabled(boolean scrubbingModeEnabled) {
|
||||||
|
throw new UnsupportedOperationException();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void setVideoEffects(List<Effect> videoEffects) {
|
public void setVideoEffects(List<Effect> videoEffects) {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
|
@@ -17,6 +17,7 @@
 package androidx.media3.transformer;

 import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Util.percentInt;
 import static androidx.media3.exoplayer.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
 import static androidx.media3.exoplayer.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS;
 import static androidx.media3.exoplayer.DefaultLoadControl.DEFAULT_MAX_BUFFER_MS;
@@ -275,7 +276,7 @@ public final class ExoPlayerAssetLoader implements AssetLoader {
       // The player position can become greater than the duration. This happens if the player is
       // using a StandaloneMediaClock because the renderers have ended.
       long positionMs = min(player.getCurrentPosition(), durationMs);
-      progressHolder.progress = (int) (positionMs * 100 / durationMs);
+      progressHolder.progress = percentInt(positionMs, durationMs);
     }
     return progressState;
   }
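The asset loaders touched in this change all report progress through `getProgress(ProgressHolder)`, which `Transformer` exposes in aggregate. A minimal polling sketch against the public API (the `onProgress` callback is illustrative):

import androidx.media3.transformer.ProgressHolder
import androidx.media3.transformer.Transformer

// Sketch: poll once and report the integer percentage when it is available.
fun pollProgress(transformer: Transformer, onProgress: (Int) -> Unit) {
  val holder = ProgressHolder()
  if (transformer.getProgress(holder) == Transformer.PROGRESS_STATE_AVAILABLE) {
    onProgress(holder.progress) // 0-100
  }
}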
@@ -18,6 +18,7 @@ package androidx.media3.transformer;
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
+import static androidx.media3.common.util.Util.percentInt;
 import static androidx.media3.transformer.ExportException.ERROR_CODE_UNSPECIFIED;
 import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_END_OF_STREAM;
 import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_TRY_AGAIN_LATER;
@@ -26,7 +27,6 @@ import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
 import static androidx.media3.transformer.TransformerUtil.getValidColor;
 import static java.lang.Math.min;
-import static java.lang.Math.round;

 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -141,7 +141,7 @@ public final class RawAssetLoader implements AssetLoader {
       if (lastTimestampUs == Long.MAX_VALUE) {
         lastTimestampUs = 0;
       }
-      progressHolder.progress = round((lastTimestampUs / (float) editedMediaItem.durationUs) * 100);
+      progressHolder.progress = percentInt(lastTimestampUs, editedMediaItem.durationUs);
     }
     return progressState;
   }
@@ -19,6 +19,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static androidx.media3.common.util.Util.percentInt;
 import static androidx.media3.effect.DebugTraceUtil.COMPONENT_ASSET_LOADER;
 import static androidx.media3.effect.DebugTraceUtil.EVENT_INPUT_FORMAT;
 import static androidx.media3.effect.DebugTraceUtil.EVENT_OUTPUT_FORMAT;
@@ -178,7 +179,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return progressState;
     }

-    int progress = currentMediaItemIndex * 100 / mediaItemCount;
+    int progress = percentInt(currentMediaItemIndex, mediaItemCount);
     if (progressState == PROGRESS_STATE_AVAILABLE) {
       progress += progressHolder.progress / mediaItemCount;
     }
@@ -17,13 +17,13 @@ package androidx.media3.transformer;

 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Util.percentInt;
 import static androidx.media3.transformer.ExportException.ERROR_CODE_UNSPECIFIED;
 import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_END_OF_STREAM;
 import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_TRY_AGAIN_LATER;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
 import static androidx.media3.transformer.TransformerUtil.getValidColor;
-import static java.lang.Math.round;

 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -106,7 +106,7 @@ public final class TextureAssetLoader implements AssetLoader {
   public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
     if (progressState == PROGRESS_STATE_AVAILABLE) {
       progressHolder.progress =
-          round((lastQueuedPresentationTimeUs / (float) editedMediaItem.durationUs) * 100);
+          percentInt(lastQueuedPresentationTimeUs, editedMediaItem.durationUs);
     }
     return progressState;
   }
@@ -18,8 +18,6 @@ package androidx.media3.transformer;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
 import static com.google.common.truth.Truth.assertThat;
-import static java.lang.Math.min;
-import static java.lang.Math.round;

 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -118,8 +116,7 @@ public class RawAssetLoaderTest {

     assertThat(queuedAudioData).isTrue();
     assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
-    assertThat(progressHolder.progress)
-        .isEqualTo(round(audioSamplePresentationTimeUs * 100 / (float) audioDurationUs));
+    assertThat(progressHolder.progress).isEqualTo(10);
   }

   @Test
@@ -168,8 +165,7 @@ public class RawAssetLoaderTest {

     assertThat(queuedInputTexture).isTrue();
     assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
-    assertThat(progressHolder.progress)
-        .isEqualTo(round(videoSamplePresentationTimeUs * 100 / (float) videoDurationUs));
+    assertThat(progressHolder.progress).isEqualTo(10);
   }

   @Test
@@ -199,12 +195,7 @@ public class RawAssetLoaderTest {
     assertThat(queuedAudioData).isTrue();
     assertThat(queuedInputTexture).isTrue();
     assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
-    assertThat(progressHolder.progress)
-        .isEqualTo(
-            round(
-                min(audioSamplePresentationTimeUs, videoSamplePresentationTimeUs)
-                    * 100
-                    / (float) mediaDurationUs));
+    assertThat(progressHolder.progress).isEqualTo(10);
   }

   private static EditedMediaItem getEditedMediaItem(long mediaDurationUs) {
@@ -16,24 +16,22 @@

 package androidx.media3.ui.compose

-import android.view.Surface
+import android.view.SurfaceView
+import android.view.TextureView
 import androidx.annotation.IntDef
-import androidx.compose.foundation.AndroidEmbeddedExternalSurface
-import androidx.compose.foundation.AndroidExternalSurface
-import androidx.compose.foundation.AndroidExternalSurfaceScope
 import androidx.compose.runtime.Composable
 import androidx.compose.runtime.getValue
 import androidx.compose.runtime.rememberUpdatedState
 import androidx.compose.ui.Modifier
+import androidx.compose.ui.viewinterop.AndroidView
 import androidx.media3.common.Player
 import androidx.media3.common.util.UnstableApi

 /**
  * Provides a dedicated drawing [Surface] for media playbacks using a [Player].
  *
- * The player's video output is displayed with either a
- * [android.view.SurfaceView]/[AndroidExternalSurface] or a
- * [android.view.TextureView]/[AndroidEmbeddedExternalSurface].
+ * The player's video output is displayed with either a [android.view.SurfaceView] or a
+ * [android.view.TextureView].
  *
  * [Player] takes care of attaching the rendered output to the [Surface] and clearing it, when it is
  * destroyed.
@@ -52,32 +50,36 @@ fun PlayerSurface(
   // Player might change between compositions,
   // we need long-lived surface-related lambdas to always use the latest value
   val currentPlayer by rememberUpdatedState(player)
-  val onSurfaceCreated: (Surface) -> Unit = { surface ->
-    if (currentPlayer.isCommandAvailable(Player.COMMAND_SET_VIDEO_SURFACE))
-      currentPlayer.setVideoSurface(surface)
-  }
-  val onSurfaceDestroyed: () -> Unit = {
-    if (currentPlayer.isCommandAvailable(Player.COMMAND_SET_VIDEO_SURFACE))
-      currentPlayer.clearVideoSurface()
-  }
-  val onSurfaceInitialized: AndroidExternalSurfaceScope.() -> Unit = {
-    onSurface { surface, _, _ ->
-      onSurfaceCreated(surface)
-      surface.onDestroyed { onSurfaceDestroyed() }
-    }
-  }

   when (surfaceType) {
     SURFACE_TYPE_SURFACE_VIEW ->
-      AndroidExternalSurface(modifier = modifier, onInit = onSurfaceInitialized)
+      AndroidView(
+        factory = {
+          SurfaceView(it).apply {
+            if (currentPlayer.isCommandAvailable(Player.COMMAND_SET_VIDEO_SURFACE))
+              currentPlayer.setVideoSurfaceView(this)
+          }
+        },
+        onReset = {},
+        modifier = modifier,
+      )
     SURFACE_TYPE_TEXTURE_VIEW ->
-      AndroidEmbeddedExternalSurface(modifier = modifier, onInit = onSurfaceInitialized)
+      AndroidView(
+        factory = {
+          TextureView(it).apply {
+            if (currentPlayer.isCommandAvailable(Player.COMMAND_SET_VIDEO_SURFACE))
+              currentPlayer.setVideoTextureView(this)
+          }
+        },
+        onReset = {},
+        modifier = modifier,
+      )
     else -> throw IllegalArgumentException("Unrecognized surface type: $surfaceType")
   }
 }

 /**
- * The type of surface view used for media playbacks. One of [SURFACE_TYPE_SURFACE_VIEW] or
+ * The type of surface used for media playbacks. One of [SURFACE_TYPE_SURFACE_VIEW] or
  * [SURFACE_TYPE_TEXTURE_VIEW].
  */
 @UnstableApi
@@ -86,7 +88,7 @@ fun PlayerSurface(
 @IntDef(SURFACE_TYPE_SURFACE_VIEW, SURFACE_TYPE_TEXTURE_VIEW)
 annotation class SurfaceType

-/** Surface type equivalent to [android.view.SurfaceView]. */
+/** Surface type to create [android.view.SurfaceView]. */
 @UnstableApi const val SURFACE_TYPE_SURFACE_VIEW = 1
-/** Surface type equivalent to [android.view.TextureView]. */
+/** Surface type to create [android.view.TextureView]. */
 @UnstableApi const val SURFACE_TYPE_TEXTURE_VIEW = 2
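App-side usage of `PlayerSurface` is unchanged by the switch to `AndroidView`; a minimal composable sketch (player construction and lifecycle handling omitted):

import androidx.compose.runtime.Composable
import androidx.media3.common.Player
import androidx.media3.ui.compose.PlayerSurface
import androidx.media3.ui.compose.SURFACE_TYPE_SURFACE_VIEW

@Composable
fun VideoSurface(player: Player) {
  // Pass SURFACE_TYPE_TEXTURE_VIEW instead to back the output with a TextureView.
  PlayerSurface(player = player, surfaceType = SURFACE_TYPE_SURFACE_VIEW)
}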
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.ui.compose
+
+import android.view.SurfaceView
+import android.view.TextureView
+import androidx.compose.runtime.MutableIntState
+import androidx.compose.runtime.mutableIntStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.ui.test.junit4.createComposeRule
+import androidx.media3.common.Player
+import androidx.media3.ui.compose.utils.TestPlayer
+import androidx.test.ext.junit.runners.AndroidJUnit4
+import com.google.common.truth.Truth.assertThat
+import org.junit.Rule
+import org.junit.Test
+import org.junit.runner.RunWith
+
+/** Unit test for [PlayerSurface]. */
+@RunWith(AndroidJUnit4::class)
+class PlayerSurfaceTest {
+
+  @get:Rule val composeTestRule = createComposeRule()
+
+  @Test
+  fun playerSurface_withSurfaceViewType_setsSurfaceViewOnPlayer() {
+    val player = TestPlayer()
+
+    composeTestRule.setContent {
+      PlayerSurface(player = player, surfaceType = SURFACE_TYPE_SURFACE_VIEW)
+    }
+    composeTestRule.waitForIdle()
+
+    assertThat(player.videoOutput).isInstanceOf(SurfaceView::class.java)
+  }
+
+  @Test
+  fun playerSurface_withTextureViewType_setsTextureViewOnPlayer() {
+    val player = TestPlayer()
+
+    composeTestRule.setContent {
+      PlayerSurface(player = player, surfaceType = SURFACE_TYPE_TEXTURE_VIEW)
+    }
+    composeTestRule.waitForIdle()
+
+    assertThat(player.videoOutput).isInstanceOf(TextureView::class.java)
+  }
+
+  @Test
+  fun playerSurface_withoutSupportedCommand_doesNotSetSurfaceOnPlayer() {
+    val player = TestPlayer()
+    player.removeCommands(Player.COMMAND_SET_VIDEO_SURFACE)
+
+    composeTestRule.setContent {
+      PlayerSurface(player = player, surfaceType = SURFACE_TYPE_TEXTURE_VIEW)
+    }
+    composeTestRule.waitForIdle()
+
+    assertThat(player.videoOutput).isNull()
+  }
+
+  @Test
+  fun playerSurface_withUpdateSurfaceType_setsNewSurfaceOnPlayer() {
+    val player = TestPlayer()
+
+    lateinit var surfaceType: MutableIntState
+    composeTestRule.setContent {
+      surfaceType = remember { mutableIntStateOf(SURFACE_TYPE_TEXTURE_VIEW) }
+      PlayerSurface(player = player, surfaceType = surfaceType.intValue)
+    }
+    composeTestRule.waitForIdle()
+    surfaceType.intValue = SURFACE_TYPE_SURFACE_VIEW
+    composeTestRule.waitForIdle()
+
+    assertThat(player.videoOutput).isInstanceOf(SurfaceView::class.java)
+  }
+}
@@ -41,6 +41,9 @@ internal class TestPlayer : SimpleBasePlayer(Looper.myLooper()!!) {
       .setPlayWhenReady(true, PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST)
       .build()

+  var videoOutput: Any? = null
+    private set
+
   override fun getState(): State {
     return state
   }
@@ -99,6 +102,18 @@ internal class TestPlayer : SimpleBasePlayer(Looper.myLooper()!!) {
     return Futures.immediateVoidFuture()
   }

+  override fun handleSetVideoOutput(videoOutput: Any): ListenableFuture<*> {
+    this.videoOutput = videoOutput
+    return Futures.immediateVoidFuture()
+  }
+
+  override fun handleClearVideoOutput(videoOutput: Any?): ListenableFuture<*> {
+    if (videoOutput == null || videoOutput == this.videoOutput) {
+      this.videoOutput = null
+    }
+    return Futures.immediateVoidFuture()
+  }
+
   fun setPlaybackState(playbackState: @Player.State Int) {
     state = state.buildUpon().setPlaybackState(playbackState).build()
     invalidateState()
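`SimpleBasePlayer` presumably routes `setVideoSurfaceView`/`setVideoTextureView` into `handleSetVideoOutput` and surface clears into `handleClearVideoOutput`, which is what lets the Compose test assert on `videoOutput`. A rough sketch of that interaction (assumed routing; `context` comes from the test environment):

import android.content.Context
import android.view.SurfaceView

// Sketch: set and clear a video output on the TestPlayer defined above.
fun exerciseVideoOutput(player: TestPlayer, context: Context) {
  val surfaceView = SurfaceView(context)
  player.setVideoSurfaceView(surfaceView) // expected to reach handleSetVideoOutput(surfaceView)
  check(player.videoOutput === surfaceView)
  player.clearVideoSurface() // expected to reach handleClearVideoOutput(null)
  check(player.videoOutput == null)
}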