mirror of https://github.com/androidx/media.git (synced 2025-04-29 14:26:50 +08:00)

Compare commits: 37 commits, 51efcad672 ... 839c4a90f2
Commits in this comparison:

839c4a90f2, 184416adfc, aea0637c95, f533f55cd6, 177092d935, 153974e06c, d7234a6a4e,
24a8185f4d, d652ba6ae2, 75e2522862, 157fd8a260, ce0c98c4d4, b16f9fc4ec, 5b4ca08ca0,
c0dd97e770, f35c59b176, 82d7c628da, 9cfaf78994, cb80fe4c7c, 567ee030b3, 2bdf632369,
7c274caa1f, 9d09840bad, 9483cbfed4, 358e002076, 1ee030afb1, ef8f72d684, 2939bfccbe,
1d2019b770, 3c4410e967, d79d6b2b8b, d920cf87a6, 963bae9dd8, dcb67102b9, 75e28d82e3,
673da97fde, 57c82710ea
.github/ISSUE_TEMPLATE/bug.yml (vendored), 4 changes
@@ -19,6 +19,7 @@ body:
      options:
        - Media3 main branch
        - Media3 pre-release (alpha, beta or RC not in this list)
        - Media3 1.6.1
        - Media3 1.6.0
        - Media3 1.5.1
        - Media3 1.5.0
@@ -44,9 +45,6 @@ body:
        - ExoPlayer 2.16.0
        - ExoPlayer 2.15.1
        - ExoPlayer 2.15.0
        - ExoPlayer 2.14.2
        - ExoPlayer 2.14.1
        - ExoPlayer 2.14.0
        - ExoPlayer dev-v2 branch
        - Older (unsupported)
    validations:
@@ -2,6 +2,69 @@

## 1.6

### 1.6.1 (2025-04-14)

This release includes the following changes since the
[1.6.0 release](#160-2025-03-26):

*   Common Library:
    *   Add `PlaybackParameters.withPitch(float)` method for easily copying a
        `PlaybackParameters` with a new `pitch` value
        ([#2257](https://github.com/androidx/media/issues/2257)).
*   ExoPlayer:
    *   Fix issue where media item transition fails due to recoverable renderer
        error during initialization of the next media item
        ([#2229](https://github.com/androidx/media/issues/2229)).
    *   Fix issue where `ProgressiveMediaPeriod` throws an
        `IllegalStateException` as `PreloadMediaSource` attempts to call its
        `getBufferedDurationUs()` before it is prepared
        ([#2315](https://github.com/androidx/media/issues/2315)).
    *   Fix sending `CmcdData` in manifest requests for DASH, HLS, and
        SmoothStreaming ([#2253](https://github.com/androidx/media/pull/2253)).
    *   Ensure `AdPlaybackState.withAdDurationsUs(long[][])` can be used after
        ad groups have been removed. The user still needs to pass in an array
        of durations for removed ad groups, which can be empty or null
        ([#2267](https://github.com/androidx/media/issues/2267)).
*   Extractors:
    *   MP4: Parse `alternate_group` from the `tkhd` box and expose it as an
        `Mp4AlternateGroupData` entry in each track's `Format.metadata`
        ([#2242](https://github.com/androidx/media/issues/2242)).
*   Audio:
    *   Fix offload issue where the position might get stuck when playing a
        playlist of short content
        ([#1920](https://github.com/androidx/media/issues/1920)).
*   Session:
    *   Lower aggregation timeout for platform `MediaSession` callbacks from
        500 to 100 milliseconds and add an experimental setter to allow apps to
        configure this value.
    *   Fix issue where notifications reappear after they have been dismissed
        by the user ([#2302](https://github.com/androidx/media/issues/2302)).
    *   Fix a bug where the session returned a single-item timeline when the
        wrapped player is actually empty. This happened when the wrapped player
        doesn't have `COMMAND_GET_TIMELINE` available while
        `COMMAND_GET_CURRENT_MEDIA_ITEM` is available and the wrapped player is
        empty ([#2320](https://github.com/androidx/media/issues/2320)).
    *   Fix a bug where calling
        `MediaSessionService.setMediaNotificationProvider` is silently ignored
        after other interactions with the service like
        `setForegroundServiceTimeoutMs`
        ([#2305](https://github.com/androidx/media/issues/2305)).
*   UI:
    *   Enable `PlayerSurface` to work with `ExoPlayer.setVideoEffects` and
        `CompositionPlayer`.
    *   Fix bug where `PlayerSurface` can't be recomposed with a new `Player`.
*   HLS extension:
    *   Fix issue where chunk duration wasn't set in `CmcdData` for HLS media,
        causing an assertion failure when processing encrypted media segments
        ([#2312](https://github.com/androidx/media/issues/2312)).
*   RTSP extension:
    *   Add support for URI with RTSPT scheme as a way to configure the RTSP
        session to use TCP
        ([#1484](https://github.com/androidx/media/issues/1484)).
*   Cast extension:
    *   Add support for playlist metadata
        ([#2235](https://github.com/androidx/media/pull/2235)).

### 1.6.0 (2025-03-26)

This release includes the following changes since the
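The RTSP bullet above adds an `rtspt://` scheme as a way to force TCP transport. A minimal, illustrative sketch of pointing a player at such a URI (the address is a hypothetical example and the RTSP module is assumed to be on the classpath):

```java
// Requesting an RTSP stream over TCP via the new "rtspt" scheme.
ExoPlayer player = new ExoPlayer.Builder(context).build();
player.setMediaItem(MediaItem.fromUri("rtspt://example.com:554/stream"));
player.prepare();
player.play();
```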
@@ -12,8 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
project.ext {
    releaseVersion = '1.6.0'
    releaseVersionCode = 1_006_000_3_00
    releaseVersion = '1.6.1'
    releaseVersionCode = 1_006_001_3_00
    minSdkVersion = 21
    // See https://developer.android.com/training/cars/media/automotive-os#automotive-module
    automotiveMinSdkVersion = 28
@@ -16,6 +16,7 @@
package androidx.media3.cast;

import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.common.util.Util.castNonNull;
import static java.lang.Math.min;
@@ -166,6 +167,7 @@ public final class CastPlayer extends BasePlayer {
  private long pendingSeekPositionMs;
  @Nullable private PositionInfo pendingMediaItemRemovalPosition;
  private MediaMetadata mediaMetadata;
  private MediaMetadata playlistMetadata;
  private DeviceInfo deviceInfo;

  /**
@@ -268,6 +270,7 @@ public final class CastPlayer extends BasePlayer {
    playbackState = STATE_IDLE;
    currentTimeline = CastTimeline.EMPTY_CAST_TIMELINE;
    mediaMetadata = MediaMetadata.EMPTY;
    playlistMetadata = MediaMetadata.EMPTY;
    currentTracks = Tracks.EMPTY;
    availableCommands = new Commands.Builder().addAll(PERMANENT_AVAILABLE_COMMANDS).build();
    pendingSeekWindowIndex = C.INDEX_UNSET;
@@ -656,14 +659,19 @@ public final class CastPlayer extends BasePlayer {

  @Override
  public MediaMetadata getPlaylistMetadata() {
    // CastPlayer does not currently support metadata.
    return MediaMetadata.EMPTY;
    return playlistMetadata;
  }

  /** This method is not supported and does nothing. */
  @Override
  public void setPlaylistMetadata(MediaMetadata mediaMetadata) {
    // CastPlayer does not currently support metadata.
  public void setPlaylistMetadata(MediaMetadata playlistMetadata) {
    checkNotNull(playlistMetadata);
    if (playlistMetadata.equals(this.playlistMetadata)) {
      return;
    }
    this.playlistMetadata = playlistMetadata;
    listeners.sendEvent(
        EVENT_PLAYLIST_METADATA_CHANGED,
        listener -> listener.onPlaylistMetadataChanged(this.playlistMetadata));
  }

  @Override
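A brief, illustrative sketch of the playlist-metadata support added to `CastPlayer` above; `castContext` is assumed to be an already-initialized `CastContext`, and the metadata values and listener body are invented for the example:

```java
CastPlayer castPlayer = new CastPlayer(castContext);
castPlayer.addListener(
    new Player.Listener() {
      @Override
      public void onPlaylistMetadataChanged(MediaMetadata playlistMetadata) {
        // React to the update, e.g. refresh the queue title in the UI.
      }
    });
// Publishing metadata for the whole playlist now triggers the callback above.
castPlayer.setPlaylistMetadata(
    new MediaMetadata.Builder().setTitle("Road trip queue").build());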
@@ -1800,7 +1800,7 @@ public class CastPlayerTest {
  }

  @Test
  public void setMediaItems_doesNotifyOnMetadataChanged() {
  public void setMediaItems_doesNotifyOnMediaMetadataChanged() {
    when(mockRemoteMediaClient.queueJumpToItem(anyInt(), anyLong(), eq(null)))
        .thenReturn(mockPendingResult);
    ArgumentCaptor<MediaMetadata> metadataCaptor = ArgumentCaptor.forClass(MediaMetadata.class);
@@ -1827,7 +1827,7 @@ public class CastPlayerTest {
            .build());
    castPlayer.addListener(mockListener);

    MediaMetadata intitalMetadata = castPlayer.getMediaMetadata();
    MediaMetadata initialMetadata = castPlayer.getMediaMetadata();
    castPlayer.setMediaItems(firstPlaylist, /* startIndex= */ 0, /* startPositionMs= */ 2000L);
    updateTimeLine(firstPlaylist, /* mediaQueueItemIds= */ new int[] {1}, /* currentItemId= */ 1);
    MediaMetadata firstMetadata = castPlayer.getMediaMetadata();
@@ -1850,7 +1850,7 @@ public class CastPlayerTest {
            secondPlaylist.get(1).mediaMetadata,
            secondPlaylist.get(0).mediaMetadata)
        .inOrder();
    assertThat(intitalMetadata).isEqualTo(MediaMetadata.EMPTY);
    assertThat(initialMetadata).isEqualTo(MediaMetadata.EMPTY);
    assertThat(ImmutableList.of(firstMetadata, secondMetadata, thirdMetadata))
        .containsExactly(
            firstPlaylist.get(0).mediaMetadata,
@@ -1898,6 +1898,35 @@ public class CastPlayerTest {
    verify(mockListener, never()).onMediaMetadataChanged(any());
  }

  @Test
  public void setPlaylistMetadata_doesNotifyOnPlaylistMetadataChanged() {
    castPlayer.addListener(mockListener);

    MediaMetadata metadata = new MediaMetadata.Builder().setArtist("foo").build();

    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(MediaMetadata.EMPTY);

    castPlayer.setPlaylistMetadata(metadata);

    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(metadata);

    verify(mockListener).onPlaylistMetadataChanged(metadata);
  }

  @Test
  public void setPlaylistMetadata_equalMetadata_doesNotNotifyOnPlaylistMetadataChanged() {
    castPlayer.addListener(mockListener);

    MediaMetadata metadata = new MediaMetadata.Builder().setArtist("foo").build();

    castPlayer.setPlaylistMetadata(metadata);
    castPlayer.setPlaylistMetadata(metadata);

    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(metadata);

    verify(mockListener, times(1)).onPlaylistMetadataChanged(metadata);
  }

  @Test
  public void getDeviceInfo_returnsCorrectDeviceInfoWithPlaybackTypeRemote() {
    DeviceInfo deviceInfo = castPlayer.getDeviceInfo();
@@ -1072,14 +1072,23 @@ public final class AdPlaybackState {
  /**
   * Returns an instance with the specified ad durations, in microseconds.
   *
   * <p>Must only be used if {@link #removedAdGroupCount} is 0.
   * <p>The number of arrays of durations ({@code adDurations.length}) must always be equal to
   * {@link #adGroupCount}. This is required even on an instance created with {@link
   * #withRemovedAdGroupCount(int)}. The array of durations at the index of a removed ad group can
   * be null or empty.
   *
   * @throws IllegalArgumentException if {@code adDurations.length != adGroupCount}.
   */
  @CheckResult
  public AdPlaybackState withAdDurationsUs(long[][] adDurationUs) {
    checkState(removedAdGroupCount == 0);
    checkArgument(adDurationUs.length == adGroupCount);
    AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length);
    for (int adGroupIndex = 0; adGroupIndex < adGroupCount; adGroupIndex++) {
      adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdDurationsUs(adDurationUs[adGroupIndex]);
    for (int correctedAdGroupIndex = 0;
        correctedAdGroupIndex < adGroupCount - removedAdGroupCount;
        correctedAdGroupIndex++) {
      adGroups[correctedAdGroupIndex] =
          adGroups[correctedAdGroupIndex].withAdDurationsUs(
              adDurationUs[removedAdGroupCount + correctedAdGroupIndex]);
    }
    return new AdPlaybackState(
        adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount);
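An illustrative sketch of the relaxed contract documented above, using made-up ad group times and durations; the outer array still has one slot per ad group, and the slot for the removed group may be empty or null:

```java
AdPlaybackState state =
    new AdPlaybackState("adsId", /* adGroupTimesUs...= */ 0, 10_000_000)
        .withAdCount(/* adGroupIndex= */ 0, 1)
        .withAdCount(/* adGroupIndex= */ 1, 1)
        .withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);
// Group 0 has been removed, but durations can still be updated afterwards.
long[][] durationsUs = {new long[0], new long[] {15_000_000L}};
state = state.withAdDurationsUs(durationsUs);
```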
@@ -1039,7 +1039,10 @@ public final class Format {
  /** The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. */
  public final int sampleRate;

  /** The {@link C.PcmEncoding} for PCM audio. Set to {@link #NO_VALUE} for other media types. */
  /**
   * The {@link C.PcmEncoding} for PCM or losslessly compressed audio. Set to {@link #NO_VALUE} for
   * other media types.
   */
  @UnstableApi public final @C.PcmEncoding int pcmEncoding;

  /**
@@ -29,11 +29,11 @@ public final class MediaLibraryInfo {

  /** The version of the library expressed as a string, for example "1.2.3" or "1.2.0-beta01". */
  // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
  public static final String VERSION = "1.6.0";
  public static final String VERSION = "1.6.1";

  /** The version of the library expressed as {@code TAG + "/" + VERSION}. */
  // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
  public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.0";
  public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.1";

  /**
   * The version of the library expressed as an integer, for example 1002003300.
@@ -47,7 +47,7 @@ public final class MediaLibraryInfo {
   * (123-045-006-3-00).
   */
  // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
  public static final int VERSION_INT = 1_006_000_3_00;
  public static final int VERSION_INT = 1_006_001_3_00;

  /** Whether the library was compiled with {@link Assertions} checks enabled. */
  public static final boolean ASSERTIONS_ENABLED = true;
@@ -88,6 +88,18 @@ public final class PlaybackParameters {
    return new PlaybackParameters(speed, pitch);
  }

  /**
   * Returns a copy with the given pitch.
   *
   * @param pitch The new pitch. Must be greater than zero.
   * @return The copied playback parameters.
   */
  @UnstableApi
  @CheckResult
  public PlaybackParameters withPitch(@FloatRange(from = 0, fromInclusive = false) float pitch) {
    return new PlaybackParameters(speed, pitch);
  }

  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
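A short usage sketch of the new `withPitch` copy method; the pitch value is chosen arbitrarily for illustration:

```java
// Raise pitch by 20% without touching the playback speed.
PlaybackParameters current = player.getPlaybackParameters();
player.setPlaybackParameters(current.withPitch(1.2f));
```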
@@ -1613,7 +1613,7 @@ public interface Player {
  /** {@link #getDeviceInfo()} changed. */
  int EVENT_DEVICE_INFO_CHANGED = 29;

  /** {@link #getDeviceVolume()} changed. */
  /** {@link #getDeviceVolume()} or {@link #isDeviceMuted()} changed. */
  int EVENT_DEVICE_VOLUME_CHANGED = 30;

  /**
@@ -220,10 +220,7 @@ public final class AudioManagerCompat {
    try {
      return audioManager.getStreamVolume(streamType);
    } catch (RuntimeException e) {
      Log.w(
          "AudioManagerCompat",
          "Could not retrieve stream volume for stream type " + streamType,
          e);
      Log.w(TAG, "Could not retrieve stream volume for stream type " + streamType, e);
      return audioManager.getStreamMaxVolume(streamType);
    }
  }
@@ -128,6 +128,10 @@ public final class MediaFormatUtil {

    formatBuilder.setInitializationData(csdBuffers.build());

    if (mediaFormat.containsKey(MediaFormat.KEY_TRACK_ID)) {
      formatBuilder.setId(mediaFormat.getInteger(MediaFormat.KEY_TRACK_ID));
    }

    return formatBuilder.build();
  }

@@ -175,6 +179,10 @@ public final class MediaFormatUtil {
    result.setInteger(MediaFormat.KEY_ENCODER_PADDING, format.encoderPadding);

    maybeSetPixelAspectRatio(result, format.pixelWidthHeightRatio);

    if (format.id != null) {
      result.setInteger(MediaFormat.KEY_TRACK_ID, Integer.parseInt(format.id));
    }
    return result;
  }

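A hedged sketch of the track-ID mapping enabled above; the id and MIME type are arbitrary example values, and the conversion only carries the id when it parses as an integer:

```java
Format format =
    new Format.Builder().setId(1).setSampleMimeType(MimeTypes.AUDIO_AAC).build();
MediaFormat mediaFormat = MediaFormatUtil.createMediaFormatFromFormat(format);
// The Format id is now surfaced as MediaFormat.KEY_TRACK_ID.
int trackId = mediaFormat.getInteger(MediaFormat.KEY_TRACK_ID); // 1
```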
@@ -2570,7 +2570,8 @@ public final class Util {
   */
  public static @ContentType int inferContentType(Uri uri) {
    @Nullable String scheme = uri.getScheme();
    if (scheme != null && Ascii.equalsIgnoreCase("rtsp", scheme)) {
    if (scheme != null
        && (Ascii.equalsIgnoreCase("rtsp", scheme) || Ascii.equalsIgnoreCase("rtspt", scheme))) {
      return C.CONTENT_TYPE_RTSP;
    }

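An illustrative check of the widened scheme handling above; the hostnames are placeholders:

```java
// Both spellings now map to the RTSP content type.
int a = Util.inferContentType(Uri.parse("rtsp://example.com/live"));  // C.CONTENT_TYPE_RTSP
int b = Util.inferContentType(Uri.parse("rtspt://example.com/live")); // C.CONTENT_TYPE_RTSP
```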
@@ -1036,4 +1036,110 @@ public class AdPlaybackStateTest {
    assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle()).ids[1]).isNull();
    assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle())).isEqualTo(adGroup);
  }

  @Test
  public void setDurationsUs_withRemovedAdGroups_updatedCorrectlyAndSafely() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId")
            .withLivePostrollPlaceholderAppended(false)
            .withNewAdGroup(/* adGroupIndex= */ 0, 10_000)
            .withAdCount(/* adGroupIndex= */ 0, 1)
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 0,
                /* adIndexInAdGroup= */ 0,
                MediaItem.fromUri("http://example.com/0-0"))
            .withNewAdGroup(/* adGroupIndex= */ 1, 11_000)
            .withAdCount(/* adGroupIndex= */ 1, 2)
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 1,
                /* adIndexInAdGroup= */ 0,
                MediaItem.fromUri("http://example.com/1-0"))
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 1,
                /* adIndexInAdGroup= */ 1,
                MediaItem.fromUri("http://example.com/1-1"))
            .withNewAdGroup(/* adGroupIndex= */ 2, 12_000)
            .withAdCount(/* adGroupIndex= */ 2, 1)
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 2,
                /* adIndexInAdGroup= */ 0,
                MediaItem.fromUri("http://example.com/2-0"));
    long[][] adDurationsUs = {
      new long[] {10L}, new long[] {20L, 21L}, new long[] {30L}, new long[] {C.TIME_END_OF_SOURCE}
    };

    adPlaybackState =
        adPlaybackState
            .withAdDurationsUs(adDurationsUs)
            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);

    assertThat(adPlaybackState.adGroupCount).isEqualTo(4);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).durationsUs).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).count).isEqualTo(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).states).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).isPlaceholder).isFalse();
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).mediaItems).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).ids).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
        .asList()
        .containsExactly(20L, 21L)
        .inOrder();
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
        .asList()
        .containsExactly(30L);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
        .asList()
        .containsExactly(C.TIME_END_OF_SOURCE);

    adDurationsUs[1][0] = 120L;
    adDurationsUs[1][1] = 121L;
    adPlaybackState = adPlaybackState.withAdDurationsUs(adDurationsUs);

    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
        .asList()
        .containsExactly(120L, 121L)
        .inOrder();
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
        .asList()
        .containsExactly(30L);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
        .asList()
        .containsExactly(C.TIME_END_OF_SOURCE);

    adDurationsUs[0] = null;
    adDurationsUs[1] = null;
    adDurationsUs[2][0] = C.TIME_UNSET;
    adPlaybackState =
        adPlaybackState
            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 2)
            .withAdDurationsUs(adDurationsUs);

    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
        .asList()
        .containsExactly(C.TIME_UNSET);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
        .asList()
        .containsExactly(C.TIME_END_OF_SOURCE);

    adDurationsUs[2] = null;
    adDurationsUs[3][0] = 0L;
    adPlaybackState =
        adPlaybackState
            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 3)
            .withAdDurationsUs(adDurationsUs);

    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs).hasLength(0);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
        .asList()
        .containsExactly(0L);

    adDurationsUs[3] = null;
    adPlaybackState =
        adPlaybackState
            .withRemovedAdGroupCount(/* removedAdGroupCount= */ 4)
            .withAdDurationsUs(adDurationsUs);

    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs).hasLength(0);
  }
}
@@ -0,0 +1,54 @@
/*
 * Copyright 2025 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.container;

import androidx.annotation.Nullable;
import androidx.media3.common.Metadata;
import androidx.media3.common.util.UnstableApi;

/** Stores MP4 {@code alternate_group} info parsed from a {@code tkhd} box. */
@UnstableApi
public final class Mp4AlternateGroupData implements Metadata.Entry {

  public final int alternateGroup;

  public Mp4AlternateGroupData(int alternateGroup) {
    this.alternateGroup = alternateGroup;
  }

  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof Mp4AlternateGroupData)) {
      return false;
    }

    Mp4AlternateGroupData other = (Mp4AlternateGroupData) obj;
    return alternateGroup == other.alternateGroup;
  }

  @Override
  public int hashCode() {
    return alternateGroup;
  }

  @Override
  public String toString() {
    return "Mp4AlternateGroup: " + alternateGroup;
  }
}
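A sketch of how the new entry could be read back after extraction; `trackFormat` is assumed to be a `Format` obtained from the MP4 extractor's track output:

```java
@Nullable Metadata metadata = trackFormat.metadata;
if (metadata != null) {
  for (int i = 0; i < metadata.length(); i++) {
    Metadata.Entry entry = metadata.get(i);
    if (entry instanceof Mp4AlternateGroupData) {
      // Tracks that share this value belong to the same alternate group.
      int alternateGroup = ((Mp4AlternateGroupData) entry).alternateGroup;
    }
  }
}
```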
@@ -245,6 +245,9 @@ public abstract class Mp4Box {
  @SuppressWarnings("ConstantCaseForConstants")
  public static final int TYPE_esds = 0x65736473;

  @SuppressWarnings("ConstantCaseForConstants")
  public static final int TYPE_btrt = 0x62747274;

  @SuppressWarnings("ConstantCaseForConstants")
  public static final int TYPE_moof = 0x6d6f6f66;
@@ -701,11 +701,13 @@ public class MediaExtractorCompatTest {
  public void getTrackFormat_withMultipleTracks_returnsCorrectTrackId() throws IOException {
    fakeExtractor.addReadAction(
        (input, seekPosition) -> {
          TrackOutput output1 = extractorOutput.track(/* id= */ 1, C.TRACK_TYPE_VIDEO);
          TrackOutput output2 = extractorOutput.track(/* id= */ 2, C.TRACK_TYPE_AUDIO);
          TrackOutput output1 = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_VIDEO);
          TrackOutput output2 = extractorOutput.track(/* id= */ 1, C.TRACK_TYPE_AUDIO);
          extractorOutput.endTracks();
          output1.format(PLACEHOLDER_FORMAT_VIDEO);
          output2.format(PLACEHOLDER_FORMAT_AUDIO);
          output1.format(
              new Format.Builder().setId(1).setSampleMimeType(MimeTypes.VIDEO_H264).build());
          output2.format(
              new Format.Builder().setId(2).setSampleMimeType(MimeTypes.AUDIO_AAC).build());
          return Extractor.RESULT_CONTINUE;
        });

@@ -485,6 +485,9 @@ public interface ExoPlayer extends Player {
   * <p>If enabled, ExoPlayer's playback loop will run as rarely as possible by scheduling work
   * for when {@link Renderer} progress can be made.
   *
   * <p>If a custom {@link AudioSink} is used then it must correctly implement {@link
   * AudioSink#getAudioTrackBufferSizeUs()} to enable dynamic scheduling for audio playback.
   *
   * <p>This method is experimental, and will be renamed or removed in a future release.
   *
   * @param dynamicSchedulingEnabled Whether to enable dynamic scheduling.
@@ -750,25 +750,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
                    : readingPeriod.info.id);
          }
        }
        if (e.isRecoverable
            && (pendingRecoverableRendererError == null
                || e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
                || e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
          // If pendingRecoverableRendererError != null and error was
          // ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED then upon retry, renderer will attempt with
          // offload disabled.
          Log.w(TAG, "Recoverable renderer error", e);
          if (pendingRecoverableRendererError != null) {
            pendingRecoverableRendererError.addSuppressed(e);
            e = pendingRecoverableRendererError;
          } else {
            pendingRecoverableRendererError = e;
          }
          // Given that the player is now in an unhandled exception state, the error needs to be
          // recovered or the player stopped before any other message is handled.
          handler.sendMessageAtFrontOfQueue(
              handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
        } else if (e.type == ExoPlaybackException.TYPE_RENDERER
        if (e.type == ExoPlaybackException.TYPE_RENDERER
            && renderers[e.rendererIndex % renderers.length].isRendererPrewarming(
                /* id= */ e.rendererIndex)) {
          // TODO(b/380273486): Investigate recovery for pre-warming renderer errors
@@ -792,11 +774,12 @@ import java.util.concurrent.atomic.AtomicBoolean;
          pendingRecoverableRendererError.addSuppressed(e);
          e = pendingRecoverableRendererError;
        }
        Log.e(TAG, "Playback error", e);

        if (e.type == ExoPlaybackException.TYPE_RENDERER
            && queue.getPlayingPeriod() != queue.getReadingPeriod()) {
          // We encountered a renderer error while reading ahead. Force-update the playback position
          // to the failing item to ensure the user-visible error is reported after the transition.
          // to the failing item to ensure correct retry or that the user-visible error is reported
          // after the transition.
          while (queue.getPlayingPeriod() != queue.getReadingPeriod()) {
            queue.advancePlayingPeriod();
          }
@@ -812,8 +795,24 @@ import java.util.concurrent.atomic.AtomicBoolean;
              /* reportDiscontinuity= */ true,
              Player.DISCONTINUITY_REASON_AUTO_TRANSITION);
        }
        stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false);
        playbackInfo = playbackInfo.copyWithPlaybackError(e);

        if (e.isRecoverable
            && (pendingRecoverableRendererError == null
                || e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
                || e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
          // Given that the player is now in an unhandled exception state, the error needs to be
          // recovered or the player stopped before any other message is handled.
          Log.w(TAG, "Recoverable renderer error", e);
          if (pendingRecoverableRendererError == null) {
            pendingRecoverableRendererError = e;
          }
          handler.sendMessageAtFrontOfQueue(
              handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
        } else {
          Log.e(TAG, "Playback error", e);
          stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false);
          playbackInfo = playbackInfo.copyWithPlaybackError(e);
        }
      }
    } catch (DrmSession.DrmSessionException e) {
      handleIoException(e, e.errorCode);
@@ -2783,6 +2782,10 @@ import java.util.concurrent.atomic.AtomicBoolean;
  private void maybeUpdateOffloadScheduling() {
    // If playing period is audio-only with offload mode preference to enable, then offload
    // scheduling should be enabled.
    if (queue.getPlayingPeriod() != queue.getReadingPeriod()) {
      // Do not enable offload scheduling when starting to process the next media item.
      return;
    }
    @Nullable MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
    if (playingPeriodHolder != null) {
      TrackSelectorResult trackSelectorResult = playingPeriodHolder.getTrackSelectorResult();
@@ -1002,7 +1002,6 @@ public final class MediaExtractorCompat {
      FormatHolder scratchFormatHolder, DecoderInputBuffer scratchNoDataDecoderInputBuffer) {
    Format format = getFormat(scratchFormatHolder, scratchNoDataDecoderInputBuffer);
    MediaFormat mediaFormatResult = MediaFormatUtil.createMediaFormatFromFormat(format);
    mediaFormatResult.setInteger(MediaFormat.KEY_TRACK_ID, getIdOfBackingTrack());
    if (compatibilityTrackMimeType != null) {
      if (Util.SDK_INT >= 29) {
        mediaFormatResult.removeKey(MediaFormat.KEY_CODECS_STRING);
@@ -591,6 +591,15 @@ public interface AudioSink {
   */
  default void setOutputStreamOffsetUs(long outputStreamOffsetUs) {}

  /**
   * Returns the size of the underlying {@link AudioTrack} buffer in microseconds. If unsupported or
   * the {@link AudioTrack} is not initialized then return {@link C#TIME_UNSET}.
   *
   * <p>If the {@link AudioTrack} is configured with a compressed encoding, then the returned
   * duration is an estimated minimum based on the encoding's maximum encoded byte rate.
   */
  long getAudioTrackBufferSizeUs();

  /**
   * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled.
   * Enabling tunneling is only possible if the sink is based on a platform {@link AudioTrack}, and
@@ -22,6 +22,7 @@ import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;

import android.media.AudioDeviceInfo;
@@ -169,6 +170,7 @@ public abstract class DecoderAudioRenderer<
  private long largestQueuedPresentationTimeUs;
  private long lastBufferInStreamPresentationTimeUs;
  private long nextBufferToWritePresentationTimeUs;
  private boolean isRendereringToEndOfStream;

  public DecoderAudioRenderer() {
    this(/* eventHandler= */ null, /* eventListener= */ null);
@@ -246,16 +248,28 @@ public abstract class DecoderAudioRenderer<
    if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
      return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
    }
    long durationUs =
    long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
    // Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
    // to end of stream.
    if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
      return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
    }
    // Compare written, yet-to-play content duration against the audio track buffer size.
    long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
    long bufferedDurationUs =
        audioTrackBufferDurationUs != C.TIME_UNSET
            ? min(audioTrackBufferDurationUs, writtenDurationUs)
            : writtenDurationUs;
    bufferedDurationUs =
        (long)
            ((nextBufferToWritePresentationTimeUs - positionUs)
            (bufferedDurationUs
                / (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
                / 2);
    if (isStarted) {
      // Account for the elapsed time since the start of this iteration of the rendering loop.
      durationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
      bufferedDurationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
    }
    return max(DEFAULT_DURATION_TO_PROGRESS_US, durationUs);
    return max(DEFAULT_DURATION_TO_PROGRESS_US, bufferedDurationUs);
  }

  @Override
@@ -304,6 +318,7 @@ public abstract class DecoderAudioRenderer<
      try {
        audioSink.playToEndOfStream();
        nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
        isRendereringToEndOfStream = true;
      } catch (AudioSink.WriteException e) {
        throw createRendererException(
            e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@@ -585,6 +600,7 @@ public abstract class DecoderAudioRenderer<
    outputStreamEnded = true;
    audioSink.playToEndOfStream();
    nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
    isRendereringToEndOfStream = true;
  }

  private void flushDecoder() throws ExoPlaybackException {
@@ -660,6 +676,7 @@ public abstract class DecoderAudioRenderer<

    currentPositionUs = positionUs;
    nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
    isRendereringToEndOfStream = false;
    hasPendingReportedSkippedSilence = false;
    allowPositionDiscontinuity = true;
    inputStreamEnded = false;
@@ -689,6 +706,7 @@ public abstract class DecoderAudioRenderer<
    setOutputStreamOffsetUs(C.TIME_UNSET);
    hasPendingReportedSkippedSilence = false;
    nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
    isRendereringToEndOfStream = false;
    try {
      setSourceDrmSession(null);
      releaseDecoder();
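Illustrative arithmetic for the rescheduling logic above, with made-up values; it mirrors the `min(audioTrackBufferDurationUs, writtenDurationUs) / speed / 2` computation rather than calling the renderer directly:

```java
// Written-but-unplayed media: 200_000us; AudioTrack buffer: 100_000us; speed: 1f.
long writtenDurationUs = 200_000;
long bufferedDurationUs = Math.min(100_000, writtenDurationUs); // capped by the track buffer
long durationToProgressUs = (long) (bufferedDurationUs / 1f / 2); // 50_000us until more work is needed
```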
@@ -71,6 +71,7 @@ import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
@@ -1454,6 +1455,23 @@ public final class DefaultAudioSink implements AudioSink {
    }
  }

  @Override
  public long getAudioTrackBufferSizeUs() {
    if (!isAudioTrackInitialized()) {
      return C.TIME_UNSET;
    }
    if (Util.SDK_INT >= 23) {
      return Api23.getAudioTrackBufferSizeUs(audioTrack, configuration);
    }
    long byteRate =
        configuration.outputMode == OUTPUT_MODE_PCM
            ? (long) configuration.outputSampleRate * configuration.outputPcmFrameSize
            : DefaultAudioTrackBufferSizeProvider.getMaximumEncodedRateBytesPerSecond(
                configuration.outputEncoding);
    return Util.scaleLargeValue(
        configuration.bufferSize, C.MICROS_PER_SECOND, byteRate, RoundingMode.DOWN);
  }

  @Override
  public void enableTunnelingV21() {
    Assertions.checkState(externalAudioSessionIdProvided);
@@ -2365,6 +2383,18 @@ public final class DefaultAudioSink implements AudioSink {
      audioTrack.setPreferredDevice(
          audioDeviceInfo == null ? null : audioDeviceInfo.audioDeviceInfo);
    }

    public static long getAudioTrackBufferSizeUs(
        AudioTrack audioTrack, Configuration configuration) {
      return configuration.outputMode == OUTPUT_MODE_PCM
          ? configuration.framesToDurationUs(audioTrack.getBufferSizeInFrames())
          : Util.scaleLargeValue(
              audioTrack.getBufferSizeInFrames(),
              C.MICROS_PER_SECOND,
              DefaultAudioTrackBufferSizeProvider.getMaximumEncodedRateBytesPerSecond(
                  configuration.outputEncoding),
              RoundingMode.DOWN);
    }
  }

  @RequiresApi(31)
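Illustrative arithmetic for the pre-API 23 PCM branch above, with assumed stream parameters (48 kHz stereo 16-bit PCM, i.e. 4 bytes per frame, and a 192 000-byte buffer):

```java
// byteRate = outputSampleRate * outputPcmFrameSize.
long byteRate = 48_000L * 4; // 192_000 bytes per second
long bufferSizeUs =
    Util.scaleLargeValue(192_000, C.MICROS_PER_SECOND, byteRate, RoundingMode.DOWN);
// bufferSizeUs == 1_000_000, i.e. the AudioTrack buffer holds one second of audio.
```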
@@ -162,6 +162,11 @@ public class ForwardingAudioSink implements AudioSink {
    sink.setOutputStreamOffsetUs(outputStreamOffsetUs);
  }

  @Override
  public long getAudioTrackBufferSizeUs() {
    return sink.getAudioTrackBufferSizeUs();
  }

  @Override
  public void enableTunnelingV21() {
    sink.enableTunnelingV21();
@@ -20,6 +20,7 @@ import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_MA
import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.lang.Math.max;
import static java.lang.Math.min;

import android.annotation.SuppressLint;
import android.content.Context;
@@ -125,6 +126,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
  private int rendererPriority;
  private boolean isStarted;
  private long nextBufferToWritePresentationTimeUs;
  private boolean isRendereringToEndOfStream;

  /**
   * @param context A context.
@@ -518,20 +520,33 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
  @Override
  protected long getDurationToProgressUs(
      long positionUs, long elapsedRealtimeUs, boolean isOnBufferAvailableListenerRegistered) {
    if (nextBufferToWritePresentationTimeUs != C.TIME_UNSET) {
      long durationUs =
          (long)
              ((nextBufferToWritePresentationTimeUs - positionUs)
                  / (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
                  / 2);
      if (isStarted) {
        // Account for the elapsed time since the start of this iteration of the rendering loop.
        durationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
      }
      return max(DEFAULT_DURATION_TO_PROGRESS_US, durationUs);
    if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
      return super.getDurationToProgressUs(
          positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
    }
    return super.getDurationToProgressUs(
        positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
    long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
    // Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
    // to end of stream.
    if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
      return super.getDurationToProgressUs(
          positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
    }
    // Compare written, yet-to-play content duration against the audio track buffer size.
    long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
    long bufferedDurationUs =
        audioTrackBufferDurationUs != C.TIME_UNSET
            ? min(audioTrackBufferDurationUs, writtenDurationUs)
            : writtenDurationUs;
    bufferedDurationUs =
        (long)
            (bufferedDurationUs
                / (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
                / 2);
    if (isStarted) {
      // Account for the elapsed time since the start of this iteration of the rendering loop.
      bufferedDurationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
    }
    return max(DEFAULT_DURATION_TO_PROGRESS_US, bufferedDurationUs);
  }

  @Override
@@ -679,6 +694,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media

    currentPositionUs = positionUs;
    nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
    isRendereringToEndOfStream = false;
    hasPendingReportedSkippedSilence = false;
    allowPositionDiscontinuity = true;
  }
@@ -703,6 +719,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
    audioSinkNeedsReset = true;
    inputFormat = null;
    nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
    isRendereringToEndOfStream = false;
    try {
      audioSink.flush();
    } finally {
@@ -718,6 +735,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
  protected void onReset() {
    hasPendingReportedSkippedSilence = false;
    nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
    isRendereringToEndOfStream = false;
    try {
      super.onReset();
    } finally {
@@ -857,6 +875,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
      if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
        nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
      }
      isRendereringToEndOfStream = true;
    } catch (AudioSink.WriteException e) {
      throw createRendererException(
          e,
@@ -41,6 +41,7 @@ import java.util.List;
/* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback {

  private final MediaPeriod[] periods;
  private final boolean[] periodsWithTimeOffsets;
  private final IdentityHashMap<SampleStream, Integer> streamPeriodIndices;
  private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory;
  private final ArrayList<MediaPeriod> childrenPendingPreparation;
@@ -62,8 +63,10 @@ import java.util.List;
    compositeSequenceableLoader = compositeSequenceableLoaderFactory.empty();
    streamPeriodIndices = new IdentityHashMap<>();
    enabledPeriods = new MediaPeriod[0];
    periodsWithTimeOffsets = new boolean[periods.length];
    for (int i = 0; i < periods.length; i++) {
      if (periodTimeOffsetsUs[i] != 0) {
        periodsWithTimeOffsets[i] = true;
        this.periods[i] = new TimeOffsetMediaPeriod(periods[i], periodTimeOffsetsUs[i]);
      }
    }
@@ -75,7 +78,7 @@ import java.util.List;
   * specified index.
   */
  public MediaPeriod getChildPeriod(int index) {
    return periods[index] instanceof TimeOffsetMediaPeriod
    return periodsWithTimeOffsets[index]
        ? ((TimeOffsetMediaPeriod) periods[index]).getWrappedMediaPeriod()
        : periods[index];
  }
@@ -505,17 +505,18 @@ public final class PreloadMediaSource extends WrappingMediaSource {
        return;
      }
      PreloadMediaPeriod preloadMediaPeriod = (PreloadMediaPeriod) mediaPeriod;
      long bufferedPositionUs = mediaPeriod.getBufferedPositionUs();
      if (prepared && bufferedPositionUs == C.TIME_END_OF_SOURCE) {
        preloadControl.onLoadedToTheEndOfSource(PreloadMediaSource.this);
        stopPreloading();
        return;
      }
      if (prepared
          && !preloadControl.onContinueLoadingRequested(
              PreloadMediaSource.this, bufferedPositionUs - periodStartPositionUs)) {
        stopPreloading();
        return;
      if (prepared) {
        long bufferedPositionUs = mediaPeriod.getBufferedPositionUs();
        if (bufferedPositionUs == C.TIME_END_OF_SOURCE) {
          preloadControl.onLoadedToTheEndOfSource(PreloadMediaSource.this);
          stopPreloading();
          return;
        }
        if (!preloadControl.onContinueLoadingRequested(
            PreloadMediaSource.this, bufferedPositionUs - periodStartPositionUs)) {
          stopPreloading();
          return;
        }
      }
      preloadMediaPeriod.continueLoading(
          new LoadingInfo.Builder().setPlaybackPositionUs(periodStartPositionUs).build());
@@ -739,6 +739,8 @@ public class EventLogger implements AnalyticsListener {
        return "NONE";
      case Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS:
        return "TRANSIENT_AUDIO_FOCUS_LOSS";
      case Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT:
        return "UNSUITABLE_AUDIO_OUTPUT";
      default:
        return "?";
    }
@@ -149,6 +149,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
   */
  private static final long OFFSET_FROM_PERIOD_END_TO_TREAT_AS_LAST_US = 100_000L;

  /**
   * The offset from {@link #getLastResetPositionUs()} in microseconds, before which input buffers
   * are not allowed to be dropped.
   *
   * <p>This value must be greater than the pre-roll distance used by common audio codecs, such as
   * 80ms used by Opus <a
   * href="https://opus-codec.org/docs/opus_in_isobmff.html#4.3.6.2">Encapsulation of Opus in ISO
   * Base Media File Format</a>
   */
  private static final long OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US = 200_000L;

  /**
   * The maximum number of consecutive dropped input buffers that allow discarding frame headers.
   *
@@ -616,7 +627,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
      boolean treatDroppedBuffersAsSkipped)
      throws ExoPlaybackException {
    if (minEarlyUsToDropDecoderInput != C.TIME_UNSET) {
      shouldDropDecoderInputBuffers = earlyUs < minEarlyUsToDropDecoderInput;
      // TODO: b/161996553 - Remove the isAwayFromLastResetPosition check when audio pre-rolling
      // is implemented correctly. Audio codecs such as Opus require pre-roll samples to be decoded
      // and discarded on a seek. Depending on the audio decoder, the positionUs may jump forward
      // by the pre-roll duration. Do not drop more frames than necessary when this happens.
      boolean isAwayFromLastResetPosition =
          positionUs
              > getLastResetPositionUs()
                  + OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US;
      shouldDropDecoderInputBuffers =
          isAwayFromLastResetPosition && earlyUs < minEarlyUsToDropDecoderInput;
    }
    return shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastFrame)
        && maybeDropBuffersToKeyframe(positionUs, treatDroppedBuffersAsSkipped);
@@ -147,6 +147,7 @@ import androidx.media3.exoplayer.analytics.PlayerId;
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
import androidx.media3.exoplayer.drm.DrmSessionEventListener;
import androidx.media3.exoplayer.drm.DrmSessionManager;
import androidx.media3.exoplayer.mediacodec.MediaCodecRenderer;
import androidx.media3.exoplayer.metadata.MetadataOutput;
import androidx.media3.exoplayer.source.ClippingMediaSource;
import androidx.media3.exoplayer.source.ConcatenatingMediaSource;
@@ -11786,6 +11787,54 @@ public class ExoPlayerTest {
    player.release();
  }

  @Test
  public void enablingOffload_withFastReadingPeriodAdvancement_playerDoesNotSleep()
      throws Exception {
    FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO);
    AtomicInteger sleepingForOffloadCounter = new AtomicInteger();
    ExoPlayer player =
        parameterizeTestExoPlayerBuilder(
                new TestExoPlayerBuilder(context).setRenderers(sleepRenderer))
            .build();
    ExoPlayer.AudioOffloadListener listener =
        new ExoPlayer.AudioOffloadListener() {
          @Override
          public void onSleepingForOffloadChanged(boolean sleepingForOffload) {
            if (sleepingForOffload) {
              sleepingForOffloadCounter.getAndIncrement();
            }
          }
        };
    player.addAudioOffloadListener(listener);
    // Set a playlist of multiple, short audio-only items such that the reading period quickly
    // advances past the playing period.
    Timeline timeline = new FakeTimeline();
    player.setMediaSources(
        ImmutableList.of(
            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT)));
    player.setTrackSelectionParameters(
        player
            .getTrackSelectionParameters()
            .buildUpon()
            .setAudioOffloadPreferences(
                new AudioOffloadPreferences.Builder()
                    .setAudioOffloadMode(AudioOffloadPreferences.AUDIO_OFFLOAD_MODE_REQUIRED)
                    .build())
            .build());
    player.prepare();
    player.play();
    advance(player).untilStartOfMediaItem(/* mediaItemIndex= */ 1);

    sleepRenderer.sleepOnNextRender();
    runUntilPlaybackState(player, Player.STATE_ENDED);

    assertThat(sleepingForOffloadCounter.get()).isEqualTo(0);

    player.release();
  }

  @Test
  public void wakeupListenerWhileSleepingForOffload_isWokenUp_renderingResumes() throws Exception {
    FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO).sleepOnNextRender();
@@ -16700,6 +16749,84 @@ public class ExoPlayerTest {
    assertThat(videoScalingSetOnSecondaryVideoRenderer.get()).isTrue();
  }

  @Test
  public void
      play_withRecoverableErrorAfterAdvancingReadingPeriod_advancesPlayingPeriodWhileErrorHandling()
          throws Exception {
    Clock fakeClock = new FakeClock(/* isAutoAdvancing= */ true);
    AtomicBoolean shouldRendererThrowRecoverableError = new AtomicBoolean(false);
    AtomicInteger onStreamChangedCount = new AtomicInteger(0);
    ExoPlayer player =
        new TestExoPlayerBuilder(context)
            .setClock(fakeClock)
            .setRenderersFactory(
                new RenderersFactory() {
                  @Override
                  public Renderer[] createRenderers(
                      Handler eventHandler,
                      VideoRendererEventListener videoRendererEventListener,
                      AudioRendererEventListener audioRendererEventListener,
                      TextOutput textRendererOutput,
                      MetadataOutput metadataRendererOutput) {
                    return new Renderer[] {
                      new FakeVideoRenderer(
                          SystemClock.DEFAULT.createHandler(
                              eventHandler.getLooper(), /* callback= */ null),
                          videoRendererEventListener) {
                        @Override
                        protected void onStreamChanged(
                            Format[] formats,
                            long startPositionUs,
                            long offsetUs,
                            MediaSource.MediaPeriodId mediaPeriodId)
                            throws ExoPlaybackException {
                          super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
                          onStreamChangedCount.getAndIncrement();
                        }

                        @Override
                        public void render(long positionUs, long elapsedRealtimeUs)
                            throws ExoPlaybackException {
                          if (!shouldRendererThrowRecoverableError.get()) {
                            super.render(positionUs, elapsedRealtimeUs);
                          } else {
                            shouldRendererThrowRecoverableError.set(false);
                            throw createRendererException(
                                new MediaCodecRenderer.DecoderInitializationException(
                                    new Format.Builder().build(),
                                    new IllegalArgumentException(),
                                    false,
                                    0),
                                this.getFormatHolder().format,
                                true,
                                PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
                          }
                        }
                      }
                    };
                  }
                })
            .build();
    player.setMediaSources(
        ImmutableList.of(
            new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
            new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT)));
    player.prepare();

    // Play a bit until the reading period has advanced.
    player.play();
    advance(player).untilBackgroundThreadCondition(() -> onStreamChangedCount.get() == 2);
    shouldRendererThrowRecoverableError.set(true);
    runUntilPlaybackState(player, Player.STATE_ENDED);

    player.release();

    // onStreamChanged should occur thrice;
    // 1 during first enable, 2 during replace stream, 3 during error recovery
    assertThat(onStreamChangedCount.get()).isEqualTo(3);
    assertThat(shouldRendererThrowRecoverableError.get()).isFalse();
  }

  // Internal methods.

  private void addWatchAsSystemFeature() {
@@ -1524,6 +1524,91 @@ public class ExoPlayerWithPrewarmingRenderersTest {
    assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_ENABLED);
  }

  @Test
  public void
      play_recoverableErrorWithPrimaryRendererDuringPrewarming_doesNotResetSecondaryRenderer()
          throws Exception {
    Clock fakeClock = new FakeClock(/* isAutoAdvancing= */ true);
    Player.Listener listener = mock(Player.Listener.class);
    AtomicBoolean shouldPrimaryRendererThrowRecoverable = new AtomicBoolean(false);
    ExoPlayer player =
        new TestExoPlayerBuilder(context)
            .setClock(fakeClock)
            .setRenderersFactory(
                new FakeRenderersFactorySupportingSecondaryVideoRenderer(fakeClock) {
                  @Override
                  public Renderer[] createRenderers(
                      Handler eventHandler,
                      VideoRendererEventListener videoRendererEventListener,
                      AudioRendererEventListener audioRendererEventListener,
                      TextOutput textRendererOutput,
                      MetadataOutput metadataRendererOutput) {
                    HandlerWrapper clockAwareHandler =
                        clock.createHandler(eventHandler.getLooper(), /* callback= */ null);
                    return new Renderer[] {
                      new FakeVideoRenderer(clockAwareHandler, videoRendererEventListener) {
                        @Override
                        public void render(long positionUs, long elapsedRealtimeUs)
                            throws ExoPlaybackException {
                          if (!shouldPrimaryRendererThrowRecoverable.get()) {
                            super.render(positionUs, elapsedRealtimeUs);
                          } else {
                            shouldPrimaryRendererThrowRecoverable.set(false);
                            throw createRendererException(
                                new MediaCodecRenderer.DecoderInitializationException(
                                    new Format.Builder().build(),
                                    new IllegalArgumentException(),
                                    false,
                                    0),
                                this.getFormatHolder().format,
                                true,
                                PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
                          }
                        }
                      },
                      new FakeAudioRenderer(clockAwareHandler, audioRendererEventListener)
                    };
                  }
                })
            .build();
    player.addListener(listener);
    Renderer videoRenderer = player.getRenderer(/* index= */ 0);
    Renderer secondaryVideoRenderer = player.getSecondaryRenderer(/* index= */ 0);
    // Set a playlist that allows a new renderer to be enabled early.
    player.setMediaSources(
        ImmutableList.of(
            new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
            new FakeBlockingMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
            new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT)));
    player.prepare();

    // Play a bit until the second renderer is pre-warming.
    player.play();
    advance(player)
        .untilBackgroundThreadCondition(
            () -> secondaryVideoRenderer.getState() == Renderer.STATE_ENABLED);
    @Renderer.State int videoState1 = videoRenderer.getState();
    @Renderer.State int secondaryVideoState1 = secondaryVideoRenderer.getState();
    advance(player)
        .untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_ENABLED);
    @Renderer.State int videoState2 = videoRenderer.getState();
    @Renderer.State int secondaryVideoState2 = secondaryVideoRenderer.getState();
    shouldPrimaryRendererThrowRecoverable.set(true);
    advance(player)
        .untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_DISABLED);
    @Renderer.State int videoState3 = videoRenderer.getState();
    @Renderer.State int secondaryVideoState3 = secondaryVideoRenderer.getState();
    player.release();

    verify(listener).onPositionDiscontinuity(any(), any(), anyInt());
    assertThat(videoState1).isEqualTo(Renderer.STATE_STARTED);
    assertThat(secondaryVideoState1).isEqualTo(Renderer.STATE_ENABLED);
    assertThat(videoState2).isEqualTo(Renderer.STATE_ENABLED);
    assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_STARTED);
    assertThat(videoState3).isEqualTo(Renderer.STATE_DISABLED);
    assertThat(secondaryVideoState3).isEqualTo(Renderer.STATE_STARTED);
  }

  /** {@link FakeMediaSource} that prevents any reading of samples off the sample queue. */
  private static final class FakeBlockingMediaSource extends FakeMediaSource {

@ -55,6 +55,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -226,10 +227,11 @@ public class DecoderAudioRendererTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getDurationToProgressUs_withAudioSinkBuffersFull_returnsCalculatedDuration()
|
||||
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
||||
@ -244,11 +246,11 @@ public class DecoderAudioRendererTest {
|
||||
/* initialFormat= */ FORMAT,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
audioRenderer.enable(
|
||||
@ -275,15 +277,16 @@ public class DecoderAudioRendererTest {
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(75_000L);
|
||||
assertThat(durationToProgressUs).isEqualTo(50_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioSinkBuffersFullAndDoublePlaybackSpeed_returnsCalculatedDuration()
|
||||
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndDoublePlaybackSpeed_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
PlaybackParameters playbackParametersWithDoubleSpeed =
|
||||
new PlaybackParameters(/* speed= */ 2.0f);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
|
||||
@ -332,16 +335,17 @@ public class DecoderAudioRendererTest {
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(37_500L);
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioSinkBuffersFullAndPlaybackAdvancement_returnsCalculatedDuration()
|
||||
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndPlaybackAdvancement_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
||||
@ -391,20 +395,82 @@ public class DecoderAudioRendererTest {
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(65_000L);
|
||||
assertThat(durationToProgressUs).isEqualTo(40_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_afterReadToEndOfStreamWithAudioSinkBuffersFull_returnsCalculatedDuration()
|
||||
getDurationToProgressUs_usingAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||
CountDownLatch latchDecode = new CountDownLatch(4);
|
||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
||||
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
/* mediaSourceEventDispatcher= */ null,
|
||||
DrmSessionManager.DRM_UNSUPPORTED,
|
||||
new DrmSessionEventListener.EventDispatcher(),
|
||||
/* initialFormat= */ FORMAT,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
audioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
new Format[] {FORMAT},
|
||||
fakeSampleStream,
|
||||
/* positionUs= */ 0,
|
||||
/* joining= */ false,
|
||||
/* mayRenderStartOfStream= */ true,
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
||||
when(mockAudioSink.handleBuffer(
|
||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
|
||||
.thenReturn(false);
|
||||
audioRenderer.start();
|
||||
while (latchDecode.getCount() != 0) {
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
}
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
long durationToProgressUs =
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(10_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterProcessEndOfStream_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
CountDownLatch latchDecode = new CountDownLatch(6);
|
||||
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
|
||||
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
|
||||
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
|
||||
ForwardingAudioSink forwardingAudioSink =
|
||||
new ForwardingAudioSink(mockAudioSink) {
|
||||
@Override
|
||||
public void playToEndOfStream() throws WriteException {
|
||||
super.playToEndOfStream();
|
||||
hasCalledPlayToEndOfStream.set(true);
|
||||
}
|
||||
};
|
||||
audioRenderer = createAudioRenderer(forwardingAudioSink);
|
||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
@ -421,10 +487,6 @@ public class DecoderAudioRendererTest {
|
||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
// Mock that audio sink is full when trying to write final sample.
|
||||
when(mockAudioSink.handleBuffer(
|
||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 250000), anyInt()))
|
||||
.thenReturn(false);
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
audioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
@ -436,17 +498,77 @@ public class DecoderAudioRendererTest {
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
// Represents audio sink buffers being full when trying to write 150000 us sample.
|
||||
audioRenderer.start();
|
||||
while (latchDecode.getCount() != 0) {
|
||||
audioRenderer.setCurrentStreamFinal();
|
||||
while (!hasCalledPlayToEndOfStream.get()) {
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
}
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
long durationToProgressUs =
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(125_000L);
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(mockAudioSink.isEnded()).thenReturn(true);
|
||||
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
|
||||
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
|
||||
ForwardingAudioSink forwardingAudioSink =
|
||||
new ForwardingAudioSink(mockAudioSink) {
|
||||
@Override
|
||||
public void playToEndOfStream() throws WriteException {
|
||||
super.playToEndOfStream();
|
||||
hasCalledPlayToEndOfStream.set(true);
|
||||
}
|
||||
};
|
||||
audioRenderer = createAudioRenderer(forwardingAudioSink);
|
||||
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
/* mediaSourceEventDispatcher= */ null,
|
||||
DrmSessionManager.DRM_UNSUPPORTED,
|
||||
new DrmSessionEventListener.EventDispatcher(),
|
||||
/* initialFormat= */ FORMAT,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
audioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
new Format[] {FORMAT},
|
||||
fakeSampleStream,
|
||||
/* positionUs= */ 0,
|
||||
/* joining= */ false,
|
||||
/* mayRenderStartOfStream= */ true,
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
audioRenderer.start();
|
||||
audioRenderer.setCurrentStreamFinal();
|
||||
while (!hasCalledPlayToEndOfStream.get()) {
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
}
|
||||
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
long durationToProgressUs =
|
||||
audioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -62,6 +62,7 @@ import androidx.test.core.app.ApplicationProvider;
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
@ -733,8 +734,9 @@ public class MediaCodecAudioRendererTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getDurationToProgressUs_withAudioSinkBuffersFull_returnsCalculatedDuration()
|
||||
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
@ -776,13 +778,14 @@ public class MediaCodecAudioRendererTest {
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(75_000L);
|
||||
assertThat(durationToProgressUs).isEqualTo(50_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioSinkBuffersFullAndDoublePlaybackSpeed_returnsCalculatedDuration()
|
||||
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndDoublePlaybackSpeed_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
@ -826,14 +829,15 @@ public class MediaCodecAudioRendererTest {
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(37_500L);
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioSinkBuffersFullAndPlaybackAdvancement_returnsCalculatedDuration()
|
||||
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndPlaybackAdvancement_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
mediaCodecAudioRenderer =
|
||||
new MediaCodecAudioRenderer(
|
||||
ApplicationProvider.getApplicationContext(),
|
||||
@ -897,7 +901,200 @@ public class MediaCodecAudioRendererTest {
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(65_000L);
|
||||
assertThat(durationToProgressUs).isEqualTo(40_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
|
||||
throws Exception {
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
/* mediaSourceEventDispatcher= */ null,
|
||||
DrmSessionManager.DRM_UNSUPPORTED,
|
||||
new DrmSessionEventListener.EventDispatcher(),
|
||||
/* initialFormat= */ AUDIO_AAC,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
mediaCodecAudioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
new Format[] {AUDIO_AAC},
|
||||
fakeSampleStream,
|
||||
/* positionUs= */ 0,
|
||||
/* joining= */ false,
|
||||
/* mayRenderStartOfStream= */ false,
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
// Represents audio sink buffers being full when trying to write 150_000 us sample.
|
||||
when(audioSink.handleBuffer(
|
||||
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
|
||||
.thenReturn(false);
|
||||
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
mediaCodecAudioRenderer.start();
|
||||
for (int i = 0; i < 10; i++) {
|
||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
||||
}
|
||||
|
||||
long durationToProgressUs =
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(10_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterRenderToEndOfStream_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
|
||||
mediaCodecAudioRenderer =
|
||||
new MediaCodecAudioRenderer(
|
||||
ApplicationProvider.getApplicationContext(),
|
||||
new DefaultMediaCodecAdapterFactory(
|
||||
ApplicationProvider.getApplicationContext(),
|
||||
() -> {
|
||||
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
|
||||
return callbackThread;
|
||||
},
|
||||
() -> {
|
||||
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
|
||||
return queueingThread;
|
||||
}),
|
||||
mediaCodecSelector,
|
||||
/* enableDecoderFallback= */ false,
|
||||
new Handler(Looper.getMainLooper()),
|
||||
audioRendererEventListener,
|
||||
audioSink) {
|
||||
@Override
|
||||
protected void renderToEndOfStream() throws ExoPlaybackException {
|
||||
super.renderToEndOfStream();
|
||||
hasCalledRenderToEndOfStream.set(true);
|
||||
}
|
||||
};
|
||||
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
|
||||
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
/* mediaSourceEventDispatcher= */ null,
|
||||
DrmSessionManager.DRM_UNSUPPORTED,
|
||||
new DrmSessionEventListener.EventDispatcher(),
|
||||
/* initialFormat= */ AUDIO_AAC,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
mediaCodecAudioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
new Format[] {AUDIO_AAC},
|
||||
fakeSampleStream,
|
||||
/* positionUs= */ 0,
|
||||
/* joining= */ false,
|
||||
/* mayRenderStartOfStream= */ false,
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
mediaCodecAudioRenderer.start();
|
||||
mediaCodecAudioRenderer.setCurrentStreamFinal();
|
||||
while (!hasCalledRenderToEndOfStream.get()) {
|
||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
||||
}
|
||||
|
||||
long durationToProgressUs =
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
|
||||
throws Exception {
|
||||
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
|
||||
mediaCodecAudioRenderer =
|
||||
new MediaCodecAudioRenderer(
|
||||
ApplicationProvider.getApplicationContext(),
|
||||
new DefaultMediaCodecAdapterFactory(
|
||||
ApplicationProvider.getApplicationContext(),
|
||||
() -> {
|
||||
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
|
||||
return callbackThread;
|
||||
},
|
||||
() -> {
|
||||
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
|
||||
return queueingThread;
|
||||
}),
|
||||
mediaCodecSelector,
|
||||
/* enableDecoderFallback= */ false,
|
||||
new Handler(Looper.getMainLooper()),
|
||||
audioRendererEventListener,
|
||||
audioSink) {
|
||||
@Override
|
||||
protected void renderToEndOfStream() throws ExoPlaybackException {
|
||||
super.renderToEndOfStream();
|
||||
hasCalledRenderToEndOfStream.set(true);
|
||||
}
|
||||
};
|
||||
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
|
||||
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
|
||||
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
|
||||
FakeSampleStream fakeSampleStream =
|
||||
new FakeSampleStream(
|
||||
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
|
||||
/* mediaSourceEventDispatcher= */ null,
|
||||
DrmSessionManager.DRM_UNSUPPORTED,
|
||||
new DrmSessionEventListener.EventDispatcher(),
|
||||
/* initialFormat= */ AUDIO_AAC,
|
||||
ImmutableList.of(
|
||||
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
|
||||
END_OF_STREAM_ITEM));
|
||||
fakeSampleStream.writeData(/* startPositionUs= */ 0);
|
||||
mediaCodecAudioRenderer.enable(
|
||||
RendererConfiguration.DEFAULT,
|
||||
new Format[] {AUDIO_AAC},
|
||||
fakeSampleStream,
|
||||
/* positionUs= */ 0,
|
||||
/* joining= */ false,
|
||||
/* mayRenderStartOfStream= */ false,
|
||||
/* startPositionUs= */ 0,
|
||||
/* offsetUs= */ 0,
|
||||
new MediaSource.MediaPeriodId(new Object()));
|
||||
mediaCodecAudioRenderer.start();
|
||||
mediaCodecAudioRenderer.setCurrentStreamFinal();
|
||||
while (!hasCalledRenderToEndOfStream.get()) {
|
||||
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
|
||||
maybeIdleAsynchronousMediaCodecAdapterThreads();
|
||||
}
|
||||
|
||||
long durationToProgressUs =
|
||||
mediaCodecAudioRenderer.getDurationToProgressUs(
|
||||
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
|
||||
|
||||
assertThat(durationToProgressUs).isEqualTo(25_000L);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -29,6 +29,7 @@ import androidx.media3.exoplayer.DecoderCounters;
|
||||
import androidx.media3.exoplayer.DefaultRenderersFactory;
|
||||
import androidx.media3.exoplayer.ExoPlayer;
|
||||
import androidx.media3.exoplayer.Renderer;
|
||||
import androidx.media3.exoplayer.analytics.AnalyticsListener;
|
||||
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
|
||||
import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter;
|
||||
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
|
||||
@ -103,6 +104,14 @@ public class ParseAv1SampleDependenciesPlaybackTest {
|
||||
new ExoPlayer.Builder(applicationContext, renderersFactory)
|
||||
.setClock(new FakeClock(/* isAutoAdvancing= */ true))
|
||||
.build();
|
||||
player.addAnalyticsListener(
|
||||
new AnalyticsListener() {
|
||||
@Override
|
||||
public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {
|
||||
// Input buffers near the reset position should not be dropped.
|
||||
assertThat(eventTime.currentPlaybackPositionMs).isAtLeast(200);
|
||||
}
|
||||
});
|
||||
Surface surface = new Surface(new SurfaceTexture(/* texName= */ 1));
|
||||
player.setVideoSurface(surface);
|
||||
player.setMediaItem(MediaItem.fromUri(TEST_MP4_URI));
|
||||
@ -121,7 +130,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
|
||||
// Which input buffer is dropped first depends on the number of MediaCodec buffer slots.
|
||||
// This means the asserts cannot be isEqualTo.
|
||||
assertThat(decoderCounters.maxConsecutiveDroppedBufferCount).isAtMost(2);
|
||||
assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(8);
|
||||
assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(4);
|
||||
}
|
||||
|
||||
private static final class CapturingRenderersFactoryWithLateThresholdToDropDecoderInputUs
|
||||
@ -155,7 +164,6 @@ public class ParseAv1SampleDependenciesPlaybackTest {
|
||||
/* enableDecoderFallback= */ false,
|
||||
eventHandler,
|
||||
videoRendererEventListener,
|
||||
DefaultRenderersFactory.MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY,
|
||||
/* parseAv1SampleDependencies= */ true,
|
||||
/* lateThresholdToDropDecoderInputUs= */ -100_000_000L)
|
||||
};
|
||||
@ -173,7 +181,6 @@ public class ParseAv1SampleDependenciesPlaybackTest {
|
||||
boolean enableDecoderFallback,
|
||||
@Nullable Handler eventHandler,
|
||||
@Nullable VideoRendererEventListener eventListener,
|
||||
int maxDroppedFramesToNotify,
|
||||
boolean parseAv1SampleDependencies,
|
||||
long lateThresholdToDropDecoderInputUs) {
|
||||
super(
|
||||
@ -184,7 +191,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
|
||||
.setEnableDecoderFallback(enableDecoderFallback)
|
||||
.setEventHandler(eventHandler)
|
||||
.setEventListener(eventListener)
|
||||
.setMaxDroppedFramesToNotify(maxDroppedFramesToNotify)
|
||||
.setMaxDroppedFramesToNotify(1)
|
||||
.experimentalSetParseAv1SampleDependencies(parseAv1SampleDependencies)
|
||||
.experimentalSetLateThresholdToDropDecoderInputUs(
|
||||
lateThresholdToDropDecoderInputUs));
|
||||
|
@ -22,6 +22,7 @@ import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
import androidx.media3.common.C;
|
||||
import androidx.media3.common.Format;
|
||||
import androidx.media3.common.MimeTypes;
|
||||
import androidx.media3.common.TrackGroup;
|
||||
import androidx.media3.common.util.NullableType;
|
||||
import androidx.media3.decoder.DecoderInputBuffer;
|
||||
@ -33,6 +34,7 @@ import androidx.media3.exoplayer.source.MediaSource.MediaPeriodId;
|
||||
import androidx.media3.exoplayer.source.MediaSourceEventListener.EventDispatcher;
|
||||
import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
|
||||
import androidx.media3.exoplayer.trackselection.FixedTrackSelection;
|
||||
import androidx.media3.exoplayer.upstream.Allocator;
|
||||
import androidx.media3.exoplayer.upstream.DefaultAllocator;
|
||||
import androidx.media3.test.utils.FakeMediaPeriod;
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||
@ -270,6 +272,39 @@ public final class MergingMediaPeriodTest {
|
||||
assertThat(inputBuffer.timeUs).isEqualTo(456_000 - 3000);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getChildPeriod_withTimeOffsetsAndTimeOffsetPeriodChildren_returnsCorrectChildPeriod() {
|
||||
TrackGroupArray trackGroupArray =
|
||||
new TrackGroupArray(
|
||||
new TrackGroup(new Format.Builder().setSampleMimeType(MimeTypes.VIDEO_AV1).build()));
|
||||
Allocator allocator =
|
||||
new DefaultAllocator(/* trimOnReset= */ false, /* individualAllocationSize= */ 1024);
|
||||
MediaPeriod childPeriod0 =
|
||||
new FakeMediaPeriod(
|
||||
trackGroupArray, allocator, /* singleSampleTimeUs= */ 0, new EventDispatcher());
|
||||
MediaPeriod childPeriod1 =
|
||||
new TimeOffsetMediaPeriod(
|
||||
new FakeMediaPeriod(
|
||||
trackGroupArray, allocator, /* singleSampleTimeUs= */ 300, new EventDispatcher()),
|
||||
/* timeOffsetUs= */ -300);
|
||||
MediaPeriod childPeriod2 =
|
||||
new FakeMediaPeriod(
|
||||
trackGroupArray, allocator, /* singleSampleTimeUs= */ -500, new EventDispatcher());
|
||||
|
||||
MergingMediaPeriod mergingMediaPeriod =
|
||||
new MergingMediaPeriod(
|
||||
new DefaultCompositeSequenceableLoaderFactory(),
|
||||
/* periodTimeOffsetsUs= */ new long[] {0, 0, 500},
|
||||
childPeriod0,
|
||||
childPeriod1,
|
||||
childPeriod2);
|
||||
|
||||
assertThat(mergingMediaPeriod.getChildPeriod(0)).isEqualTo(childPeriod0);
|
||||
assertThat(mergingMediaPeriod.getChildPeriod(1)).isEqualTo(childPeriod1);
|
||||
assertThat(mergingMediaPeriod.getChildPeriod(2)).isEqualTo(childPeriod2);
|
||||
}
|
||||
|
||||
private MergingMediaPeriod prepareMergingPeriod(MergingPeriodDefinition... definitions)
|
||||
throws Exception {
|
||||
return prepareMergingPeriod(/* singleTrackGroup= */ false, definitions);
|
||||
|
@ -90,12 +90,13 @@ import org.junit.runner.RunWith;
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public final class PreloadMediaSourceTest {
|
||||
|
||||
private static final int LOADING_CHECK_INTERVAL_BYTES = 10 * 1024;
|
||||
private static final int LOADING_CHECK_INTERVAL_BYTES = 32;
|
||||
private static final int TARGET_PRELOAD_DURATION_US = 10000;
|
||||
|
||||
private Allocator allocator;
|
||||
private BandwidthMeter bandwidthMeter;
|
||||
private RenderersFactory renderersFactory;
|
||||
private MediaItem mediaItem;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
@ -112,6 +113,10 @@ public final class PreloadMediaSourceTest {
|
||||
SystemClock.DEFAULT.createHandler(handler.getLooper(), /* callback= */ null),
|
||||
audioListener)
|
||||
};
|
||||
mediaItem =
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/long_1080p_lowbitrate.mp4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -146,11 +151,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
|
||||
@ -191,11 +192,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
|
||||
@ -235,11 +232,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
@ -266,11 +259,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
AtomicReference<MediaSource> externalCallerMediaSourceReference = new AtomicReference<>();
|
||||
MediaSource.MediaSourceCaller externalCaller =
|
||||
@ -315,11 +304,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
|
||||
@ -388,11 +373,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
|
||||
@ -472,11 +453,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
|
||||
@ -583,11 +560,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
|
||||
@ -615,11 +588,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
FakeMediaSource wrappedMediaSource = mediaSourceFactory.getLastCreatedSource();
|
||||
wrappedMediaSource.setAllowPreparation(false);
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
@ -653,11 +622,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
@ -727,11 +692,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
@ -808,11 +769,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
@ -876,11 +833,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
|
||||
@ -923,11 +876,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
|
||||
MediaSource.MediaSourceCaller externalCaller =
|
||||
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);
|
||||
@ -976,11 +925,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
|
||||
MediaSource.MediaSourceCaller externalCaller =
|
||||
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);
|
||||
@ -1031,11 +976,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
AtomicBoolean externalCaller1SourceInfoRefreshedCalled = new AtomicBoolean();
|
||||
AtomicBoolean externalCaller2SourceInfoRefreshedCalled = new AtomicBoolean();
|
||||
MediaSource.MediaSourceCaller externalCaller1 =
|
||||
@ -1090,11 +1031,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
preloadMediaSource.preload(/* startPositionUs= */ 0L);
|
||||
shadowOf(Looper.getMainLooper()).idle();
|
||||
preloadMediaSource.releasePreloadMediaSource();
|
||||
@ -1140,11 +1077,7 @@ public final class PreloadMediaSourceTest {
|
||||
getRendererCapabilities(renderersFactory),
|
||||
allocator,
|
||||
Util.getCurrentOrMainLooper());
|
||||
PreloadMediaSource preloadMediaSource =
|
||||
preloadMediaSourceFactory.createMediaSource(
|
||||
new MediaItem.Builder()
|
||||
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
|
||||
.build());
|
||||
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
|
||||
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
|
||||
MediaSource.MediaSourceCaller externalCaller =
|
||||
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);
|
||||
|
@ -1122,7 +1122,7 @@ public final class DashMediaSource extends BaseMediaSource {
if (manifest != null) {
cmcdDataFactory.setIsLive(manifest.dynamic);
}
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
startLoading(
new ParsingLoadable<>(dataSource, dataSpec, C.DATA_TYPE_MANIFEST, manifestParser),
@ -517,7 +517,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
.setPlaybackRate(loadingInfo.playbackSpeed)
.setIsLive(!playlist.hasEndTag)
.setDidRebuffer(loadingInfo.rebufferedSince(lastChunkRequestRealtimeMs))
.setIsBufferEmpty(queue.isEmpty());
.setIsBufferEmpty(queue.isEmpty())
.setChunkDurationUs(segmentBaseHolder.segmentBase.durationUs);
long nextMediaSequence =
segmentBaseHolder.partIndex == C.INDEX_UNSET
? segmentBaseHolder.mediaSequence + 1
@ -112,8 +112,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
.setFlags(segmentBaseHolder.isPreload ? FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED : 0)
.build();
if (cmcdDataFactory != null) {
CmcdData cmcdData =
cmcdDataFactory.setChunkDurationUs(mediaSegment.durationUs).createCmcdData();
CmcdData cmcdData = cmcdDataFactory.createCmcdData();
dataSpec = cmcdData.addToDataSpec(dataSpec);
}
@ -153,7 +153,7 @@ public final class DefaultHlsPlaylistTracker
new CmcdData.Factory(cmcdConfiguration, CmcdData.STREAMING_FORMAT_HLS)
.setObjectType(CmcdData.OBJECT_TYPE_MANIFEST)
.createCmcdData();
cmcdData.addToDataSpec(dataSpec);
dataSpec = cmcdData.addToDataSpec(dataSpec);
}
ParsingLoadable<HlsPlaylist> multivariantPlaylistLoadable =
new ParsingLoadable<>(
@ -796,7 +796,7 @@ public final class DefaultHlsPlaylistTracker
if (primaryMediaPlaylistSnapshot != null) {
cmcdDataFactory.setIsLive(!primaryMediaPlaylistSnapshot.hasEndTag);
}
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
ParsingLoadable<HlsPlaylist> mediaPlaylistLoadable =
new ParsingLoadable<>(
@ -40,6 +40,7 @@ import androidx.media3.exoplayer.source.MediaSourceFactory;
import androidx.media3.exoplayer.source.SinglePeriodTimeline;
import androidx.media3.exoplayer.upstream.Allocator;
import androidx.media3.exoplayer.upstream.LoadErrorHandlingPolicy;
import com.google.common.base.Ascii;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import javax.net.SocketFactory;
@ -182,13 +183,21 @@ public final class RtspMediaSource extends BaseMediaSource {
checkNotNull(mediaItem.localConfiguration);
return new RtspMediaSource(
mediaItem,
forceUseRtpTcp
shouldForceUseRtpTcp(mediaItem)
? new TransferRtpDataChannelFactory(timeoutMs)
: new UdpDataSourceRtpDataChannelFactory(timeoutMs),
userAgent,
socketFactory,
debugLoggingEnabled);
}

private boolean shouldForceUseRtpTcp(MediaItem mediaItem) {
if (forceUseRtpTcp) {
return true;
}
@Nullable String scheme = checkNotNull(mediaItem.localConfiguration).uri.getScheme();
return scheme != null && Ascii.equalsIgnoreCase("rtspt", scheme);
}
}

/** Thrown when an exception or error is encountered during loading an RTSP stream. */
@ -237,7 +246,7 @@ public final class RtspMediaSource extends BaseMediaSource {
this.mediaItem = mediaItem;
this.rtpDataChannelFactory = rtpDataChannelFactory;
this.userAgent = userAgent;
this.uri = checkNotNull(mediaItem.localConfiguration).uri;
this.uri = maybeConvertRtsptUriScheme(checkNotNull(mediaItem.localConfiguration).uri);
this.socketFactory = socketFactory;
this.debugLoggingEnabled = debugLoggingEnabled;
this.timelineDurationUs = C.TIME_UNSET;
@ -262,7 +271,8 @@ public final class RtspMediaSource extends BaseMediaSource {
@Override
public boolean canUpdateMediaItem(MediaItem mediaItem) {
@Nullable MediaItem.LocalConfiguration newConfiguration = mediaItem.localConfiguration;
return newConfiguration != null && newConfiguration.uri.equals(this.uri);
return newConfiguration != null
&& maybeConvertRtsptUriScheme(newConfiguration.uri).equals(this.uri);
}

@Override
@ -309,6 +319,14 @@ public final class RtspMediaSource extends BaseMediaSource {

// Internal methods.

private static Uri maybeConvertRtsptUriScheme(Uri uri) {
@Nullable String scheme = uri.getScheme();
if (scheme == null || !Ascii.equalsIgnoreCase("rtspt", scheme)) {
return uri;
}
return Uri.parse("rtsp" + uri.toString().substring(5));
}

private void notifySourceInfoRefreshed() {
Timeline timeline =
new SinglePeriodTimeline(
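The RtspMediaSource change above rewrites `rtspt://` URIs to `rtsp://` before they are stored or compared. As a rough standalone sketch (a hypothetical helper mirroring `maybeConvertRtsptUriScheme`, not the library's public API), the conversion behaves like this:

```java
import android.net.Uri;

final class RtsptSchemeSketch {
  // Sketch of the rewrite used above: "rtspt://host/path" -> "rtsp://host/path".
  // Any other scheme, or a missing scheme, is returned unchanged.
  static Uri convertRtsptToRtsp(Uri uri) {
    String scheme = uri.getScheme();
    if (scheme == null || !scheme.equalsIgnoreCase("rtspt")) {
      return uri;
    }
    // "rtspt" is 5 characters long, so dropping them and prefixing "rtsp"
    // preserves the authority, path, and query of the original URI.
    return Uri.parse("rtsp" + uri.toString().substring(5));
  }
}
```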
@ -66,6 +66,18 @@ public class RtspMediaSourceTest {
assertThat(canUpdateMediaItem).isFalse();
}

@Test
public void canUpdateMediaItem_withChangeToRtspFromRtspt_returnsTrue() {
MediaItem initialMediaItem = new MediaItem.Builder().setUri("rtspt://test.test").build();
MediaItem updatedMediaItem =
TestUtil.buildFullyCustomizedMediaItem().buildUpon().setUri("rtsp://test.test").build();
MediaSource mediaSource = buildMediaSource(initialMediaItem);

boolean canUpdateMediaItem = mediaSource.canUpdateMediaItem(updatedMediaItem);

assertThat(canUpdateMediaItem).isTrue();
}

@Test
public void updateMediaItem_createsTimelineWithUpdatedItem() throws Exception {
MediaItem initialMediaItem =
@ -689,7 +689,7 @@ public final class SsMediaSource extends BaseMediaSource
if (manifest != null) {
cmcdDataFactory.setIsLive(manifest.isLive);
}
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
ParsingLoadable<SsManifest> loadable =
new ParsingLoadable<>(manifestDataSource, dataSpec, C.DATA_TYPE_MANIFEST, manifestParser);
@ -169,12 +169,15 @@ public final class Ac3Util {
*
* @param data The AC3SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The AC-3 format parsed from data in the header.
*/
public static Format parseAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData) {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);

@ -208,12 +211,15 @@ public final class Ac3Util {
*
* @param data The EC3SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The E-AC-3 format parsed from data in the header.
*/
public static Format parseEAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData) {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);
@ -163,14 +163,17 @@ public final class Ac4Util {
*
* @param data The AC4SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The AC-4 format parsed from data in the header.
* @throws ParserException If an unsupported container feature is encountered while parsing AC-4
* Annex E.
*/
public static Format parseAc4AnnexEFormat(
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData)
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData)
throws ParserException {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);
@ -310,9 +310,13 @@ public final class FlacExtractor implements Extractor {
currentFrameFirstSampleNumber = nextFrameFirstSampleNumber;
}

if (buffer.bytesLeft() < FlacConstants.MAX_FRAME_HEADER_SIZE) {
// The next frame header may not fit in the rest of the buffer, so put the trailing bytes at
// the start of the buffer, and reset the position and limit.
int remainingBufferCapacity = buffer.getData().length - buffer.limit();
if (buffer.bytesLeft() < FlacConstants.MAX_FRAME_HEADER_SIZE
&& remainingBufferCapacity < FlacConstants.MAX_FRAME_HEADER_SIZE) {
// We're running out of bytes to read before buffer.limit, and the next frame header may not
// fit in the rest of buffer.data beyond buffer.limit, so we move the bytes between
// buffer.position and buffer.limit to the start of buffer.data, and reset the position and
// limit.
int bytesLeft = buffer.bytesLeft();
System.arraycopy(
buffer.getData(), buffer.getPosition(), buffer.getData(), /* destPos= */ 0, bytesLeft);
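The FlacExtractor change above only compacts the read buffer when the unread bytes and the spare capacity after `buffer.limit` are both smaller than a frame header. A minimal sketch of that compaction step, using illustrative names rather than the extractor's actual fields, looks like this:

```java
// Illustrative buffer compaction: move unread bytes to the start of the backing array
// only when neither the unread region nor the free tail can hold a full frame header.
final class FrameBufferSketch {
  static int compactIfNeeded(byte[] data, int position, int limit, int maxFrameHeaderSize) {
    int bytesLeft = limit - position;
    int remainingCapacity = data.length - limit;
    if (bytesLeft < maxFrameHeaderSize && remainingCapacity < maxFrameHeaderSize) {
      System.arraycopy(data, position, data, /* destPos= */ 0, bytesLeft);
      return bytesLeft; // New limit; the new read position is 0.
    }
    return limit; // Enough room left; no compaction needed.
  }
}
```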
@ -36,6 +36,7 @@ import androidx.media3.common.util.ParsableBitArray;
|
||||
import androidx.media3.common.util.ParsableByteArray;
|
||||
import androidx.media3.common.util.UnstableApi;
|
||||
import androidx.media3.common.util.Util;
|
||||
import androidx.media3.container.Mp4AlternateGroupData;
|
||||
import androidx.media3.container.Mp4Box;
|
||||
import androidx.media3.container.Mp4Box.LeafBox;
|
||||
import androidx.media3.container.Mp4LocationData;
|
||||
@ -54,6 +55,7 @@ import androidx.media3.extractor.VorbisUtil;
|
||||
import com.google.common.base.Function;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.primitives.Ints;
|
||||
import java.math.RoundingMode;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
@ -380,21 +382,38 @@ public final class BoxParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
return stsdData.format == null
|
||||
? null
|
||||
: new Track(
|
||||
tkhdData.id,
|
||||
trackType,
|
||||
mdhdData.timescale,
|
||||
movieTimescale,
|
||||
durationUs,
|
||||
mdhdData.mediaDurationUs,
|
||||
stsdData.format,
|
||||
stsdData.requiredSampleTransformation,
|
||||
stsdData.trackEncryptionBoxes,
|
||||
stsdData.nalUnitLengthFieldLength,
|
||||
editListDurations,
|
||||
editListMediaTimes);
|
||||
if (stsdData.format == null) {
|
||||
return null;
|
||||
}
|
||||
Format format;
|
||||
if (tkhdData.alternateGroup != 0) {
|
||||
Mp4AlternateGroupData alternateGroupEntry =
|
||||
new Mp4AlternateGroupData(tkhdData.alternateGroup);
|
||||
format =
|
||||
stsdData
|
||||
.format
|
||||
.buildUpon()
|
||||
.setMetadata(
|
||||
stsdData.format.metadata != null
|
||||
? stsdData.format.metadata.copyWithAppendedEntries(alternateGroupEntry)
|
||||
: new Metadata(alternateGroupEntry))
|
||||
.build();
|
||||
} else {
|
||||
format = stsdData.format;
|
||||
}
|
||||
return new Track(
|
||||
tkhdData.id,
|
||||
trackType,
|
||||
mdhdData.timescale,
|
||||
movieTimescale,
|
||||
durationUs,
|
||||
mdhdData.mediaDurationUs,
|
||||
format,
|
||||
stsdData.requiredSampleTransformation,
|
||||
stsdData.trackEncryptionBoxes,
|
||||
stsdData.nalUnitLengthFieldLength,
|
||||
editListDurations,
|
||||
editListMediaTimes);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -509,6 +528,7 @@ public final class BoxParser {
|
||||
int[] flags;
|
||||
long timestampTimeUnits = 0;
|
||||
long duration;
|
||||
long totalSize = 0;
|
||||
|
||||
if (rechunkFixedSizeSamples) {
|
||||
long[] chunkOffsetsBytes = new long[chunkIterator.length];
|
||||
@ -526,6 +546,7 @@ public final class BoxParser {
|
||||
timestamps = rechunkedResults.timestamps;
|
||||
flags = rechunkedResults.flags;
|
||||
duration = rechunkedResults.duration;
|
||||
totalSize = rechunkedResults.totalSize;
|
||||
} else {
|
||||
offsets = new long[sampleCount];
|
||||
sizes = new int[sampleCount];
|
||||
@ -568,6 +589,7 @@ public final class BoxParser {
|
||||
|
||||
offsets[i] = offset;
|
||||
sizes[i] = sampleSizeBox.readNextSampleSize();
|
||||
totalSize += sizes[i];
|
||||
if (sizes[i] > maximumSize) {
|
||||
maximumSize = sizes[i];
|
||||
}
|
||||
@ -639,6 +661,20 @@ public final class BoxParser {
                + (!isCttsValid ? ", ctts invalid" : ""));
      }
    }

    if (track.mediaDurationUs > 0) {
      long averageBitrate =
          Util.scaleLargeValue(
              totalSize * C.BITS_PER_BYTE,
              C.MICROS_PER_SECOND,
              track.mediaDurationUs,
              RoundingMode.HALF_DOWN);
      if (averageBitrate > 0 && averageBitrate < Integer.MAX_VALUE) {
        Format format = track.format.buildUpon().setAverageBitrate((int) averageBitrate).build();
        track = track.copyWithFormat(format);
      }
    }

    long durationUs = Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, track.timescale);

    if (track.editListDurations == null) {
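In plain arithmetic, the `Util.scaleLargeValue` call above computes total sample bytes times eight, scaled from microseconds to seconds. A simplified sketch, ignoring the overflow-safe scaling and `HALF_DOWN` rounding that `scaleLargeValue` provides:

```java
// Simplified equivalent of the scaleLargeValue call above.
// E.g. 1_000_000 bytes over 2_000_000 us -> 8_000_000 bits / 2 s = 4_000_000 bps.
static long approximateAverageBitrate(long totalSizeBytes, long mediaDurationUs) {
  return totalSizeBytes * 8 * 1_000_000L / mediaDurationUs;
}
```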
@ -913,7 +949,9 @@ public final class BoxParser {
      }
    }

    tkhd.skipBytes(16);
    tkhd.skipBytes(10);
    int alternateGroup = tkhd.readUnsignedShort();
    tkhd.skipBytes(4);
    int a00 = tkhd.readInt();
    int a01 = tkhd.readInt();
    tkhd.skipBytes(4);
@ -933,7 +971,7 @@ public final class BoxParser {
      rotationDegrees = 0;
    }

    return new TkhdData(trackId, duration, rotationDegrees);
    return new TkhdData(trackId, duration, alternateGroup, rotationDegrees);
  }

  /**
@ -997,22 +1035,34 @@ public final class BoxParser {
        mediaDurationUs = Util.scaleLargeTimestamp(mediaDuration, C.MICROS_PER_SECOND, timescale);
      }
    }
    int languageCode = mdhd.readUnsignedShort();
    String language =
        ""
            + (char) (((languageCode >> 10) & 0x1F) + 0x60)
            + (char) (((languageCode >> 5) & 0x1F) + 0x60)
            + (char) ((languageCode & 0x1F) + 0x60);

    String language = getLanguageFromCode(/* languageCode= */ mdhd.readUnsignedShort());
    return new MdhdData(timescale, mediaDurationUs, language);
  }

  @Nullable
  private static String getLanguageFromCode(int languageCode) {
    char[] chars = {
      (char) (((languageCode >> 10) & 0x1F) + 0x60),
      (char) (((languageCode >> 5) & 0x1F) + 0x60),
      (char) ((languageCode & 0x1F) + 0x60)
    };

    for (char c : chars) {
      if (c < 'a' || c > 'z') {
        return null;
      }
    }
    return new String(chars);
  }
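The mdhd language field packs three lowercase ISO 639-2/T letters into 15 bits, 5 bits per letter, each offset from 0x60. A small worked check (values computed by hand, not taken from the source):

```java
// 'e' = 0x65, 'n' = 0x6E, 'g' = 0x67 -> 5-bit offsets 5, 14, 7 from 0x60.
int packed = (5 << 10) | (14 << 5) | 7; // = 0x15C7
char c1 = (char) (((packed >> 10) & 0x1F) + 0x60); // 'e'
char c2 = (char) (((packed >> 5) & 0x1F) + 0x60);  // 'n'
char c3 = (char) ((packed & 0x1F) + 0x60);         // 'g'
String language = "" + c1 + c2 + c3;                // "eng"
// An all-zero code yields '`' characters (below 'a'), so getLanguageFromCode returns null.
```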
/**
|
||||
* Parses a stsd atom (defined in ISO/IEC 14496-12).
|
||||
*
|
||||
* @param stsd The stsd atom to decode.
|
||||
* @param trackId The track's identifier in its container.
|
||||
* @param rotationDegrees The rotation of the track in degrees.
|
||||
* @param language The language of the track.
|
||||
* @param language The language of the track, or {@code null} if unset.
|
||||
* @param drmInitData {@link DrmInitData} to be included in the format, or {@code null}.
|
||||
* @param isQuickTime True for QuickTime media. False otherwise.
|
||||
* @return An object containing the parsed data.
|
||||
@ -1021,7 +1071,7 @@ public final class BoxParser {
|
||||
ParsableByteArray stsd,
|
||||
int trackId,
|
||||
int rotationDegrees,
|
||||
String language,
|
||||
@Nullable String language,
|
||||
@Nullable DrmInitData drmInitData,
|
||||
boolean isQuickTime)
|
||||
throws ParserException {
|
||||
@ -1057,6 +1107,7 @@ public final class BoxParser {
|
||||
childStartPosition,
|
||||
childAtomSize,
|
||||
trackId,
|
||||
language,
|
||||
rotationDegrees,
|
||||
drmInitData,
|
||||
out,
|
||||
@ -1125,7 +1176,7 @@ public final class BoxParser {
|
||||
int position,
|
||||
int atomSize,
|
||||
int trackId,
|
||||
String language,
|
||||
@Nullable String language,
|
||||
StsdData out) {
|
||||
parent.setPosition(position + Mp4Box.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
|
||||
|
||||
@ -1174,6 +1225,7 @@ public final class BoxParser {
|
||||
int position,
|
||||
int size,
|
||||
int trackId,
|
||||
@Nullable String language,
|
||||
int rotationDegrees,
|
||||
@Nullable DrmInitData drmInitData,
|
||||
StsdData out,
|
||||
@ -1223,6 +1275,7 @@ public final class BoxParser {
|
||||
@Nullable byte[] projectionData = null;
|
||||
@C.StereoMode int stereoMode = Format.NO_VALUE;
|
||||
@Nullable EsdsData esdsData = null;
|
||||
@Nullable BtrtData btrtData = null;
|
||||
int maxNumReorderSamples = Format.NO_VALUE;
|
||||
int maxSubLayers = Format.NO_VALUE;
|
||||
@Nullable NalUnitUtil.H265VpsData vpsData = null;
|
||||
@ -1446,6 +1499,8 @@ public final class BoxParser {
|
||||
if (initializationDataBytes != null) {
|
||||
initializationData = ImmutableList.of(initializationDataBytes);
|
||||
}
|
||||
} else if (childAtomType == Mp4Box.TYPE_btrt) {
|
||||
btrtData = parseBtrtFromParent(parent, childStartPosition);
|
||||
} else if (childAtomType == Mp4Box.TYPE_pasp) {
|
||||
pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
|
||||
pixelWidthHeightRatioFromPasp = true;
|
||||
@ -1543,6 +1598,7 @@ public final class BoxParser {
|
||||
.setMaxNumReorderSamples(maxNumReorderSamples)
|
||||
.setMaxSubLayers(maxSubLayers)
|
||||
.setDrmInitData(drmInitData)
|
||||
.setLanguage(language)
|
||||
// Note that if either mdcv or clli are missing, we leave the corresponding HDR static
|
||||
// metadata bytes with value zero. See [Internal ref: b/194535665].
|
||||
.setColorInfo(
|
||||
@ -1555,7 +1611,12 @@ public final class BoxParser {
|
||||
.setChromaBitdepth(bitdepthChroma)
|
||||
.build());
|
||||
|
||||
if (esdsData != null) {
|
||||
// Prefer btrtData over esdsData for video track.
|
||||
if (btrtData != null) {
|
||||
formatBuilder
|
||||
.setAverageBitrate(Ints.saturatedCast(btrtData.avgBitrate))
|
||||
.setPeakBitrate(Ints.saturatedCast(btrtData.maxBitrate));
|
||||
} else if (esdsData != null) {
|
||||
formatBuilder
|
||||
.setAverageBitrate(Ints.saturatedCast(esdsData.bitrate))
|
||||
.setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate));
|
||||
@ -1812,7 +1873,7 @@ public final class BoxParser {
|
||||
int position,
|
||||
int size,
|
||||
int trackId,
|
||||
String language,
|
||||
@Nullable String language,
|
||||
boolean isQuickTime,
|
||||
@Nullable DrmInitData drmInitData,
|
||||
StsdData out,
|
||||
@ -1834,6 +1895,7 @@ public final class BoxParser {
|
||||
@C.PcmEncoding int pcmEncoding = Format.NO_VALUE;
|
||||
@Nullable String codecs = null;
|
||||
@Nullable EsdsData esdsData = null;
|
||||
@Nullable BtrtData btrtData = null;
|
||||
|
||||
if (quickTimeSoundDescriptionVersion == 0 || quickTimeSoundDescriptionVersion == 1) {
|
||||
channelCount = parent.readUnsignedShort();
|
||||
@ -2040,6 +2102,8 @@ public final class BoxParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (childAtomType == Mp4Box.TYPE_btrt) {
|
||||
btrtData = parseBtrtFromParent(parent, childPosition);
|
||||
} else if (childAtomType == Mp4Box.TYPE_dac3) {
|
||||
parent.setPosition(Mp4Box.HEADER_SIZE + childPosition);
|
||||
out.format =
|
||||
@ -2127,10 +2191,15 @@ public final class BoxParser {
|
||||
.setDrmInitData(drmInitData)
|
||||
.setLanguage(language);
|
||||
|
||||
// Prefer esdsData over btrtData for audio track.
|
||||
if (esdsData != null) {
|
||||
formatBuilder
|
||||
.setAverageBitrate(Ints.saturatedCast(esdsData.bitrate))
|
||||
.setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate));
|
||||
} else if (btrtData != null) {
|
||||
formatBuilder
|
||||
.setAverageBitrate(Ints.saturatedCast(btrtData.avgBitrate))
|
||||
.setPeakBitrate(Ints.saturatedCast(btrtData.maxBitrate));
|
||||
}
|
||||
|
||||
out.format = formatBuilder.build();
|
||||
@ -2221,6 +2290,20 @@ public final class BoxParser {
        /* peakBitrate= */ peakBitrate > 0 ? peakBitrate : Format.NO_VALUE);
  }

  /**
   * Returns bitrate data contained in a btrt box, as specified by Section 8.5.2.2 in ISO/IEC
   * 14496-12:2012(E).
   */
  private static BtrtData parseBtrtFromParent(ParsableByteArray parent, int position) {
    parent.setPosition(position + Mp4Box.HEADER_SIZE);

    parent.skipBytes(4); // bufferSizeDB
    long maxBitrate = parent.readUnsignedInt();
    long avgBitrate = parent.readUnsignedInt();

    return new BtrtData(avgBitrate, maxBitrate);
  }

  /**
   * Returns stereo video playback related meta data from the vexu box. See
   * https://developer.apple.com/av-foundation/Stereo-Video-ISOBMFF-Extensions.pdf for ref.
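For reference, the btrt payload after the box header is three unsigned 32-bit integers. A standalone sketch of the same read using `java.nio` (illustrative only; the extractor uses `ParsableByteArray` as shown above):

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// btrt payload layout: bufferSizeDB (4 bytes), maxBitrate (4 bytes), avgBitrate (4 bytes).
static long[] readBtrtPayload(byte[] payload) {
  ByteBuffer buffer = ByteBuffer.wrap(payload).order(ByteOrder.BIG_ENDIAN);
  long bufferSizeDb = buffer.getInt() & 0xFFFFFFFFL; // Unsigned 32-bit read.
  long maxBitrate = buffer.getInt() & 0xFFFFFFFFL;
  long avgBitrate = buffer.getInt() & 0xFFFFFFFFL;
  return new long[] {bufferSizeDb, maxBitrate, avgBitrate};
}
```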
@ -2481,11 +2564,13 @@ public final class BoxParser {
|
||||
|
||||
private final int id;
|
||||
private final long duration;
|
||||
private final int alternateGroup;
|
||||
private final int rotationDegrees;
|
||||
|
||||
public TkhdData(int id, long duration, int rotationDegrees) {
|
||||
public TkhdData(int id, long duration, int alternateGroup, int rotationDegrees) {
|
||||
this.id = id;
|
||||
this.duration = duration;
|
||||
this.alternateGroup = alternateGroup;
|
||||
this.rotationDegrees = rotationDegrees;
|
||||
}
|
||||
}
|
||||
@ -2526,6 +2611,17 @@ public final class BoxParser {
|
||||
}
|
||||
}
|
||||
|
||||
/** Data parsed from btrt box. */
|
||||
private static final class BtrtData {
|
||||
private final long avgBitrate;
|
||||
private final long maxBitrate;
|
||||
|
||||
public BtrtData(long avgBitrate, long maxBitrate) {
|
||||
this.avgBitrate = avgBitrate;
|
||||
this.maxBitrate = maxBitrate;
|
||||
}
|
||||
}
|
||||
|
||||
/** Data parsed from stri box. */
|
||||
private static final class StriData {
|
||||
private final boolean hasLeftEyeView;
|
||||
@ -2552,9 +2648,9 @@ public final class BoxParser {
|
||||
private static final class MdhdData {
|
||||
private final long timescale;
|
||||
private final long mediaDurationUs;
|
||||
private final String language;
|
||||
@Nullable private final String language;
|
||||
|
||||
public MdhdData(long timescale, long mediaDurationUs, String language) {
|
||||
public MdhdData(long timescale, long mediaDurationUs, @Nullable String language) {
|
||||
this.timescale = timescale;
|
||||
this.mediaDurationUs = mediaDurationUs;
|
||||
this.language = language;
|
||||
|
@ -35,6 +35,7 @@ import androidx.media3.common.util.Util;
|
||||
public final long[] timestamps;
|
||||
public final int[] flags;
|
||||
public final long duration;
|
||||
public final long totalSize;
|
||||
|
||||
private Results(
|
||||
long[] offsets,
|
||||
@ -42,13 +43,15 @@ import androidx.media3.common.util.Util;
|
||||
int maximumSize,
|
||||
long[] timestamps,
|
||||
int[] flags,
|
||||
long duration) {
|
||||
long duration,
|
||||
long totalSize) {
|
||||
this.offsets = offsets;
|
||||
this.sizes = sizes;
|
||||
this.maximumSize = maximumSize;
|
||||
this.timestamps = timestamps;
|
||||
this.flags = flags;
|
||||
this.duration = duration;
|
||||
this.totalSize = totalSize;
|
||||
}
|
||||
}
|
||||
|
||||
@ -81,6 +84,7 @@ import androidx.media3.common.util.Util;
|
||||
int maximumSize = 0;
|
||||
long[] timestamps = new long[rechunkedSampleCount];
|
||||
int[] flags = new int[rechunkedSampleCount];
|
||||
int totalSize = 0;
|
||||
|
||||
int originalSampleIndex = 0;
|
||||
int newSampleIndex = 0;
|
||||
@ -93,6 +97,7 @@ import androidx.media3.common.util.Util;
|
||||
|
||||
offsets[newSampleIndex] = sampleOffset;
|
||||
sizes[newSampleIndex] = fixedSampleSize * bufferSampleCount;
|
||||
totalSize += sizes[newSampleIndex];
|
||||
maximumSize = max(maximumSize, sizes[newSampleIndex]);
|
||||
timestamps[newSampleIndex] = (timestampDeltaInTimeUnits * originalSampleIndex);
|
||||
flags[newSampleIndex] = C.BUFFER_FLAG_KEY_FRAME;
|
||||
@ -106,7 +111,7 @@ import androidx.media3.common.util.Util;
|
||||
}
|
||||
long duration = timestampDeltaInTimeUnits * originalSampleIndex;
|
||||
|
||||
return new Results(offsets, sizes, maximumSize, timestamps, flags, duration);
|
||||
return new Results(offsets, sizes, maximumSize, timestamps, flags, duration, totalSize);
|
||||
}
|
||||
|
||||
private FixedSampleSizeRechunker() {
|
||||
|
@ -84,13 +84,22 @@ import com.google.common.collect.ImmutableList;
|
||||
|
||||
private MetadataUtil() {}
|
||||
|
||||
/** Updates a {@link Format.Builder} to include metadata from the provided sources. */
|
||||
/**
|
||||
* Updates a {@link Format.Builder} to include metadata from the provided sources.
|
||||
*
|
||||
* @param trackType The {@link C.TrackType} of the track.
|
||||
* @param mdtaMetadata The {@link Metadata} from the {@code mdta} box if present, otherwise null.
|
||||
* @param formatBuilder A {@link Format.Builder} to append the metadata to.
|
||||
* @param existingMetadata The {@link Format#metadata} from {@code formatBuilder}.
|
||||
* @param additionalMetadata Additional metadata to append.
|
||||
*/
|
||||
public static void setFormatMetadata(
|
||||
int trackType,
|
||||
@C.TrackType int trackType,
|
||||
@Nullable Metadata mdtaMetadata,
|
||||
Format.Builder formatBuilder,
|
||||
@Nullable Metadata existingMetadata,
|
||||
@NullableType Metadata... additionalMetadata) {
|
||||
Metadata formatMetadata = new Metadata();
|
||||
Metadata formatMetadata = existingMetadata != null ? existingMetadata : new Metadata();
|
||||
|
||||
if (mdtaMetadata != null) {
|
||||
for (int i = 0; i < mdtaMetadata.length(); i++) {
|
||||
|
@ -757,12 +757,6 @@ public final class Mp4Extractor implements Extractor, SeekMap {
|
||||
roleFlags |=
|
||||
firstVideoTrackIndex == C.INDEX_UNSET ? C.ROLE_FLAG_MAIN : C.ROLE_FLAG_ALTERNATE;
|
||||
}
|
||||
if (track.format.frameRate == Format.NO_VALUE
|
||||
&& trackDurationUs > 0
|
||||
&& trackSampleTable.sampleCount > 0) {
|
||||
float frameRate = trackSampleTable.sampleCount / (trackDurationUs / 1000000f);
|
||||
formatBuilder.setFrameRate(frameRate);
|
||||
}
|
||||
if (readingAuxiliaryTracks) {
|
||||
roleFlags |= C.ROLE_FLAG_AUXILIARY;
|
||||
formatBuilder.setAuxiliaryTrackType(auxiliaryTrackTypesForAuxiliaryTracks.get(i));
|
||||
@ -775,6 +769,7 @@ public final class Mp4Extractor implements Extractor, SeekMap {
|
||||
track.type,
|
||||
mdtaMetadata,
|
||||
formatBuilder,
|
||||
track.format.metadata,
|
||||
slowMotionMetadataEntries.isEmpty() ? null : new Metadata(slowMotionMetadataEntries),
|
||||
udtaMetadata,
|
||||
mvhdMetadata);
|
||||
|
@ -260,6 +260,11 @@ public final class Mp4ExtractorParameterizedTest {
|
||||
assertExtractorBehavior("media/mp4/sample_2_byte_NAL_length.mp4");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void mp4SampleWithBtrt() throws Exception {
|
||||
assertExtractorBehavior("media/mp4/sample_with_btrt.mp4");
|
||||
}
|
||||
|
||||
private void assertExtractorBehavior(String file) throws IOException {
|
||||
ExtractorAsserts.AssertionConfig.Builder assertionConfigBuilder =
|
||||
new ExtractorAsserts.AssertionConfig.Builder();
|
||||
|
@ -113,10 +113,7 @@ import androidx.media3.common.util.Util;
|
||||
public PendingIntent createMediaActionPendingIntent(
|
||||
MediaSession mediaSession, @Player.Command long command) {
|
||||
int keyCode = toKeyCode(command);
|
||||
Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
|
||||
intent.setData(mediaSession.getImpl().getUri());
|
||||
intent.setComponent(new ComponentName(service, service.getClass()));
|
||||
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, keyCode));
|
||||
Intent intent = getMediaButtonIntent(mediaSession, keyCode);
|
||||
if (Util.SDK_INT >= 26
|
||||
&& command == COMMAND_PLAY_PAUSE
|
||||
&& !mediaSession.getPlayer().getPlayWhenReady()) {
|
||||
@ -130,6 +127,26 @@ import androidx.media3.common.util.Util;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public PendingIntent createNotificationDismissalIntent(MediaSession mediaSession) {
|
||||
Intent intent =
|
||||
getMediaButtonIntent(mediaSession, KEYCODE_MEDIA_STOP)
|
||||
.putExtra(MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY, true);
|
||||
return PendingIntent.getService(
|
||||
service,
|
||||
/* requestCode= */ KEYCODE_MEDIA_STOP,
|
||||
intent,
|
||||
Util.SDK_INT >= 23 ? PendingIntent.FLAG_IMMUTABLE : 0);
|
||||
}
|
||||
|
||||
private Intent getMediaButtonIntent(MediaSession mediaSession, int mediaKeyCode) {
|
||||
Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
|
||||
intent.setData(mediaSession.getImpl().getUri());
|
||||
intent.setComponent(new ComponentName(service, service.getClass()));
|
||||
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, mediaKeyCode));
|
||||
return intent;
|
||||
}
|
||||
|
||||
private int toKeyCode(@Player.Command long action) {
|
||||
if (action == COMMAND_SEEK_TO_NEXT_MEDIA_ITEM || action == COMMAND_SEEK_TO_NEXT) {
|
||||
return KEYCODE_MEDIA_NEXT;
|
||||
|
@ -22,7 +22,6 @@ import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT;
|
||||
import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM;
|
||||
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS;
|
||||
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM;
|
||||
import static androidx.media3.common.Player.COMMAND_STOP;
|
||||
import static androidx.media3.common.util.Assertions.checkState;
|
||||
import static androidx.media3.common.util.Assertions.checkStateNotNull;
|
||||
|
||||
@ -379,8 +378,7 @@ public class DefaultMediaNotificationProvider implements MediaNotification.Provi
|
||||
Notification notification =
|
||||
builder
|
||||
.setContentIntent(mediaSession.getSessionActivity())
|
||||
.setDeleteIntent(
|
||||
actionFactory.createMediaActionPendingIntent(mediaSession, COMMAND_STOP))
|
||||
.setDeleteIntent(actionFactory.createNotificationDismissalIntent(mediaSession))
|
||||
.setOnlyAlertOnce(true)
|
||||
.setSmallIcon(smallIconResourceId)
|
||||
.setStyle(mediaStyle)
|
||||
|
@ -898,6 +898,8 @@ import java.util.concurrent.TimeoutException;
|
||||
return metadata.writer;
|
||||
case MediaMetadataCompat.METADATA_KEY_COMPOSER:
|
||||
return metadata.composer;
|
||||
case MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE:
|
||||
return metadata.subtitle;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
@ -61,6 +61,7 @@ public final class MediaBrowser extends MediaController {
|
||||
private Looper applicationLooper;
|
||||
private @MonotonicNonNull BitmapLoader bitmapLoader;
|
||||
private int maxCommandsForMediaItems;
|
||||
private long platformSessionCallbackAggregationTimeoutMs;
|
||||
|
||||
/**
|
||||
* Creates a builder for {@link MediaBrowser}.
|
||||
@ -78,6 +79,8 @@ public final class MediaBrowser extends MediaController {
|
||||
connectionHints = Bundle.EMPTY;
|
||||
listener = new Listener() {};
|
||||
applicationLooper = Util.getCurrentOrMainLooper();
|
||||
platformSessionCallbackAggregationTimeoutMs =
|
||||
DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -156,6 +159,24 @@ public final class MediaBrowser extends MediaController {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the timeout after which updates from the platform session callbacks are applied to the
|
||||
* browser, in milliseconds.
|
||||
*
|
||||
* <p>The default is 100ms.
|
||||
*
|
||||
* @param platformSessionCallbackAggregationTimeoutMs The timeout, in milliseconds.
|
||||
* @return The builder to allow chaining.
|
||||
*/
|
||||
@UnstableApi
|
||||
@CanIgnoreReturnValue
|
||||
public Builder experimentalSetPlatformSessionCallbackAggregationTimeoutMs(
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
this.platformSessionCallbackAggregationTimeoutMs =
|
||||
platformSessionCallbackAggregationTimeoutMs;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a {@link MediaBrowser} asynchronously.
|
||||
*
|
||||
@ -196,7 +217,8 @@ public final class MediaBrowser extends MediaController {
|
||||
applicationLooper,
|
||||
holder,
|
||||
bitmapLoader,
|
||||
maxCommandsForMediaItems);
|
||||
maxCommandsForMediaItems,
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
postOrRun(new Handler(applicationLooper), () -> holder.setController(browser));
|
||||
return holder;
|
||||
}
|
||||
@ -266,7 +288,8 @@ public final class MediaBrowser extends MediaController {
|
||||
Looper applicationLooper,
|
||||
ConnectionCallback connectionCallback,
|
||||
@Nullable BitmapLoader bitmapLoader,
|
||||
int maxCommandsForMediaItems) {
|
||||
int maxCommandsForMediaItems,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
super(
|
||||
context,
|
||||
token,
|
||||
@ -275,7 +298,8 @@ public final class MediaBrowser extends MediaController {
|
||||
applicationLooper,
|
||||
connectionCallback,
|
||||
bitmapLoader,
|
||||
maxCommandsForMediaItems);
|
||||
maxCommandsForMediaItems,
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -286,12 +310,19 @@ public final class MediaBrowser extends MediaController {
|
||||
SessionToken token,
|
||||
Bundle connectionHints,
|
||||
Looper applicationLooper,
|
||||
@Nullable BitmapLoader bitmapLoader) {
|
||||
@Nullable BitmapLoader bitmapLoader,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
MediaBrowserImpl impl;
|
||||
if (token.isLegacySession()) {
|
||||
impl =
|
||||
new MediaBrowserImplLegacy(
|
||||
context, this, token, connectionHints, applicationLooper, checkNotNull(bitmapLoader));
|
||||
context,
|
||||
this,
|
||||
token,
|
||||
connectionHints,
|
||||
applicationLooper,
|
||||
checkNotNull(bitmapLoader),
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
} else {
|
||||
impl = new MediaBrowserImplBase(context, this, token, connectionHints, applicationLooper);
|
||||
}
|
||||
|
@ -64,8 +64,16 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
SessionToken token,
|
||||
Bundle connectionHints,
|
||||
Looper applicationLooper,
|
||||
BitmapLoader bitmapLoader) {
|
||||
super(context, instance, token, connectionHints, applicationLooper, bitmapLoader);
|
||||
BitmapLoader bitmapLoader,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
super(
|
||||
context,
|
||||
instance,
|
||||
token,
|
||||
connectionHints,
|
||||
applicationLooper,
|
||||
bitmapLoader,
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
this.instance = instance;
|
||||
commandButtonsForMediaItems = ImmutableMap.of();
|
||||
}
|
||||
|
@ -201,6 +201,8 @@ public class MediaController implements Player {
|
||||
"MediaController method is called from a wrong thread."
|
||||
+ " See javadoc of MediaController for details.";
|
||||
|
||||
@UnstableApi protected static final long DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS = 100L;
|
||||
|
||||
/** A builder for {@link MediaController}. */
|
||||
public static final class Builder {
|
||||
|
||||
@ -211,6 +213,7 @@ public class MediaController implements Player {
|
||||
private Looper applicationLooper;
|
||||
private @MonotonicNonNull BitmapLoader bitmapLoader;
|
||||
private int maxCommandsForMediaItems;
|
||||
private long platformSessionCallbackAggregationTimeoutMs;
|
||||
|
||||
/**
|
||||
* Creates a builder for {@link MediaController}.
|
||||
@ -242,6 +245,8 @@ public class MediaController implements Player {
|
||||
connectionHints = Bundle.EMPTY;
|
||||
listener = new Listener() {};
|
||||
applicationLooper = Util.getCurrentOrMainLooper();
|
||||
platformSessionCallbackAggregationTimeoutMs =
|
||||
DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -320,6 +325,24 @@ public class MediaController implements Player {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the timeout after which updates from the platform session callbacks are applied to the
|
||||
* controller, in milliseconds.
|
||||
*
|
||||
* <p>The default is 100ms.
|
||||
*
|
||||
* @param platformSessionCallbackAggregationTimeoutMs The timeout, in milliseconds.
|
||||
* @return The builder to allow chaining.
|
||||
*/
|
||||
@UnstableApi
|
||||
@CanIgnoreReturnValue
|
||||
public Builder experimentalSetPlatformSessionCallbackAggregationTimeoutMs(
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
this.platformSessionCallbackAggregationTimeoutMs =
|
||||
platformSessionCallbackAggregationTimeoutMs;
|
||||
return this;
|
||||
}
|
||||
|
||||
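A minimal usage sketch of the experimental setter added above, assuming a `Context` and a `SessionToken` for the target session are already available; the 100 ms value simply restates the documented default:

```java
import android.content.Context;
import androidx.media3.session.MediaController;
import androidx.media3.session.SessionToken;
import com.google.common.util.concurrent.ListenableFuture;

// Sketch: build a controller with the experimental aggregation timeout (default is 100 ms).
static ListenableFuture<MediaController> buildController(Context context, SessionToken token) {
  return new MediaController.Builder(context, token)
      .experimentalSetPlatformSessionCallbackAggregationTimeoutMs(100)
      .buildAsync();
}
```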
/**
|
||||
* Builds a {@link MediaController} asynchronously.
|
||||
*
|
||||
@ -361,7 +384,8 @@ public class MediaController implements Player {
|
||||
applicationLooper,
|
||||
holder,
|
||||
bitmapLoader,
|
||||
maxCommandsForMediaItems);
|
||||
maxCommandsForMediaItems,
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
postOrRun(new Handler(applicationLooper), () -> holder.setController(controller));
|
||||
return holder;
|
||||
}
|
||||
@ -553,7 +577,8 @@ public class MediaController implements Player {
|
||||
Looper applicationLooper,
|
||||
ConnectionCallback connectionCallback,
|
||||
@Nullable BitmapLoader bitmapLoader,
|
||||
int maxCommandsForMediaItems) {
|
||||
int maxCommandsForMediaItems,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
checkNotNull(context, "context must not be null");
|
||||
checkNotNull(token, "token must not be null");
|
||||
Log.i(
|
||||
@ -576,7 +601,14 @@ public class MediaController implements Player {
|
||||
this.connectionCallback = connectionCallback;
|
||||
this.maxCommandsForMediaItems = maxCommandsForMediaItems;
|
||||
|
||||
impl = createImpl(context, token, connectionHints, applicationLooper, bitmapLoader);
|
||||
impl =
|
||||
createImpl(
|
||||
context,
|
||||
token,
|
||||
connectionHints,
|
||||
applicationLooper,
|
||||
bitmapLoader,
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
impl.connect();
|
||||
}
|
||||
|
||||
@ -587,10 +619,17 @@ public class MediaController implements Player {
|
||||
SessionToken token,
|
||||
Bundle connectionHints,
|
||||
Looper applicationLooper,
|
||||
@Nullable BitmapLoader bitmapLoader) {
|
||||
@Nullable BitmapLoader bitmapLoader,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
if (token.isLegacySession()) {
|
||||
return new MediaControllerImplLegacy(
|
||||
context, this, token, connectionHints, applicationLooper, checkNotNull(bitmapLoader));
|
||||
context,
|
||||
this,
|
||||
token,
|
||||
connectionHints,
|
||||
applicationLooper,
|
||||
checkNotNull(bitmapLoader),
|
||||
platformSessionCallbackAggregationTimeoutMs);
|
||||
} else {
|
||||
return new MediaControllerImplBase(context, this, token, connectionHints, applicationLooper);
|
||||
}
|
||||
|
@ -93,8 +93,6 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
|
||||
private static final String TAG = "MCImplLegacy";
|
||||
|
||||
private static final long AGGREGATES_CALLBACKS_WITHIN_TIMEOUT_MS = 500L;
|
||||
|
||||
/* package */ final Context context;
|
||||
private final MediaController instance;
|
||||
|
||||
@ -104,6 +102,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
private final BitmapLoader bitmapLoader;
|
||||
private final ImmutableList<CommandButton> commandButtonsForMediaItems;
|
||||
private final Bundle connectionHints;
|
||||
private final long platformSessionCallbackAggregationTimeoutMs;
|
||||
|
||||
@Nullable private MediaControllerCompat controllerCompat;
|
||||
@Nullable private MediaBrowserCompat browserCompat;
|
||||
@ -122,7 +121,8 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
SessionToken token,
|
||||
Bundle connectionHints,
|
||||
Looper applicationLooper,
|
||||
BitmapLoader bitmapLoader) {
|
||||
BitmapLoader bitmapLoader,
|
||||
long platformSessionCallbackAggregationTimeoutMs) {
|
||||
// Initialize default values.
|
||||
legacyPlayerInfo = new LegacyPlayerInfo();
|
||||
pendingLegacyPlayerInfo = new LegacyPlayerInfo();
|
||||
@ -140,6 +140,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
this.token = token;
|
||||
this.connectionHints = connectionHints;
|
||||
this.bitmapLoader = bitmapLoader;
|
||||
this.platformSessionCallbackAggregationTimeoutMs = platformSessionCallbackAggregationTimeoutMs;
|
||||
currentPositionMs = C.TIME_UNSET;
|
||||
lastSetPlayWhenReadyCalledTimeMs = C.TIME_UNSET;
|
||||
// Always empty. Only supported for a MediaBrowser connected to a MediaBrowserServiceCompat.
|
||||
@ -1992,7 +1993,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
|
||||
return;
|
||||
}
|
||||
pendingChangesHandler.sendEmptyMessageDelayed(
|
||||
MSG_HANDLE_PENDING_UPDATES, AGGREGATES_CALLBACKS_WITHIN_TIMEOUT_MS);
|
||||
MSG_HANDLE_PENDING_UPDATES, platformSessionCallbackAggregationTimeoutMs);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -31,6 +31,17 @@ import com.google.common.collect.ImmutableList;
|
||||
/** A notification for media playbacks. */
|
||||
public final class MediaNotification {
|
||||
|
||||
/**
|
||||
* Event key to indicate a media notification was dismissed.
|
||||
*
|
||||
* <p>This event key can be used as an extras key for a boolean extra on a media button pending
|
||||
* intent, and as a custom session command action to inform the media notification controller
|
||||
* that a notification was dismissed.
|
||||
*/
|
||||
@UnstableApi
|
||||
public static final String NOTIFICATION_DISMISSED_EVENT_KEY =
|
||||
"androidx.media3.session.NOTIFICATION_DISMISSED_EVENT_KEY";
|
||||
|
||||
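As a consumer-side illustration, the boolean extra carried under this key can be read back from a media button intent; this mirrors what the session implementation does later in this diff and is only a sketch:

```java
import android.content.Intent;
import androidx.media3.session.MediaNotification;

// Sketch: detect whether a received media button intent marks a notification dismissal.
static boolean isNotificationDismissed(Intent intent) {
  return intent.getBooleanExtra(
      MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY, /* defaultValue= */ false);
}
```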
/**
|
||||
* Creates {@linkplain NotificationCompat.Action actions} and {@linkplain PendingIntent pending
|
||||
* intents} for notifications.
|
||||
@ -99,10 +110,20 @@ public final class MediaNotification {
|
||||
* Creates a {@link PendingIntent} for a media action that will be handled by the library.
|
||||
*
|
||||
* @param mediaSession The media session to which the action will be sent.
|
||||
* @param command The intent's command.
|
||||
* @return The {@link PendingIntent}.
|
||||
*/
|
||||
PendingIntent createMediaActionPendingIntent(
|
||||
MediaSession mediaSession, @Player.Command long command);
|
||||
|
||||
/**
|
||||
* Creates a {@link PendingIntent} triggered when the notification is dismissed.
|
||||
*
|
||||
* @param mediaSession The media session for which the intent is created.
|
||||
* @return The {@link PendingIntent}.
|
||||
*/
|
||||
default PendingIntent createNotificationDismissalIntent(MediaSession mediaSession) {
|
||||
return createMediaActionPendingIntent(mediaSession, Player.COMMAND_STOP);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -17,6 +17,7 @@ package androidx.media3.session;
|
||||
|
||||
import static android.app.Service.STOP_FOREGROUND_DETACH;
|
||||
import static android.app.Service.STOP_FOREGROUND_REMOVE;
|
||||
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||
import static java.util.concurrent.TimeUnit.MILLISECONDS;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
@ -59,14 +60,15 @@ import java.util.concurrent.TimeoutException;
|
||||
private static final int MSG_USER_ENGAGED_TIMEOUT = 1;
|
||||
|
||||
private final MediaSessionService mediaSessionService;
|
||||
private final MediaNotification.Provider mediaNotificationProvider;
|
||||
|
||||
private final MediaNotification.ActionFactory actionFactory;
|
||||
private final NotificationManagerCompat notificationManagerCompat;
|
||||
private final Handler mainHandler;
|
||||
private final Executor mainExecutor;
|
||||
private final Intent startSelfIntent;
|
||||
private final Map<MediaSession, ListenableFuture<MediaController>> controllerMap;
|
||||
private final Map<MediaSession, ControllerInfo> controllerMap;
|
||||
|
||||
private MediaNotification.Provider mediaNotificationProvider;
|
||||
private int totalNotificationCount;
|
||||
@Nullable private MediaNotification mediaNotification;
|
||||
private boolean startedInForeground;
|
||||
@ -104,7 +106,7 @@ import java.util.concurrent.TimeoutException;
|
||||
.setListener(listener)
|
||||
.setApplicationLooper(Looper.getMainLooper())
|
||||
.buildAsync();
|
||||
controllerMap.put(session, controllerFuture);
|
||||
controllerMap.put(session, new ControllerInfo(controllerFuture));
|
||||
controllerFuture.addListener(
|
||||
() -> {
|
||||
try {
|
||||
@ -123,9 +125,9 @@ import java.util.concurrent.TimeoutException;
|
||||
}
|
||||
|
||||
public void removeSession(MediaSession session) {
|
||||
@Nullable ListenableFuture<MediaController> future = controllerMap.remove(session);
|
||||
if (future != null) {
|
||||
MediaController.releaseFuture(future);
|
||||
@Nullable ControllerInfo controllerInfo = controllerMap.remove(session);
|
||||
if (controllerInfo != null) {
|
||||
MediaController.releaseFuture(controllerInfo.controllerFuture);
|
||||
}
|
||||
}
|
||||
|
||||
@ -145,6 +147,15 @@ import java.util.concurrent.TimeoutException;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the media notification provider.
|
||||
*
|
||||
* @param mediaNotificationProvider The {@link MediaNotification.Provider}.
|
||||
*/
|
||||
public void setMediaNotificationProvider(MediaNotification.Provider mediaNotificationProvider) {
|
||||
this.mediaNotificationProvider = mediaNotificationProvider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the notification.
|
||||
*
|
||||
@ -158,19 +169,8 @@ import java.util.concurrent.TimeoutException;
|
||||
}
|
||||
|
||||
int notificationSequence = ++totalNotificationCount;
|
||||
MediaController mediaNotificationController = null;
|
||||
ListenableFuture<MediaController> controller = controllerMap.get(session);
|
||||
if (controller != null && controller.isDone()) {
|
||||
try {
|
||||
mediaNotificationController = Futures.getDone(controller);
|
||||
} catch (ExecutionException e) {
|
||||
// Ignore.
|
||||
}
|
||||
}
|
||||
ImmutableList<CommandButton> mediaButtonPreferences =
|
||||
mediaNotificationController != null
|
||||
? mediaNotificationController.getMediaButtonPreferences()
|
||||
: ImmutableList.of();
|
||||
checkNotNull(getConnectedControllerForSession(session)).getMediaButtonPreferences();
|
||||
MediaNotification.Provider.Callback callback =
|
||||
notification ->
|
||||
mainExecutor.execute(
|
||||
@ -261,6 +261,13 @@ import java.util.concurrent.TimeoutException;
|
||||
}
|
||||
}
|
||||
|
||||
private void onNotificationDismissed(MediaSession session) {
|
||||
@Nullable ControllerInfo controllerInfo = controllerMap.get(session);
|
||||
if (controllerInfo != null) {
|
||||
controllerInfo.wasNotificationDismissed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// POST_NOTIFICATIONS permission is not required for media session related notifications.
|
||||
// https://developer.android.com/develop/ui/views/notifications/notification-permission#exemptions-media-sessions
|
||||
@SuppressLint("MissingPermission")
|
||||
@ -301,17 +308,25 @@ import java.util.concurrent.TimeoutException;
|
||||
|
||||
private boolean shouldShowNotification(MediaSession session) {
|
||||
MediaController controller = getConnectedControllerForSession(session);
|
||||
return controller != null && !controller.getCurrentTimeline().isEmpty();
|
||||
if (controller == null || controller.getCurrentTimeline().isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
ControllerInfo controllerInfo = checkNotNull(controllerMap.get(session));
|
||||
if (controller.getPlaybackState() != Player.STATE_IDLE) {
|
||||
// Playback restarted, reset previous notification dismissed flag.
|
||||
controllerInfo.wasNotificationDismissed = false;
|
||||
}
|
||||
return !controllerInfo.wasNotificationDismissed;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private MediaController getConnectedControllerForSession(MediaSession session) {
|
||||
ListenableFuture<MediaController> controller = controllerMap.get(session);
|
||||
if (controller == null || !controller.isDone()) {
|
||||
@Nullable ControllerInfo controllerInfo = controllerMap.get(session);
|
||||
if (controllerInfo == null || !controllerInfo.controllerFuture.isDone()) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
return Futures.getDone(controller);
|
||||
return Futures.getDone(controllerInfo.controllerFuture);
|
||||
} catch (ExecutionException exception) {
|
||||
// We should never reach this.
|
||||
throw new IllegalStateException(exception);
|
||||
@ -350,8 +365,7 @@ import java.util.concurrent.TimeoutException;
|
||||
}
|
||||
}
|
||||
|
||||
private static final class MediaControllerListener
|
||||
implements MediaController.Listener, Player.Listener {
|
||||
private final class MediaControllerListener implements MediaController.Listener, Player.Listener {
|
||||
private final MediaSessionService mediaSessionService;
|
||||
private final MediaSession session;
|
||||
|
||||
@ -381,6 +395,17 @@ import java.util.concurrent.TimeoutException;
|
||||
session, /* startInForegroundWhenPaused= */ false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListenableFuture<SessionResult> onCustomCommand(
|
||||
MediaController controller, SessionCommand command, Bundle args) {
|
||||
@SessionResult.Code int resultCode = SessionError.ERROR_NOT_SUPPORTED;
|
||||
if (command.customAction.equals(MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY)) {
|
||||
onNotificationDismissed(session);
|
||||
resultCode = SessionResult.RESULT_SUCCESS;
|
||||
}
|
||||
return Futures.immediateFuture(new SessionResult(resultCode));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(MediaController controller) {
|
||||
if (mediaSessionService.isSessionAdded(session)) {
|
||||
@ -427,6 +452,18 @@ import java.util.concurrent.TimeoutException;
|
||||
startedInForeground = false;
|
||||
}
|
||||
|
||||
private static final class ControllerInfo {
|
||||
|
||||
public final ListenableFuture<MediaController> controllerFuture;
|
||||
|
||||
/** Indicates whether the user actively dismissed the notification. */
|
||||
public boolean wasNotificationDismissed;
|
||||
|
||||
public ControllerInfo(ListenableFuture<MediaController> controllerFuture) {
|
||||
this.controllerFuture = controllerFuture;
|
||||
}
|
||||
}
|
||||
|
||||
@RequiresApi(24)
|
||||
private static class Api24 {
|
||||
|
||||
|
@ -1322,10 +1322,14 @@ import org.checkerframework.checker.initialization.qual.Initialized;
|
||||
return false;
|
||||
}
|
||||
// Send from media notification controller.
|
||||
return applyMediaButtonKeyEvent(keyEvent, doubleTapCompleted);
|
||||
boolean isDismissNotificationEvent =
|
||||
intent.getBooleanExtra(
|
||||
MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY, /* defaultValue= */ false);
|
||||
return applyMediaButtonKeyEvent(keyEvent, doubleTapCompleted, isDismissNotificationEvent);
|
||||
}
|
||||
|
||||
private boolean applyMediaButtonKeyEvent(KeyEvent keyEvent, boolean doubleTapCompleted) {
|
||||
private boolean applyMediaButtonKeyEvent(
|
||||
KeyEvent keyEvent, boolean doubleTapCompleted, boolean isDismissNotificationEvent) {
|
||||
ControllerInfo controllerInfo = checkNotNull(instance.getMediaNotificationControllerInfo());
|
||||
Runnable command;
|
||||
int keyCode = keyEvent.getKeyCode();
|
||||
@ -1375,6 +1379,15 @@ import org.checkerframework.checker.initialization.qual.Initialized;
|
||||
postOrRun(
|
||||
getApplicationHandler(),
|
||||
() -> {
|
||||
if (isDismissNotificationEvent) {
|
||||
ListenableFuture<SessionResult> ignored =
|
||||
sendCustomCommand(
|
||||
controllerInfo,
|
||||
new SessionCommand(
|
||||
MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY,
|
||||
/* extras= */ Bundle.EMPTY),
|
||||
/* args= */ Bundle.EMPTY);
|
||||
}
|
||||
command.run();
|
||||
sessionStub.getConnectedControllersManager().flushCommandQueue(controllerInfo);
|
||||
});
|
||||
@ -1902,7 +1915,10 @@ import org.checkerframework.checker.initialization.qual.Initialized;
|
||||
playPauseTask =
|
||||
() -> {
|
||||
if (isMediaNotificationController(controllerInfo)) {
|
||||
applyMediaButtonKeyEvent(keyEvent, /* doubleTapCompleted= */ false);
|
||||
applyMediaButtonKeyEvent(
|
||||
keyEvent,
|
||||
/* doubleTapCompleted= */ false,
|
||||
/* isDismissNotificationEvent= */ false);
|
||||
} else {
|
||||
sessionLegacyStub.handleMediaPlayPauseOnHandler(
|
||||
checkNotNull(controllerInfo.getRemoteUserInfo()));
|
||||
|
@ -180,9 +180,6 @@ public abstract class MediaSessionService extends Service {
|
||||
@GuardedBy("lock")
|
||||
private @MonotonicNonNull MediaNotificationManager mediaNotificationManager;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private MediaNotification.@MonotonicNonNull Provider mediaNotificationProvider;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private @MonotonicNonNull DefaultActionFactory actionFactory;
|
||||
|
||||
@ -637,8 +634,6 @@ public abstract class MediaSessionService extends Service {
|
||||
/**
|
||||
* Sets the {@link MediaNotification.Provider} to customize notifications.
|
||||
*
|
||||
* <p>This should be called before {@link #onCreate()} returns.
|
||||
*
|
||||
* <p>This method can be called from any thread.
|
||||
*/
|
||||
@UnstableApi
|
||||
@ -646,7 +641,8 @@ public abstract class MediaSessionService extends Service {
|
||||
MediaNotification.Provider mediaNotificationProvider) {
|
||||
checkNotNull(mediaNotificationProvider);
|
||||
synchronized (lock) {
|
||||
this.mediaNotificationProvider = mediaNotificationProvider;
|
||||
getMediaNotificationManager(/* initialMediaNotificationProvider= */ mediaNotificationProvider)
|
||||
.setMediaNotificationProvider(mediaNotificationProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@ -679,16 +675,23 @@ public abstract class MediaSessionService extends Service {
|
||||
}
|
||||
|
||||
private MediaNotificationManager getMediaNotificationManager() {
|
||||
return getMediaNotificationManager(/* initialMediaNotificationProvider= */ null);
|
||||
}
|
||||
|
||||
private MediaNotificationManager getMediaNotificationManager(
|
||||
@Nullable MediaNotification.Provider initialMediaNotificationProvider) {
|
||||
synchronized (lock) {
|
||||
if (mediaNotificationManager == null) {
|
||||
if (mediaNotificationProvider == null) {
|
||||
if (initialMediaNotificationProvider == null) {
|
||||
checkStateNotNull(getBaseContext(), "Accessing service context before onCreate()");
|
||||
mediaNotificationProvider =
|
||||
initialMediaNotificationProvider =
|
||||
new DefaultMediaNotificationProvider.Builder(getApplicationContext()).build();
|
||||
}
|
||||
mediaNotificationManager =
|
||||
new MediaNotificationManager(
|
||||
/* mediaSessionService= */ this, mediaNotificationProvider, getActionFactory());
|
||||
/* mediaSessionService= */ this,
|
||||
initialMediaNotificationProvider,
|
||||
getActionFactory());
|
||||
}
|
||||
return mediaNotificationManager;
|
||||
}
|
||||
|
@ -710,7 +710,9 @@ import java.util.List;
|
||||
if (isCommandAvailable(COMMAND_GET_TIMELINE)) {
|
||||
return getCurrentTimeline();
|
||||
} else if (isCommandAvailable(COMMAND_GET_CURRENT_MEDIA_ITEM)) {
|
||||
return new CurrentMediaItemOnlyTimeline(this);
|
||||
return getCurrentTimeline().isEmpty()
|
||||
? Timeline.EMPTY
|
||||
: new CurrentMediaItemOnlyTimeline(this);
|
||||
}
|
||||
return Timeline.EMPTY;
|
||||
}
|
||||
|
@ -317,7 +317,8 @@ public final class MediaMetadataCompat implements Parcelable {
|
||||
METADATA_KEY_ALBUM_ARTIST,
|
||||
METADATA_KEY_WRITER,
|
||||
METADATA_KEY_AUTHOR,
|
||||
METADATA_KEY_COMPOSER
|
||||
METADATA_KEY_COMPOSER,
|
||||
METADATA_KEY_DISPLAY_SUBTITLE
|
||||
};
|
||||
|
||||
private static final @BitmapKey String[] PREFERRED_BITMAP_ORDER = {
|
||||
|
@ -267,74 +267,35 @@ public final class LegacyConversionsTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void convertToMediaMetadataCompat_displayTitleAndTitleHandledCorrectly() {
|
||||
MediaMetadata mediaMetadataWithTitleOnly =
|
||||
new MediaMetadata.Builder()
|
||||
.setTitle("title")
|
||||
.setSubtitle("subtitle")
|
||||
.setDescription("description")
|
||||
.setArtist("artist")
|
||||
.setAlbumArtist("albumArtist")
|
||||
.build();
|
||||
MediaMetadata mediaMetadataWithDisplayTitleOnly =
|
||||
new MediaMetadata.Builder()
|
||||
.setDisplayTitle("displayTitle")
|
||||
.setSubtitle("subtitle")
|
||||
.setDescription("description")
|
||||
.setArtist("artist")
|
||||
.setAlbumArtist("albumArtist")
|
||||
.build();
|
||||
MediaMetadata mediaMetadataWithDisplayTitleAndTitle =
|
||||
new MediaMetadata.Builder()
|
||||
.setTitle("title")
|
||||
.setDisplayTitle("displayTitle")
|
||||
.setSubtitle("subtitle")
|
||||
.setDescription("description")
|
||||
.setArtist("artist")
|
||||
.setAlbumArtist("albumArtist")
|
||||
.build();
|
||||
public void
|
||||
convertToMediaDescriptionCompat_withoutDisplayTitleWithSubtitle_subtitleUsedAsSubtitle() {
|
||||
MediaMetadata metadata =
|
||||
new MediaMetadata.Builder().setTitle("a_title").setSubtitle("a_subtitle").build();
|
||||
MediaItem mediaItem =
|
||||
new MediaItem.Builder().setMediaId("testId").setMediaMetadata(metadata).build();
|
||||
|
||||
MediaDescriptionCompat mediaDescriptionCompatFromDisplayTitleAndTitle =
|
||||
LegacyConversions.convertToMediaMetadataCompat(
|
||||
mediaMetadataWithDisplayTitleAndTitle,
|
||||
"mediaId",
|
||||
/* mediaUri= */ null,
|
||||
/* durationMs= */ 10_000L,
|
||||
/* artworkBitmap= */ null)
|
||||
.getDescription();
|
||||
MediaDescriptionCompat mediaDescriptionCompatFromDisplayTitleOnly =
|
||||
LegacyConversions.convertToMediaMetadataCompat(
|
||||
mediaMetadataWithDisplayTitleOnly,
|
||||
"mediaId",
|
||||
/* mediaUri= */ null,
|
||||
/* durationMs= */ 10_000L,
|
||||
/* artworkBitmap= */ null)
|
||||
.getDescription();
|
||||
MediaDescriptionCompat mediaDescriptionCompatFromTitleOnly =
|
||||
LegacyConversions.convertToMediaMetadataCompat(
|
||||
mediaMetadataWithTitleOnly,
|
||||
"mediaId",
|
||||
/* mediaUri= */ null,
|
||||
/* durationMs= */ 10_000L,
|
||||
/* artworkBitmap= */ null)
|
||||
.getDescription();
|
||||
MediaDescriptionCompat descriptionCompat =
|
||||
LegacyConversions.convertToMediaDescriptionCompat(mediaItem, /* artworkBitmap= */ null);
|
||||
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getTitle().toString())
|
||||
.isEqualTo("displayTitle");
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getSubtitle().toString())
|
||||
.isEqualTo("subtitle");
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getDescription().toString())
|
||||
.isEqualTo("description");
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getTitle().toString())
|
||||
.isEqualTo("displayTitle");
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getSubtitle().toString())
|
||||
.isEqualTo("subtitle");
|
||||
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getDescription().toString())
|
||||
.isEqualTo("description");
|
||||
assertThat(mediaDescriptionCompatFromTitleOnly.getTitle().toString()).isEqualTo("title");
|
||||
assertThat(mediaDescriptionCompatFromTitleOnly.getSubtitle().toString()).isEqualTo("artist");
|
||||
assertThat(mediaDescriptionCompatFromTitleOnly.getDescription().toString())
|
||||
.isEqualTo("albumArtist");
|
||||
assertThat(descriptionCompat.getTitle().toString()).isEqualTo("a_title");
|
||||
assertThat(descriptionCompat.getSubtitle().toString()).isEqualTo("a_subtitle");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void convertToMediaDescriptionCompat_withDisplayTitleAndSubtitle_subtitleUsedAsSubtitle() {
|
||||
MediaMetadata metadata =
|
||||
new MediaMetadata.Builder()
|
||||
.setDisplayTitle("a_display_title")
|
||||
.setSubtitle("a_subtitle")
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
new MediaItem.Builder().setMediaId("testId").setMediaMetadata(metadata).build();
|
||||
|
||||
MediaDescriptionCompat descriptionCompat =
|
||||
LegacyConversions.convertToMediaDescriptionCompat(mediaItem, /* artworkBitmap= */ null);
|
||||
|
||||
assertThat(descriptionCompat.getTitle().toString()).isEqualTo("a_display_title");
|
||||
assertThat(descriptionCompat.getSubtitle().toString()).isEqualTo("a_subtitle");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -504,6 +504,36 @@ public class MediaSessionServiceTest {
|
||||
serviceController.destroy();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void setMediaNotificationProvider_afterSetForegroundServiceTimeoutMs_usesCustomProvider()
|
||||
throws TimeoutException {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
ExoPlayer player = new TestExoPlayerBuilder(context).build();
|
||||
MediaSession session = new MediaSession.Builder(context, player).build();
|
||||
ServiceController<TestService> serviceController = Robolectric.buildService(TestService.class);
|
||||
TestService service = serviceController.create().get();
|
||||
|
||||
service.setForegroundServiceTimeoutMs(100);
|
||||
service.setMediaNotificationProvider(
|
||||
new DefaultMediaNotificationProvider(
|
||||
service,
|
||||
/* notificationIdProvider= */ mediaSession -> 2000,
|
||||
DefaultMediaNotificationProvider.DEFAULT_CHANNEL_ID,
|
||||
DefaultMediaNotificationProvider.DEFAULT_CHANNEL_NAME_RESOURCE_ID));
|
||||
service.addSession(session);
|
||||
// Start a player to trigger notification creation.
|
||||
player.setMediaItem(MediaItem.fromUri("asset:///media/mp4/sample.mp4"));
|
||||
player.prepare();
|
||||
player.play();
|
||||
runMainLooperUntil(() -> notificationManager.getActiveNotifications().length == 1);
|
||||
|
||||
assertThat(getStatusBarNotification(/* notificationId= */ 2000)).isNotNull();
|
||||
|
||||
session.release();
|
||||
player.release();
|
||||
serviceController.destroy();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void onStartCommand_mediaButtonEvent_pausedByMediaNotificationController()
|
||||
throws InterruptedException {
|
||||
|
@ -22,6 +22,8 @@ import static org.mockito.Mockito.when;
|
||||
import android.os.Bundle;
|
||||
import android.os.Looper;
|
||||
import androidx.media3.common.Player;
|
||||
import androidx.media3.common.Timeline;
|
||||
import androidx.media3.test.utils.FakeTimeline;
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.junit.Before;
|
||||
@ -57,6 +59,52 @@ public class PlayerWrapperTest {
|
||||
when(player.getApplicationLooper()).thenReturn(Looper.myLooper());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineAndGetCurrentMediaItem_isEmpty() {
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(false);
|
||||
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
|
||||
|
||||
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
|
||||
|
||||
assertThat(currentTimeline.isEmpty()).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineWhenEmpty_isEmpty() {
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(true);
|
||||
when(player.getCurrentTimeline()).thenReturn(Timeline.EMPTY);
|
||||
|
||||
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
|
||||
|
||||
assertThat(currentTimeline.isEmpty()).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineWhenMultipleItems_hasSingleItemTimeline() {
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(true);
|
||||
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
|
||||
|
||||
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
|
||||
|
||||
assertThat(currentTimeline.getWindowCount()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getCurrentTimelineWithCommandCheck_withCommandGetTimeline_returnOriginalTimeline() {
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(true);
|
||||
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(false);
|
||||
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
|
||||
|
||||
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
|
||||
|
||||
assertThat(currentTimeline.getWindowCount()).isEqualTo(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void createSessionPositionInfoForBundling() {
|
||||
int testAdGroupIndex = 12;
|
||||
|
@ -11,6 +11,7 @@ track 0:
|
||||
sample count = 58
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 12593505
|
||||
id = 1
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -269,6 +270,7 @@ track 1:
|
||||
sample count = 1
|
||||
track duration = 466800
|
||||
format 0:
|
||||
averageBitrate = 2593047
|
||||
id = 2
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -299,6 +301,7 @@ track 2:
|
||||
sample count = 58
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 99395
|
||||
id = 3
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/microvideo-meta-stream
|
||||
@ -541,6 +544,7 @@ track 3:
|
||||
sample count = 1
|
||||
track duration = 1133700
|
||||
format 0:
|
||||
averageBitrate = 416
|
||||
id = 4
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/motionphoto-image-meta
|
||||
|
@ -11,6 +11,7 @@ track 0:
|
||||
sample count = 44
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 12593505
|
||||
id = 1
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -213,6 +214,7 @@ track 1:
|
||||
sample count = 1
|
||||
track duration = 466800
|
||||
format 0:
|
||||
averageBitrate = 2593047
|
||||
id = 2
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -243,6 +245,7 @@ track 2:
|
||||
sample count = 38
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 99395
|
||||
id = 3
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/microvideo-meta-stream
|
||||
@ -405,6 +408,7 @@ track 3:
|
||||
sample count = 1
|
||||
track duration = 1133700
|
||||
format 0:
|
||||
averageBitrate = 416
|
||||
id = 4
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/motionphoto-image-meta
|
||||
|
@ -11,6 +11,7 @@ track 0:
|
||||
sample count = 23
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 12593505
|
||||
id = 1
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -129,6 +130,7 @@ track 1:
|
||||
sample count = 1
|
||||
track duration = 466800
|
||||
format 0:
|
||||
averageBitrate = 2593047
|
||||
id = 2
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -159,6 +161,7 @@ track 2:
|
||||
sample count = 17
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 99395
|
||||
id = 3
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/microvideo-meta-stream
|
||||
@ -237,6 +240,7 @@ track 3:
|
||||
sample count = 1
|
||||
track duration = 1133700
|
||||
format 0:
|
||||
averageBitrate = 416
|
||||
id = 4
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/motionphoto-image-meta
|
||||
|
@ -11,6 +11,7 @@ track 0:
|
||||
sample count = 2
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 12593505
|
||||
id = 1
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -45,6 +46,7 @@ track 1:
|
||||
sample count = 1
|
||||
track duration = 466800
|
||||
format 0:
|
||||
averageBitrate = 2593047
|
||||
id = 2
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = video/hevc
|
||||
@ -75,6 +77,7 @@ track 2:
|
||||
sample count = 1
|
||||
track duration = 2100700
|
||||
format 0:
|
||||
averageBitrate = 99395
|
||||
id = 3
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/microvideo-meta-stream
|
||||
@ -89,6 +92,7 @@ track 3:
|
||||
sample count = 1
|
||||
track duration = 1133700
|
||||
format 0:
|
||||
averageBitrate = 416
|
||||
id = 4
|
||||
containerMimeType = video/mp4
|
||||
sampleMimeType = application/motionphoto-image-meta
|
||||
|
@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -25,6 +26,7 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -25,6 +26,7 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -25,6 +26,7 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -25,6 +26,7 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

@@ -11,6 +11,7 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -200,6 +201,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -215,6 +217,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 31
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -152,6 +153,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -167,6 +169,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 16
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -92,6 +93,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -107,6 +109,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -32,6 +33,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -47,6 +49,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -200,6 +201,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -215,6 +217,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 31
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -152,6 +153,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -167,6 +169,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 16
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -92,6 +93,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -107,6 +109,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 1
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -32,6 +33,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -47,6 +49,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -200,6 +201,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -215,6 +217,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -11,6 +11,7 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@@ -20,7 +21,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@@ -200,6 +201,7 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@@ -215,6 +217,7 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

@@ -20,6 +20,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

@@ -20,6 +20,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

@@ -20,6 +20,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

@@ -20,6 +20,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

@@ -21,6 +21,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

@@ -21,6 +21,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

@@ -21,6 +21,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

@@ -21,6 +21,7 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

@@ -11,6 +11,8 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@@ -25,6 +27,7 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

Some files were not shown because too many files have changed in this diff.