Compare commits

..

No commits in common. "839c4a90f2ab36e48be73e1b5e907f3283dce72e" and "51efcad6720d3016d4b17fe8a1e9a11ca89e8b67" have entirely different histories.

639 changed files with 631 additions and 5940 deletions

View File

@ -19,7 +19,6 @@ body:
options:
- Media3 main branch
- Media3 pre-release (alpha, beta or RC not in this list)
- Media3 1.6.1
- Media3 1.6.0
- Media3 1.5.1
- Media3 1.5.0
@ -45,6 +44,9 @@ body:
- ExoPlayer 2.16.0
- ExoPlayer 2.15.1
- ExoPlayer 2.15.0
- ExoPlayer 2.14.2
- ExoPlayer 2.14.1
- ExoPlayer 2.14.0
- ExoPlayer dev-v2 branch
- Older (unsupported)
validations:

View File

@ -2,69 +2,6 @@
## 1.6
### 1.6.1 (2025-04-14)
This release includes the following changes since the
[1.6.0 release](#160-2025-03-26):
* Common Library:
* Add `PlaybackParameters.withPitch(float)` method for easily copying a
`PlaybackParameters` with a new `pitch` value
([#2257](https://github.com/androidx/media/issues/2257)).
* ExoPlayer:
* Fix issue where media item transition fails due to recoverable renderer
error during initialization of the next media item
([#2229](https://github.com/androidx/media/issues/2229)).
* Fix issue where `ProgressiveMediaPeriod` throws an
`IllegalStateException` as `PreloadMediaSource` attempts to call its
`getBufferedDurationUs()` before it is prepared
([#2315](https://github.com/androidx/media/issues/2315)).
* Fix sending `CmcdData` in manifest requests for DASH, HLS, and
SmoothStreaming ([#2253](https://github.com/androidx/media/pull/2253)).
* Ensure `AdPlaybackState.withAdDurationsUs(long[][])` can be used after
ad groups have been removed. The user still needs to pass in an array of
durations for removed ad groups which can be empty or null
([#2267](https://github.com/androidx/media/issues/2267)).
* Extractors:
* MP4: Parse `alternate_group` from the `tkhd` box and expose it as an
`Mp4AlternateGroupData` entry in each track's `Format.metadata`
([#2242](https://github.com/androidx/media/issues/2242)).
* Audio:
* Fix offload issue where the position might get stuck when playing a
playlist of short content
([#1920](https://github.com/androidx/media/issues/1920)).
* Session:
* Lower aggregation timeout for platform `MediaSession` callbacks from 500
to 100 milliseconds and add an experimental setter to allow apps to
configure this value.
* Fix issue where notifications reappear after they have been dismissed by
the user ([#2302](https://github.com/androidx/media/issues/2302)).
* Fix a bug where the session returned a single-item timeline when the
wrapped player is actually empty. This happened when the wrapped player
doesn't have `COMMAND_GET_TIMELINE` available while
`COMMAND_GET_CURRENT_MEDIA_ITEM` is available and the wrapped player is
empty ([#2320](https://github.com/androidx/media/issues/2320)).
* Fix a bug where calling
`MediaSessionService.setMediaNotificationProvider` is silently ignored
after other interactions with the service like
`setForegroundServiceTimeoutMs`
([#2305](https://github.com/androidx/media/issues/2305)).
* UI:
* Enable `PlayerSurface` to work with `ExoPlayer.setVideoEffects` and
`CompositionPlayer`.
* Fix bug where `PlayerSurface` can't be recomposed with a new `Player`.
* HLS extension:
* Fix issue where chunk duration wasn't set in `CmcdData` for HLS media,
causing an assertion failure when processing encrypted media segments
([#2312](https://github.com/androidx/media/issues/2312)).
* RTSP extension:
* Add support for URI with RTSPT scheme as a way to configure the RTSP
session to use TCP
([#1484](https://github.com/androidx/media/issues/1484)).
* Cast extension:
* Add support for playlist metadata
([#2235](https://github.com/androidx/media/pull/2235)).
### 1.6.0 (2025-03-26)
This release includes the following changes since the

View File

@ -12,8 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
project.ext {
releaseVersion = '1.6.1'
releaseVersionCode = 1_006_001_3_00
releaseVersion = '1.6.0'
releaseVersionCode = 1_006_000_3_00
minSdkVersion = 21
// See https://developer.android.com/training/cars/media/automotive-os#automotive-module
automotiveMinSdkVersion = 28

View File

@ -16,7 +16,6 @@
package androidx.media3.cast;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.common.util.Util.castNonNull;
import static java.lang.Math.min;
@ -167,7 +166,6 @@ public final class CastPlayer extends BasePlayer {
private long pendingSeekPositionMs;
@Nullable private PositionInfo pendingMediaItemRemovalPosition;
private MediaMetadata mediaMetadata;
private MediaMetadata playlistMetadata;
private DeviceInfo deviceInfo;
/**
@ -270,7 +268,6 @@ public final class CastPlayer extends BasePlayer {
playbackState = STATE_IDLE;
currentTimeline = CastTimeline.EMPTY_CAST_TIMELINE;
mediaMetadata = MediaMetadata.EMPTY;
playlistMetadata = MediaMetadata.EMPTY;
currentTracks = Tracks.EMPTY;
availableCommands = new Commands.Builder().addAll(PERMANENT_AVAILABLE_COMMANDS).build();
pendingSeekWindowIndex = C.INDEX_UNSET;
@ -659,19 +656,14 @@ public final class CastPlayer extends BasePlayer {
@Override
public MediaMetadata getPlaylistMetadata() {
return playlistMetadata;
// CastPlayer does not currently support metadata.
return MediaMetadata.EMPTY;
}
/** This method is not supported and does nothing. */
@Override
public void setPlaylistMetadata(MediaMetadata playlistMetadata) {
checkNotNull(playlistMetadata);
if (playlistMetadata.equals(this.playlistMetadata)) {
return;
}
this.playlistMetadata = playlistMetadata;
listeners.sendEvent(
EVENT_PLAYLIST_METADATA_CHANGED,
listener -> listener.onPlaylistMetadataChanged(this.playlistMetadata));
public void setPlaylistMetadata(MediaMetadata mediaMetadata) {
// CastPlayer does not currently support metadata.
}
@Override

View File

@ -1800,7 +1800,7 @@ public class CastPlayerTest {
}
@Test
public void setMediaItems_doesNotifyOnMediaMetadataChanged() {
public void setMediaItems_doesNotifyOnMetadataChanged() {
when(mockRemoteMediaClient.queueJumpToItem(anyInt(), anyLong(), eq(null)))
.thenReturn(mockPendingResult);
ArgumentCaptor<MediaMetadata> metadataCaptor = ArgumentCaptor.forClass(MediaMetadata.class);
@ -1827,7 +1827,7 @@ public class CastPlayerTest {
.build());
castPlayer.addListener(mockListener);
MediaMetadata initialMetadata = castPlayer.getMediaMetadata();
MediaMetadata intitalMetadata = castPlayer.getMediaMetadata();
castPlayer.setMediaItems(firstPlaylist, /* startIndex= */ 0, /* startPositionMs= */ 2000L);
updateTimeLine(firstPlaylist, /* mediaQueueItemIds= */ new int[] {1}, /* currentItemId= */ 1);
MediaMetadata firstMetadata = castPlayer.getMediaMetadata();
@ -1850,7 +1850,7 @@ public class CastPlayerTest {
secondPlaylist.get(1).mediaMetadata,
secondPlaylist.get(0).mediaMetadata)
.inOrder();
assertThat(initialMetadata).isEqualTo(MediaMetadata.EMPTY);
assertThat(intitalMetadata).isEqualTo(MediaMetadata.EMPTY);
assertThat(ImmutableList.of(firstMetadata, secondMetadata, thirdMetadata))
.containsExactly(
firstPlaylist.get(0).mediaMetadata,
@ -1898,35 +1898,6 @@ public class CastPlayerTest {
verify(mockListener, never()).onMediaMetadataChanged(any());
}
@Test
public void setPlaylistMetadata_doesNotifyOnPlaylistMetadataChanged() {
  // Register the listener first so the metadata-change callback can be observed.
  castPlayer.addListener(mockListener);
  MediaMetadata playlistMetadata = new MediaMetadata.Builder().setArtist("foo").build();

  // The player starts out with empty playlist metadata.
  assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(MediaMetadata.EMPTY);

  castPlayer.setPlaylistMetadata(playlistMetadata);

  // The new value is stored and the listener is notified of the change.
  assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(playlistMetadata);
  verify(mockListener).onPlaylistMetadataChanged(playlistMetadata);
}
@Test
public void setPlaylistMetadata_equalMetadata_doesNotNotifyOnPlaylistMetadataChanged() {
  // Register the listener first so every metadata-change callback is counted.
  castPlayer.addListener(mockListener);
  MediaMetadata playlistMetadata = new MediaMetadata.Builder().setArtist("foo").build();

  // Setting an equal value a second time must be a no-op for listeners.
  castPlayer.setPlaylistMetadata(playlistMetadata);
  castPlayer.setPlaylistMetadata(playlistMetadata);

  assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(playlistMetadata);
  // Exactly one notification: the second, equal set must not fire the callback again.
  verify(mockListener, times(1)).onPlaylistMetadataChanged(playlistMetadata);
}
@Test
public void getDeviceInfo_returnsCorrectDeviceInfoWithPlaybackTypeRemote() {
DeviceInfo deviceInfo = castPlayer.getDeviceInfo();

View File

@ -1072,23 +1072,14 @@ public final class AdPlaybackState {
/**
* Returns an instance with the specified ad durations, in microseconds.
*
* <p>The number of arrays of durations ({@code adDurations.length}) must always be equal to
* {@link #adGroupCount}. This is required even on an instance created with {@link
* #withRemovedAdGroupCount(int)}. The array of durations at the index of a removed ad group can
* be null or empty.
*
* @throws IllegalArgumentException if {@code adDurations.length != adGroupCount}.
* <p>Must only be used if {@link #removedAdGroupCount} is 0.
*/
@CheckResult
public AdPlaybackState withAdDurationsUs(long[][] adDurationUs) {
checkArgument(adDurationUs.length == adGroupCount);
checkState(removedAdGroupCount == 0);
AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length);
for (int correctedAdGroupIndex = 0;
correctedAdGroupIndex < adGroupCount - removedAdGroupCount;
correctedAdGroupIndex++) {
adGroups[correctedAdGroupIndex] =
adGroups[correctedAdGroupIndex].withAdDurationsUs(
adDurationUs[removedAdGroupCount + correctedAdGroupIndex]);
for (int adGroupIndex = 0; adGroupIndex < adGroupCount; adGroupIndex++) {
adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdDurationsUs(adDurationUs[adGroupIndex]);
}
return new AdPlaybackState(
adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount);

View File

@ -1039,10 +1039,7 @@ public final class Format {
/** The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. */
public final int sampleRate;
/**
* The {@link C.PcmEncoding} for PCM or losslessly compressed audio. Set to {@link #NO_VALUE} for
* other media types.
*/
/** The {@link C.PcmEncoding} for PCM audio. Set to {@link #NO_VALUE} for other media types. */
@UnstableApi public final @C.PcmEncoding int pcmEncoding;
/**

View File

@ -29,11 +29,11 @@ public final class MediaLibraryInfo {
/** The version of the library expressed as a string, for example "1.2.3" or "1.2.0-beta01". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
public static final String VERSION = "1.6.1";
public static final String VERSION = "1.6.0";
/** The version of the library expressed as {@code TAG + "/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.1";
public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.0";
/**
* The version of the library expressed as an integer, for example 1002003300.
@ -47,7 +47,7 @@ public final class MediaLibraryInfo {
* (123-045-006-3-00).
*/
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final int VERSION_INT = 1_006_001_3_00;
public static final int VERSION_INT = 1_006_000_3_00;
/** Whether the library was compiled with {@link Assertions} checks enabled. */
public static final boolean ASSERTIONS_ENABLED = true;

View File

@ -88,18 +88,6 @@ public final class PlaybackParameters {
return new PlaybackParameters(speed, pitch);
}
/**
* Returns a copy of these playback parameters with the given pitch and an unchanged speed.
*
* @param pitch The new pitch. Must be greater than zero.
* @return The copied playback parameters.
*/
@UnstableApi
@CheckResult
public PlaybackParameters withPitch(@FloatRange(from = 0, fromInclusive = false) float pitch) {
// Keep the current speed; only the pitch is replaced.
return new PlaybackParameters(speed, pitch);
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {

View File

@ -1613,7 +1613,7 @@ public interface Player {
/** {@link #getDeviceInfo()} changed. */
int EVENT_DEVICE_INFO_CHANGED = 29;
/** {@link #getDeviceVolume()} or {@link #isDeviceMuted()} changed. */
/** {@link #getDeviceVolume()} changed. */
int EVENT_DEVICE_VOLUME_CHANGED = 30;
/**

View File

@ -220,7 +220,10 @@ public final class AudioManagerCompat {
try {
return audioManager.getStreamVolume(streamType);
} catch (RuntimeException e) {
Log.w(TAG, "Could not retrieve stream volume for stream type " + streamType, e);
Log.w(
"AudioManagerCompat",
"Could not retrieve stream volume for stream type " + streamType,
e);
return audioManager.getStreamMaxVolume(streamType);
}
}

View File

@ -128,10 +128,6 @@ public final class MediaFormatUtil {
formatBuilder.setInitializationData(csdBuffers.build());
if (mediaFormat.containsKey(MediaFormat.KEY_TRACK_ID)) {
formatBuilder.setId(mediaFormat.getInteger(MediaFormat.KEY_TRACK_ID));
}
return formatBuilder.build();
}
@ -179,10 +175,6 @@ public final class MediaFormatUtil {
result.setInteger(MediaFormat.KEY_ENCODER_PADDING, format.encoderPadding);
maybeSetPixelAspectRatio(result, format.pixelWidthHeightRatio);
if (format.id != null) {
result.setInteger(MediaFormat.KEY_TRACK_ID, Integer.parseInt(format.id));
}
return result;
}

View File

@ -2570,8 +2570,7 @@ public final class Util {
*/
public static @ContentType int inferContentType(Uri uri) {
@Nullable String scheme = uri.getScheme();
if (scheme != null
&& (Ascii.equalsIgnoreCase("rtsp", scheme) || Ascii.equalsIgnoreCase("rtspt", scheme))) {
if (scheme != null && Ascii.equalsIgnoreCase("rtsp", scheme)) {
return C.CONTENT_TYPE_RTSP;
}

View File

@ -1036,110 +1036,4 @@ public class AdPlaybackStateTest {
assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle()).ids[1]).isNull();
assertThat(AdPlaybackState.AdGroup.fromBundle(adGroup.toBundle())).isEqualTo(adGroup);
}
// Verifies that withAdDurationsUs(long[][]) keeps working while ad groups are progressively
// removed via withRemovedAdGroupCount(int), and that the caller-owned durations array can be
// mutated between calls — including nulling out the entries of removed groups — without
// corrupting the state of the remaining groups.
@Test
public void setDurationsUs_withRemovedAdGroups_updatedCorrectlyAndSafely() {
// Build three explicit ad groups (indexes 0..2) plus a live postroll placeholder, which is
// appended after them and therefore becomes ad group index 3.
AdPlaybackState adPlaybackState =
new AdPlaybackState("adsId")
.withLivePostrollPlaceholderAppended(false)
.withNewAdGroup(/* adGroupIndex= */ 0, 10_000)
.withAdCount(/* adGroupIndex= */ 0, 1)
.withAvailableAdMediaItem(
/* adGroupIndex= */ 0,
/* adIndexInAdGroup= */ 0,
MediaItem.fromUri("http://example.com/0-0"))
.withNewAdGroup(/* adGroupIndex= */ 1, 11_000)
.withAdCount(/* adGroupIndex= */ 1, 2)
.withAvailableAdMediaItem(
/* adGroupIndex= */ 1,
/* adIndexInAdGroup= */ 0,
MediaItem.fromUri("http://example.com/1-0"))
.withAvailableAdMediaItem(
/* adGroupIndex= */ 1,
/* adIndexInAdGroup= */ 1,
MediaItem.fromUri("http://example.com/1-1"))
.withNewAdGroup(/* adGroupIndex= */ 2, 12_000)
.withAdCount(/* adGroupIndex= */ 2, 1)
.withAvailableAdMediaItem(
/* adGroupIndex= */ 2,
/* adIndexInAdGroup= */ 0,
MediaItem.fromUri("http://example.com/2-0"));
// One durations array per ad group; index 3 is the postroll placeholder's duration.
long[][] adDurationsUs = {
new long[] {10L}, new long[] {20L, 21L}, new long[] {30L}, new long[] {C.TIME_END_OF_SOURCE}
};
// Apply durations to all four groups, then remove the first ad group.
adPlaybackState =
adPlaybackState
.withAdDurationsUs(adDurationsUs)
.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);
// The group count still includes the removed group.
assertThat(adPlaybackState.adGroupCount).isEqualTo(4);
// The removed group (index 0) is emptied out entirely.
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).durationsUs).hasLength(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).count).isEqualTo(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).states).hasLength(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).isPlaceholder).isFalse();
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).mediaItems).hasLength(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).ids).hasLength(0);
// The remaining groups keep the durations they were given.
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
.asList()
.containsExactly(20L, 21L)
.inOrder();
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
.asList()
.containsExactly(30L);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
.asList()
.containsExactly(C.TIME_END_OF_SOURCE);
// Mutate the caller-owned array and re-apply: group 1 picks up the new values while the
// untouched groups keep theirs.
adDurationsUs[1][0] = 120L;
adDurationsUs[1][1] = 121L;
adPlaybackState = adPlaybackState.withAdDurationsUs(adDurationsUs);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
.asList()
.containsExactly(120L, 121L)
.inOrder();
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
.asList()
.containsExactly(30L);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
.asList()
.containsExactly(C.TIME_END_OF_SOURCE);
// Entries for removed groups may be null: remove a second group and update group 2.
adDurationsUs[0] = null;
adDurationsUs[1] = null;
adDurationsUs[2][0] = C.TIME_UNSET;
adPlaybackState =
adPlaybackState
.withRemovedAdGroupCount(/* removedAdGroupCount= */ 2)
.withAdDurationsUs(adDurationsUs);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 1).durationsUs).hasLength(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs)
.asList()
.containsExactly(C.TIME_UNSET);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
.asList()
.containsExactly(C.TIME_END_OF_SOURCE);
// Remove a third group; the placeholder group's duration can still be updated.
adDurationsUs[2] = null;
adDurationsUs[3][0] = 0L;
adPlaybackState =
adPlaybackState
.withRemovedAdGroupCount(/* removedAdGroupCount= */ 3)
.withAdDurationsUs(adDurationsUs);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).durationsUs).hasLength(0);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs)
.asList()
.containsExactly(0L);
// Finally remove every group: an all-null durations array must still be accepted.
adDurationsUs[3] = null;
adPlaybackState =
adPlaybackState
.withRemovedAdGroupCount(/* removedAdGroupCount= */ 4)
.withAdDurationsUs(adDurationsUs);
assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 3).durationsUs).hasLength(0);
}
}

View File

@ -1,54 +0,0 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.container;
import androidx.annotation.Nullable;
import androidx.media3.common.Metadata;
import androidx.media3.common.util.UnstableApi;
/** Stores the MP4 {@code alternate_group} value parsed from a track's {@code tkhd} box. */
@UnstableApi
public final class Mp4AlternateGroupData implements Metadata.Entry {

  /** The {@code alternate_group} value of the track. */
  public final int alternateGroup;

  /**
   * Creates an instance.
   *
   * @param alternateGroup The {@code alternate_group} value.
   */
  public Mp4AlternateGroupData(int alternateGroup) {
    this.alternateGroup = alternateGroup;
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (o == this) {
      return true;
    }
    // Equality is defined solely by the alternate_group value.
    return o instanceof Mp4AlternateGroupData
        && ((Mp4AlternateGroupData) o).alternateGroup == alternateGroup;
  }

  @Override
  public int hashCode() {
    return alternateGroup;
  }

  @Override
  public String toString() {
    return "Mp4AlternateGroup: " + alternateGroup;
  }
}

View File

@ -245,9 +245,6 @@ public abstract class Mp4Box {
@SuppressWarnings("ConstantCaseForConstants")
public static final int TYPE_esds = 0x65736473;
@SuppressWarnings("ConstantCaseForConstants")
public static final int TYPE_btrt = 0x62747274;
@SuppressWarnings("ConstantCaseForConstants")
public static final int TYPE_moof = 0x6d6f6f66;

View File

@ -701,13 +701,11 @@ public class MediaExtractorCompatTest {
public void getTrackFormat_withMultipleTracks_returnsCorrectTrackId() throws IOException {
fakeExtractor.addReadAction(
(input, seekPosition) -> {
TrackOutput output1 = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_VIDEO);
TrackOutput output2 = extractorOutput.track(/* id= */ 1, C.TRACK_TYPE_AUDIO);
TrackOutput output1 = extractorOutput.track(/* id= */ 1, C.TRACK_TYPE_VIDEO);
TrackOutput output2 = extractorOutput.track(/* id= */ 2, C.TRACK_TYPE_AUDIO);
extractorOutput.endTracks();
output1.format(
new Format.Builder().setId(1).setSampleMimeType(MimeTypes.VIDEO_H264).build());
output2.format(
new Format.Builder().setId(2).setSampleMimeType(MimeTypes.AUDIO_AAC).build());
output1.format(PLACEHOLDER_FORMAT_VIDEO);
output2.format(PLACEHOLDER_FORMAT_AUDIO);
return Extractor.RESULT_CONTINUE;
});

View File

@ -485,9 +485,6 @@ public interface ExoPlayer extends Player {
* <p>If enabled, ExoPlayer's playback loop will run as rarely as possible by scheduling work
* for when {@link Renderer} progress can be made.
*
* <p>If a custom {@link AudioSink} is used then it must correctly implement {@link
* AudioSink#getAudioTrackBufferSizeUs()} to enable dynamic scheduling for audio playback.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param dynamicSchedulingEnabled Whether to enable dynamic scheduling.

View File

@ -750,7 +750,25 @@ import java.util.concurrent.atomic.AtomicBoolean;
: readingPeriod.info.id);
}
}
if (e.type == ExoPlaybackException.TYPE_RENDERER
if (e.isRecoverable
&& (pendingRecoverableRendererError == null
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
// If pendingRecoverableRendererError != null and error was
// ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED then upon retry, renderer will attempt with
// offload disabled.
Log.w(TAG, "Recoverable renderer error", e);
if (pendingRecoverableRendererError != null) {
pendingRecoverableRendererError.addSuppressed(e);
e = pendingRecoverableRendererError;
} else {
pendingRecoverableRendererError = e;
}
// Given that the player is now in an unhandled exception state, the error needs to be
// recovered or the player stopped before any other message is handled.
handler.sendMessageAtFrontOfQueue(
handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
} else if (e.type == ExoPlaybackException.TYPE_RENDERER
&& renderers[e.rendererIndex % renderers.length].isRendererPrewarming(
/* id= */ e.rendererIndex)) {
// TODO(b/380273486): Investigate recovery for pre-warming renderer errors
@ -774,12 +792,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
pendingRecoverableRendererError.addSuppressed(e);
e = pendingRecoverableRendererError;
}
Log.e(TAG, "Playback error", e);
if (e.type == ExoPlaybackException.TYPE_RENDERER
&& queue.getPlayingPeriod() != queue.getReadingPeriod()) {
// We encountered a renderer error while reading ahead. Force-update the playback position
// to the failing item to ensure correct retry or that the user-visible error is reported
// after the transition.
// to the failing item to ensure the user-visible error is reported after the transition.
while (queue.getPlayingPeriod() != queue.getReadingPeriod()) {
queue.advancePlayingPeriod();
}
@ -795,24 +812,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
/* reportDiscontinuity= */ true,
Player.DISCONTINUITY_REASON_AUTO_TRANSITION);
}
if (e.isRecoverable
&& (pendingRecoverableRendererError == null
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
// Given that the player is now in an unhandled exception state, the error needs to be
// recovered or the player stopped before any other message is handled.
Log.w(TAG, "Recoverable renderer error", e);
if (pendingRecoverableRendererError == null) {
pendingRecoverableRendererError = e;
}
handler.sendMessageAtFrontOfQueue(
handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
} else {
Log.e(TAG, "Playback error", e);
stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false);
playbackInfo = playbackInfo.copyWithPlaybackError(e);
}
stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false);
playbackInfo = playbackInfo.copyWithPlaybackError(e);
}
} catch (DrmSession.DrmSessionException e) {
handleIoException(e, e.errorCode);
@ -2782,10 +2783,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
private void maybeUpdateOffloadScheduling() {
// If playing period is audio-only with offload mode preference to enable, then offload
// scheduling should be enabled.
if (queue.getPlayingPeriod() != queue.getReadingPeriod()) {
// Do not enable offload scheduling when starting to process the next media item.
return;
}
@Nullable MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
if (playingPeriodHolder != null) {
TrackSelectorResult trackSelectorResult = playingPeriodHolder.getTrackSelectorResult();

View File

@ -1002,6 +1002,7 @@ public final class MediaExtractorCompat {
FormatHolder scratchFormatHolder, DecoderInputBuffer scratchNoDataDecoderInputBuffer) {
Format format = getFormat(scratchFormatHolder, scratchNoDataDecoderInputBuffer);
MediaFormat mediaFormatResult = MediaFormatUtil.createMediaFormatFromFormat(format);
mediaFormatResult.setInteger(MediaFormat.KEY_TRACK_ID, getIdOfBackingTrack());
if (compatibilityTrackMimeType != null) {
if (Util.SDK_INT >= 29) {
mediaFormatResult.removeKey(MediaFormat.KEY_CODECS_STRING);

View File

@ -591,15 +591,6 @@ public interface AudioSink {
*/
default void setOutputStreamOffsetUs(long outputStreamOffsetUs) {}
/**
* Returns the size of the underlying {@link AudioTrack} buffer in microseconds, or {@link
* C#TIME_UNSET} if this operation is unsupported or the {@link AudioTrack} is not initialized.
*
* <p>If the {@link AudioTrack} is configured with a compressed encoding, then the returned
* duration is an estimated minimum based on the encoding's maximum encoded byte rate.
*/
long getAudioTrackBufferSizeUs();
/**
* Enables tunneling, if possible. The sink is reset if tunneling was previously disabled.
* Enabling tunneling is only possible if the sink is based on a platform {@link AudioTrack}, and

View File

@ -22,7 +22,6 @@ import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.media.AudioDeviceInfo;
@ -170,7 +169,6 @@ public abstract class DecoderAudioRenderer<
private long largestQueuedPresentationTimeUs;
private long lastBufferInStreamPresentationTimeUs;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
public DecoderAudioRenderer() {
this(/* eventHandler= */ null, /* eventListener= */ null);
@ -248,28 +246,16 @@ public abstract class DecoderAudioRenderer<
if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
bufferedDurationUs =
long durationUs =
(long)
(bufferedDurationUs
((nextBufferToWritePresentationTimeUs - positionUs)
/ (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
/ 2);
if (isStarted) {
// Account for the elapsed time since the start of this iteration of the rendering loop.
bufferedDurationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
durationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
}
return max(DEFAULT_DURATION_TO_PROGRESS_US, bufferedDurationUs);
return max(DEFAULT_DURATION_TO_PROGRESS_US, durationUs);
}
@Override
@ -318,7 +304,6 @@ public abstract class DecoderAudioRenderer<
try {
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@ -600,7 +585,6 @@ public abstract class DecoderAudioRenderer<
outputStreamEnded = true;
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
}
private void flushDecoder() throws ExoPlaybackException {
@ -676,7 +660,6 @@ public abstract class DecoderAudioRenderer<
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
@ -706,7 +689,6 @@ public abstract class DecoderAudioRenderer<
setOutputStreamOffsetUs(C.TIME_UNSET);
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
setSourceDrmSession(null);
releaseDecoder();

View File

@ -71,7 +71,6 @@ import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayDeque;
@ -1455,23 +1454,6 @@ public final class DefaultAudioSink implements AudioSink {
}
}
@Override
public long getAudioTrackBufferSizeUs() {
// Without an initialized AudioTrack there is no buffer to measure.
if (!isAudioTrackInitialized()) {
return C.TIME_UNSET;
}
// On API 23+ delegate to the Api23 helper, which reads the track's buffer size directly.
if (Util.SDK_INT >= 23) {
return Api23.getAudioTrackBufferSizeUs(audioTrack, configuration);
}
// Pre-23 fallback: derive a byte rate from the output configuration. PCM is exact
// (sampleRate * frameSize); compressed encodings use the encoding's maximum encoded byte
// rate, so the resulting duration is an estimated minimum.
long byteRate =
configuration.outputMode == OUTPUT_MODE_PCM
? (long) configuration.outputSampleRate * configuration.outputPcmFrameSize
: DefaultAudioTrackBufferSizeProvider.getMaximumEncodedRateBytesPerSecond(
configuration.outputEncoding);
// Convert the configured buffer size in bytes to microseconds, rounding down.
return Util.scaleLargeValue(
configuration.bufferSize, C.MICROS_PER_SECOND, byteRate, RoundingMode.DOWN);
}
@Override
public void enableTunnelingV21() {
Assertions.checkState(externalAudioSessionIdProvided);
@ -2383,18 +2365,6 @@ public final class DefaultAudioSink implements AudioSink {
audioTrack.setPreferredDevice(
audioDeviceInfo == null ? null : audioDeviceInfo.audioDeviceInfo);
}
/**
* Returns the duration of {@code audioTrack}'s buffer in microseconds.
*
* <p>For PCM output the track's buffer size in frames is converted to a duration via the
* configuration. Otherwise the buffer size is scaled by the encoding's maximum encoded byte
* rate, rounding down.
*/
public static long getAudioTrackBufferSizeUs(
AudioTrack audioTrack, Configuration configuration) {
return configuration.outputMode == OUTPUT_MODE_PCM
? configuration.framesToDurationUs(audioTrack.getBufferSizeInFrames())
: Util.scaleLargeValue(
audioTrack.getBufferSizeInFrames(),
C.MICROS_PER_SECOND,
DefaultAudioTrackBufferSizeProvider.getMaximumEncodedRateBytesPerSecond(
configuration.outputEncoding),
RoundingMode.DOWN);
}
}
@RequiresApi(31)

View File

@ -162,11 +162,6 @@ public class ForwardingAudioSink implements AudioSink {
sink.setOutputStreamOffsetUs(outputStreamOffsetUs);
}
@Override
public long getAudioTrackBufferSizeUs() {
// Pure delegation: forwards to the wrapped sink.
return sink.getAudioTrackBufferSizeUs();
}
@Override
public void enableTunnelingV21() {
sink.enableTunnelingV21();

View File

@ -20,7 +20,6 @@ import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_MA
import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO;
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.lang.Math.max;
import static java.lang.Math.min;
import android.annotation.SuppressLint;
import android.content.Context;
@ -126,7 +125,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private int rendererPriority;
private boolean isStarted;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
/**
* @param context A context.
@ -520,33 +518,20 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
protected long getDurationToProgressUs(
long positionUs, long elapsedRealtimeUs, boolean isOnBufferAvailableListenerRegistered) {
if (nextBufferToWritePresentationTimeUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
if (nextBufferToWritePresentationTimeUs != C.TIME_UNSET) {
long durationUs =
(long)
((nextBufferToWritePresentationTimeUs - positionUs)
/ (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
/ 2);
if (isStarted) {
// Account for the elapsed time since the start of this iteration of the rendering loop.
durationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
}
return max(DEFAULT_DURATION_TO_PROGRESS_US, durationUs);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
bufferedDurationUs =
(long)
(bufferedDurationUs
/ (getPlaybackParameters() != null ? getPlaybackParameters().speed : 1.0f)
/ 2);
if (isStarted) {
// Account for the elapsed time since the start of this iteration of the rendering loop.
bufferedDurationUs -= Util.msToUs(getClock().elapsedRealtime()) - elapsedRealtimeUs;
}
return max(DEFAULT_DURATION_TO_PROGRESS_US, bufferedDurationUs);
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
@Override
@ -694,7 +679,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
}
@ -719,7 +703,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSinkNeedsReset = true;
inputFormat = null;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
audioSink.flush();
} finally {
@ -735,7 +718,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
protected void onReset() {
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
super.onReset();
} finally {
@ -875,7 +857,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
}
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e,

View File

@ -41,7 +41,6 @@ import java.util.List;
/* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback {
private final MediaPeriod[] periods;
private final boolean[] periodsWithTimeOffsets;
private final IdentityHashMap<SampleStream, Integer> streamPeriodIndices;
private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory;
private final ArrayList<MediaPeriod> childrenPendingPreparation;
@ -63,10 +62,8 @@ import java.util.List;
compositeSequenceableLoader = compositeSequenceableLoaderFactory.empty();
streamPeriodIndices = new IdentityHashMap<>();
enabledPeriods = new MediaPeriod[0];
periodsWithTimeOffsets = new boolean[periods.length];
for (int i = 0; i < periods.length; i++) {
if (periodTimeOffsetsUs[i] != 0) {
periodsWithTimeOffsets[i] = true;
this.periods[i] = new TimeOffsetMediaPeriod(periods[i], periodTimeOffsetsUs[i]);
}
}
@ -78,7 +75,7 @@ import java.util.List;
* specified index.
*/
public MediaPeriod getChildPeriod(int index) {
return periodsWithTimeOffsets[index]
return periods[index] instanceof TimeOffsetMediaPeriod
? ((TimeOffsetMediaPeriod) periods[index]).getWrappedMediaPeriod()
: periods[index];
}

View File

@ -505,18 +505,17 @@ public final class PreloadMediaSource extends WrappingMediaSource {
return;
}
PreloadMediaPeriod preloadMediaPeriod = (PreloadMediaPeriod) mediaPeriod;
if (prepared) {
long bufferedPositionUs = mediaPeriod.getBufferedPositionUs();
if (bufferedPositionUs == C.TIME_END_OF_SOURCE) {
preloadControl.onLoadedToTheEndOfSource(PreloadMediaSource.this);
stopPreloading();
return;
}
if (!preloadControl.onContinueLoadingRequested(
PreloadMediaSource.this, bufferedPositionUs - periodStartPositionUs)) {
stopPreloading();
return;
}
long bufferedPositionUs = mediaPeriod.getBufferedPositionUs();
if (prepared && bufferedPositionUs == C.TIME_END_OF_SOURCE) {
preloadControl.onLoadedToTheEndOfSource(PreloadMediaSource.this);
stopPreloading();
return;
}
if (prepared
&& !preloadControl.onContinueLoadingRequested(
PreloadMediaSource.this, bufferedPositionUs - periodStartPositionUs)) {
stopPreloading();
return;
}
preloadMediaPeriod.continueLoading(
new LoadingInfo.Builder().setPlaybackPositionUs(periodStartPositionUs).build());

View File

@ -739,8 +739,6 @@ public class EventLogger implements AnalyticsListener {
return "NONE";
case Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS:
return "TRANSIENT_AUDIO_FOCUS_LOSS";
case Player.PLAYBACK_SUPPRESSION_REASON_UNSUITABLE_AUDIO_OUTPUT:
return "UNSUITABLE_AUDIO_OUTPUT";
default:
return "?";
}

View File

@ -149,17 +149,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
*/
private static final long OFFSET_FROM_PERIOD_END_TO_TREAT_AS_LAST_US = 100_000L;
/**
* The offset from {@link #getLastResetPositionUs()} in microseconds, before which input buffers
* are not allowed to be dropped.
*
* <p>This value must be greater than the pre-roll distance used by common audio codecs, such as
* 80ms used by Opus <a
* href="https://opus-codec.org/docs/opus_in_isobmff.html#4.3.6.2">Encapsulation of Opus in ISO
* Base Media File Format</a>
*/
private static final long OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US = 200_000L;
/**
* The maximum number of consecutive dropped input buffers that allow discarding frame headers.
*
@ -627,16 +616,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
boolean treatDroppedBuffersAsSkipped)
throws ExoPlaybackException {
if (minEarlyUsToDropDecoderInput != C.TIME_UNSET) {
// TODO: b/161996553 - Remove the isAwayFromLastResetPosition check when audio pre-rolling
// is implemented correctly. Audio codecs such as Opus require pre-roll samples to be decoded
// and discarded on a seek. Depending on the audio decoder, the positionUs may jump forward
// by the pre-roll duration. Do not drop more frames than necessary when this happens.
boolean isAwayFromLastResetPosition =
positionUs
> getLastResetPositionUs()
+ OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US;
shouldDropDecoderInputBuffers =
isAwayFromLastResetPosition && earlyUs < minEarlyUsToDropDecoderInput;
shouldDropDecoderInputBuffers = earlyUs < minEarlyUsToDropDecoderInput;
}
return shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastFrame)
&& maybeDropBuffersToKeyframe(positionUs, treatDroppedBuffersAsSkipped);

View File

@ -147,7 +147,6 @@ import androidx.media3.exoplayer.analytics.PlayerId;
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
import androidx.media3.exoplayer.drm.DrmSessionEventListener;
import androidx.media3.exoplayer.drm.DrmSessionManager;
import androidx.media3.exoplayer.mediacodec.MediaCodecRenderer;
import androidx.media3.exoplayer.metadata.MetadataOutput;
import androidx.media3.exoplayer.source.ClippingMediaSource;
import androidx.media3.exoplayer.source.ConcatenatingMediaSource;
@ -11787,54 +11786,6 @@ public class ExoPlayerTest {
player.release();
}
@Test
public void enablingOffload_withFastReadingPeriodAdvancement_playerDoesNotSleep()
throws Exception {
FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO);
AtomicInteger sleepingForOffloadCounter = new AtomicInteger();
ExoPlayer player =
parameterizeTestExoPlayerBuilder(
new TestExoPlayerBuilder(context).setRenderers(sleepRenderer))
.build();
ExoPlayer.AudioOffloadListener listener =
new ExoPlayer.AudioOffloadListener() {
@Override
public void onSleepingForOffloadChanged(boolean sleepingForOffload) {
if (sleepingForOffload) {
sleepingForOffloadCounter.getAndIncrement();
}
}
};
player.addAudioOffloadListener(listener);
// Set a playlist of multiple, short audio-only items such that the reading period quickly
// advances past the playing period.
Timeline timeline = new FakeTimeline();
player.setMediaSources(
ImmutableList.of(
new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT)));
player.setTrackSelectionParameters(
player
.getTrackSelectionParameters()
.buildUpon()
.setAudioOffloadPreferences(
new AudioOffloadPreferences.Builder()
.setAudioOffloadMode(AudioOffloadPreferences.AUDIO_OFFLOAD_MODE_REQUIRED)
.build())
.build());
player.prepare();
player.play();
advance(player).untilStartOfMediaItem(/* mediaItemIndex= */ 1);
sleepRenderer.sleepOnNextRender();
runUntilPlaybackState(player, Player.STATE_ENDED);
assertThat(sleepingForOffloadCounter.get()).isEqualTo(0);
player.release();
}
@Test
public void wakeupListenerWhileSleepingForOffload_isWokenUp_renderingResumes() throws Exception {
FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO).sleepOnNextRender();
@ -16749,84 +16700,6 @@ public class ExoPlayerTest {
assertThat(videoScalingSetOnSecondaryVideoRenderer.get()).isTrue();
}
@Test
public void
play_withRecoverableErrorAfterAdvancingReadingPeriod_advancesPlayingPeriodWhileErrorHandling()
throws Exception {
Clock fakeClock = new FakeClock(/* isAutoAdvancing= */ true);
AtomicBoolean shouldRendererThrowRecoverableError = new AtomicBoolean(false);
AtomicInteger onStreamChangedCount = new AtomicInteger(0);
ExoPlayer player =
new TestExoPlayerBuilder(context)
.setClock(fakeClock)
.setRenderersFactory(
new RenderersFactory() {
@Override
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput) {
return new Renderer[] {
new FakeVideoRenderer(
SystemClock.DEFAULT.createHandler(
eventHandler.getLooper(), /* callback= */ null),
videoRendererEventListener) {
@Override
protected void onStreamChanged(
Format[] formats,
long startPositionUs,
long offsetUs,
MediaSource.MediaPeriodId mediaPeriodId)
throws ExoPlaybackException {
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
onStreamChangedCount.getAndIncrement();
}
@Override
public void render(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
if (!shouldRendererThrowRecoverableError.get()) {
super.render(positionUs, elapsedRealtimeUs);
} else {
shouldRendererThrowRecoverableError.set(false);
throw createRendererException(
new MediaCodecRenderer.DecoderInitializationException(
new Format.Builder().build(),
new IllegalArgumentException(),
false,
0),
this.getFormatHolder().format,
true,
PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
}
}
}
};
}
})
.build();
player.setMediaSources(
ImmutableList.of(
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT)));
player.prepare();
// Play a bit until the reading period has advanced.
player.play();
advance(player).untilBackgroundThreadCondition(() -> onStreamChangedCount.get() == 2);
shouldRendererThrowRecoverableError.set(true);
runUntilPlaybackState(player, Player.STATE_ENDED);
player.release();
// onStreamChanged should occur thrice;
// 1 during first enable, 2 during replace stream, 3 during error recovery
assertThat(onStreamChangedCount.get()).isEqualTo(3);
assertThat(shouldRendererThrowRecoverableError.get()).isFalse();
}
// Internal methods.
private void addWatchAsSystemFeature() {

View File

@ -1524,91 +1524,6 @@ public class ExoPlayerWithPrewarmingRenderersTest {
assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_ENABLED);
}
@Test
public void
play_recoverableErrorWithPrimaryRendererDuringPrewarming_doesNotResetSecondaryRenderer()
throws Exception {
Clock fakeClock = new FakeClock(/* isAutoAdvancing= */ true);
Player.Listener listener = mock(Player.Listener.class);
AtomicBoolean shouldPrimaryRendererThrowRecoverable = new AtomicBoolean(false);
ExoPlayer player =
new TestExoPlayerBuilder(context)
.setClock(fakeClock)
.setRenderersFactory(
new FakeRenderersFactorySupportingSecondaryVideoRenderer(fakeClock) {
@Override
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput) {
HandlerWrapper clockAwareHandler =
clock.createHandler(eventHandler.getLooper(), /* callback= */ null);
return new Renderer[] {
new FakeVideoRenderer(clockAwareHandler, videoRendererEventListener) {
@Override
public void render(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
if (!shouldPrimaryRendererThrowRecoverable.get()) {
super.render(positionUs, elapsedRealtimeUs);
} else {
shouldPrimaryRendererThrowRecoverable.set(false);
throw createRendererException(
new MediaCodecRenderer.DecoderInitializationException(
new Format.Builder().build(),
new IllegalArgumentException(),
false,
0),
this.getFormatHolder().format,
true,
PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
}
}
},
new FakeAudioRenderer(clockAwareHandler, audioRendererEventListener)
};
}
})
.build();
player.addListener(listener);
Renderer videoRenderer = player.getRenderer(/* index= */ 0);
Renderer secondaryVideoRenderer = player.getSecondaryRenderer(/* index= */ 0);
// Set a playlist that allows a new renderer to be enabled early.
player.setMediaSources(
ImmutableList.of(
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
new FakeBlockingMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT)));
player.prepare();
// Play a bit until the second renderer is pre-warming.
player.play();
advance(player)
.untilBackgroundThreadCondition(
() -> secondaryVideoRenderer.getState() == Renderer.STATE_ENABLED);
@Renderer.State int videoState1 = videoRenderer.getState();
@Renderer.State int secondaryVideoState1 = secondaryVideoRenderer.getState();
advance(player)
.untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_ENABLED);
@Renderer.State int videoState2 = videoRenderer.getState();
@Renderer.State int secondaryVideoState2 = secondaryVideoRenderer.getState();
shouldPrimaryRendererThrowRecoverable.set(true);
advance(player)
.untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_DISABLED);
@Renderer.State int videoState3 = videoRenderer.getState();
@Renderer.State int secondaryVideoState3 = secondaryVideoRenderer.getState();
player.release();
verify(listener).onPositionDiscontinuity(any(), any(), anyInt());
assertThat(videoState1).isEqualTo(Renderer.STATE_STARTED);
assertThat(secondaryVideoState1).isEqualTo(Renderer.STATE_ENABLED);
assertThat(videoState2).isEqualTo(Renderer.STATE_ENABLED);
assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_STARTED);
assertThat(videoState3).isEqualTo(Renderer.STATE_DISABLED);
assertThat(secondaryVideoState3).isEqualTo(Renderer.STATE_STARTED);
}
/** {@link FakeMediaSource} that prevents any reading of samples off the sample queue. */
private static final class FakeBlockingMediaSource extends FakeMediaSource {

View File

@ -55,7 +55,6 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -227,11 +226,10 @@ public class DecoderAudioRendererTest {
}
@Test
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
public void getDurationToProgressUs_withAudioSinkBuffersFull_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
@ -246,11 +244,11 @@ public class DecoderAudioRendererTest {
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
@ -277,16 +275,15 @@ public class DecoderAudioRendererTest {
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(50_000L);
assertThat(durationToProgressUs).isEqualTo(75_000L);
}
@Test
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndDoublePlaybackSpeed_returnsCalculatedDuration()
getDurationToProgressUs_withAudioSinkBuffersFullAndDoublePlaybackSpeed_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
PlaybackParameters playbackParametersWithDoubleSpeed =
new PlaybackParameters(/* speed= */ 2.0f);
when(mockAudioSink.getPlaybackParameters()).thenReturn(playbackParametersWithDoubleSpeed);
@ -335,17 +332,16 @@ public class DecoderAudioRendererTest {
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
assertThat(durationToProgressUs).isEqualTo(37_500L);
}
@Test
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndPlaybackAdvancement_returnsCalculatedDuration()
getDurationToProgressUs_withAudioSinkBuffersFullAndPlaybackAdvancement_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
CountDownLatch latchDecode = new CountDownLatch(4);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
@ -395,17 +391,17 @@ public class DecoderAudioRendererTest {
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
assertThat(durationToProgressUs).isEqualTo(40_000L);
assertThat(durationToProgressUs).isEqualTo(65_000L);
}
@Test
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
getDurationToProgressUs_afterReadToEndOfStreamWithAudioSinkBuffersFull_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
CountDownLatch latchDecode = new CountDownLatch(4);
CountDownLatch latchDecode = new CountDownLatch(6);
ForwardingAudioSinkWithCountdownLatch countdownLatchAudioSink =
new ForwardingAudioSinkWithCountdownLatch(mockAudioSink, latchDecode);
audioRenderer = createAudioRenderer(countdownLatchAudioSink);
@ -419,12 +415,16 @@ public class DecoderAudioRendererTest {
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
// Mock that audio sink is full when trying to write final sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 250000), anyInt()))
.thenReturn(false);
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
@ -437,138 +437,16 @@ public class DecoderAudioRendererTest {
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150000 us sample.
when(mockAudioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150000), anyInt()))
.thenReturn(false);
audioRenderer.start();
while (latchDecode.getCount() != 0) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(10_000L);
}
@Test
public void
getDurationToProgressUs_withWrittenLessThanBufferDurationAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
ForwardingAudioSink forwardingAudioSink =
new ForwardingAudioSink(mockAudioSink) {
@Override
public void playToEndOfStream() throws WriteException {
super.playToEndOfStream();
hasCalledPlayToEndOfStream.set(true);
}
};
audioRenderer = createAudioRenderer(forwardingAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
audioRenderer.setCurrentStreamFinal();
while (!hasCalledPlayToEndOfStream.get()) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
ForwardingAudioSink forwardingAudioSink =
new ForwardingAudioSink(mockAudioSink) {
@Override
public void playToEndOfStream() throws WriteException {
super.playToEndOfStream();
hasCalledPlayToEndOfStream.set(true);
}
};
audioRenderer = createAudioRenderer(forwardingAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
audioRenderer.setCurrentStreamFinal();
while (!hasCalledPlayToEndOfStream.get()) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
assertThat(durationToProgressUs).isEqualTo(125_000L);
}
@Test

View File

@ -62,7 +62,6 @@ import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -734,9 +733,8 @@ public class MediaCodecAudioRendererTest {
}
@Test
public void getDurationToProgressUs_usingAudioTrackBufferDurationUs_returnsCalculatedDuration()
public void getDurationToProgressUs_withAudioSinkBuffersFull_returnsCalculatedDuration()
throws Exception {
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
@ -778,14 +776,13 @@ public class MediaCodecAudioRendererTest {
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(50_000L);
assertThat(durationToProgressUs).isEqualTo(75_000L);
}
@Test
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndDoublePlaybackSpeed_returnsCalculatedDuration()
getDurationToProgressUs_withAudioSinkBuffersFullAndDoublePlaybackSpeed_returnsCalculatedDuration()
throws Exception {
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
@ -829,15 +826,14 @@ public class MediaCodecAudioRendererTest {
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
assertThat(durationToProgressUs).isEqualTo(37_500L);
}
@Test
public void
getDurationToProgressUs_usingAudioTrackBufferDurationUsAndPlaybackAdvancement_returnsCalculatedDuration()
getDurationToProgressUs_withAudioSinkBuffersFullAndPlaybackAdvancement_returnsCalculatedDuration()
throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
@ -901,200 +897,7 @@ public class MediaCodecAudioRendererTest {
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, rendererPositionElapsedRealtimeUs);
assertThat(durationToProgressUs).isEqualTo(40_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUsUnsupported_returnsDefaultDuration()
throws Exception {
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
// Represents audio sink buffers being full when trying to write 150_000 us sample.
when(audioSink.handleBuffer(
any(), longThat(presentationTimeUs -> presentationTimeUs == 150_000), anyInt()))
.thenReturn(false);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
mediaCodecAudioRenderer.start();
for (int i = 0; i < 10; i++) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(10_000L);
}
@Test
public void
    getDurationToProgressUs_withWrittenLessThanBufferDurationAfterRenderToEndOfStream_returnsCalculatedDuration()
        throws Exception {
  // Tracks when the renderer has fully drained its codec into the sink, so the test knows
  // all samples (less than the 100_000us track buffer) have been written.
  AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
  // Renderer built with explicit async MediaCodec callback/queueing threads so the test can
  // idle them deterministically; renderToEndOfStream() is intercepted to set the flag above.
  mediaCodecAudioRenderer =
      new MediaCodecAudioRenderer(
          ApplicationProvider.getApplicationContext(),
          new DefaultMediaCodecAdapterFactory(
              ApplicationProvider.getApplicationContext(),
              () -> {
                callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
                return callbackThread;
              },
              () -> {
                queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
                return queueingThread;
              }),
          mediaCodecSelector,
          /* enableDecoderFallback= */ false,
          new Handler(Looper.getMainLooper()),
          audioRendererEventListener,
          audioSink) {
        @Override
        protected void renderToEndOfStream() throws ExoPlaybackException {
          super.renderToEndOfStream();
          hasCalledRenderToEndOfStream.set(true);
        }
      };
  mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
  // Track buffer (100_000us) exceeds the total written media duration, exercising the
  // "written less than buffer duration" calculation path.
  when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(100_000L);
  when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
  // Sample stream of six one-byte key-frame samples spaced 50ms apart, then end of stream.
  FakeSampleStream fakeSampleStream =
      new FakeSampleStream(
          new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
          /* mediaSourceEventDispatcher= */ null,
          DrmSessionManager.DRM_UNSUPPORTED,
          new DrmSessionEventListener.EventDispatcher(),
          /* initialFormat= */ AUDIO_AAC,
          ImmutableList.of(
              oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
              END_OF_STREAM_ITEM));
  fakeSampleStream.writeData(/* startPositionUs= */ 0);
  mediaCodecAudioRenderer.enable(
      RendererConfiguration.DEFAULT,
      new Format[] {AUDIO_AAC},
      fakeSampleStream,
      /* positionUs= */ 0,
      /* joining= */ false,
      /* mayRenderStartOfStream= */ false,
      /* startPositionUs= */ 0,
      /* offsetUs= */ 0,
      new MediaSource.MediaPeriodId(new Object()));
  mediaCodecAudioRenderer.start();
  mediaCodecAudioRenderer.setCurrentStreamFinal();
  // Render until all samples have been drained into the sink.
  while (!hasCalledRenderToEndOfStream.get()) {
    mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
    maybeIdleAsynchronousMediaCodecAdapterThreads();
  }
  long durationToProgressUs =
      mediaCodecAudioRenderer.getDurationToProgressUs(
          /* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
  // Remaining written-but-unplayed media is 250_000 - 200_000 = 50_000us; at 1x speed the
  // renderer reports half of that as the duration to the next progress check.
  assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
    getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
        throws Exception {
  // Tracks when the renderer has fully drained its codec into the sink, so the test knows
  // all samples have been written before querying the progress duration.
  AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
  // Renderer built with explicit async MediaCodec callback/queueing threads so the test can
  // idle them deterministically; renderToEndOfStream() is intercepted to set the flag above.
  mediaCodecAudioRenderer =
      new MediaCodecAudioRenderer(
          ApplicationProvider.getApplicationContext(),
          new DefaultMediaCodecAdapterFactory(
              ApplicationProvider.getApplicationContext(),
              () -> {
                callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
                return callbackThread;
              },
              () -> {
                queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
                return queueingThread;
              }),
          mediaCodecSelector,
          /* enableDecoderFallback= */ false,
          new Handler(Looper.getMainLooper()),
          audioRendererEventListener,
          audioSink) {
        @Override
        protected void renderToEndOfStream() throws ExoPlaybackException {
          super.renderToEndOfStream();
          hasCalledRenderToEndOfStream.set(true);
        }
      };
  mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
  // The sink cannot report its audio track buffer size; after end of stream the renderer
  // should still calculate progress from the written media duration rather than defaulting.
  when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
  when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
  // Sample stream of six one-byte key-frame samples spaced 50ms apart, then end of stream.
  FakeSampleStream fakeSampleStream =
      new FakeSampleStream(
          new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
          /* mediaSourceEventDispatcher= */ null,
          DrmSessionManager.DRM_UNSUPPORTED,
          new DrmSessionEventListener.EventDispatcher(),
          /* initialFormat= */ AUDIO_AAC,
          ImmutableList.of(
              oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
              END_OF_STREAM_ITEM));
  fakeSampleStream.writeData(/* startPositionUs= */ 0);
  mediaCodecAudioRenderer.enable(
      RendererConfiguration.DEFAULT,
      new Format[] {AUDIO_AAC},
      fakeSampleStream,
      /* positionUs= */ 0,
      /* joining= */ false,
      /* mayRenderStartOfStream= */ false,
      /* startPositionUs= */ 0,
      /* offsetUs= */ 0,
      new MediaSource.MediaPeriodId(new Object()));
  mediaCodecAudioRenderer.start();
  mediaCodecAudioRenderer.setCurrentStreamFinal();
  // Render until all samples have been drained into the sink.
  while (!hasCalledRenderToEndOfStream.get()) {
    mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
    maybeIdleAsynchronousMediaCodecAdapterThreads();
  }
  long durationToProgressUs =
      mediaCodecAudioRenderer.getDurationToProgressUs(
          /* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
  // Bug fix: the original contained two contradictory assertions on the same value
  // (25_000L and 65_000L), which could never both pass. Keep the 25_000L expectation,
  // consistent with the sibling post-end-of-stream test: remaining written media is
  // 250_000 - 200_000 = 50_000us, halved at 1x speed. NOTE(review): confirm 25_000L
  // against the current getDurationToProgressUs implementation.
  assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test

View File

@ -29,7 +29,6 @@ import androidx.media3.exoplayer.DecoderCounters;
import androidx.media3.exoplayer.DefaultRenderersFactory;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.analytics.AnalyticsListener;
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
@ -104,14 +103,6 @@ public class ParseAv1SampleDependenciesPlaybackTest {
new ExoPlayer.Builder(applicationContext, renderersFactory)
.setClock(new FakeClock(/* isAutoAdvancing= */ true))
.build();
player.addAnalyticsListener(
new AnalyticsListener() {
@Override
public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {
// Input buffers near the reset position should not be dropped.
assertThat(eventTime.currentPlaybackPositionMs).isAtLeast(200);
}
});
Surface surface = new Surface(new SurfaceTexture(/* texName= */ 1));
player.setVideoSurface(surface);
player.setMediaItem(MediaItem.fromUri(TEST_MP4_URI));
@ -130,7 +121,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
// Which input buffer is dropped first depends on the number of MediaCodec buffer slots.
// This means the asserts cannot be isEqualTo.
assertThat(decoderCounters.maxConsecutiveDroppedBufferCount).isAtMost(2);
assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(4);
assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(8);
}
private static final class CapturingRenderersFactoryWithLateThresholdToDropDecoderInputUs
@ -164,6 +155,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
/* enableDecoderFallback= */ false,
eventHandler,
videoRendererEventListener,
DefaultRenderersFactory.MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY,
/* parseAv1SampleDependencies= */ true,
/* lateThresholdToDropDecoderInputUs= */ -100_000_000L)
};
@ -181,6 +173,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
boolean enableDecoderFallback,
@Nullable Handler eventHandler,
@Nullable VideoRendererEventListener eventListener,
int maxDroppedFramesToNotify,
boolean parseAv1SampleDependencies,
long lateThresholdToDropDecoderInputUs) {
super(
@ -191,7 +184,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
.setEnableDecoderFallback(enableDecoderFallback)
.setEventHandler(eventHandler)
.setEventListener(eventListener)
.setMaxDroppedFramesToNotify(1)
.setMaxDroppedFramesToNotify(maxDroppedFramesToNotify)
.experimentalSetParseAv1SampleDependencies(parseAv1SampleDependencies)
.experimentalSetLateThresholdToDropDecoderInputUs(
lateThresholdToDropDecoderInputUs));

View File

@ -22,7 +22,6 @@ import static com.google.common.truth.Truth.assertThat;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.TrackGroup;
import androidx.media3.common.util.NullableType;
import androidx.media3.decoder.DecoderInputBuffer;
@ -34,7 +33,6 @@ import androidx.media3.exoplayer.source.MediaSource.MediaPeriodId;
import androidx.media3.exoplayer.source.MediaSourceEventListener.EventDispatcher;
import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
import androidx.media3.exoplayer.trackselection.FixedTrackSelection;
import androidx.media3.exoplayer.upstream.Allocator;
import androidx.media3.exoplayer.upstream.DefaultAllocator;
import androidx.media3.test.utils.FakeMediaPeriod;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -272,39 +270,6 @@ public final class MergingMediaPeriodTest {
assertThat(inputBuffer.timeUs).isEqualTo(456_000 - 3000);
}
@Test
public void
getChildPeriod_withTimeOffsetsAndTimeOffsetPeriodChildren_returnsCorrectChildPeriod() {
TrackGroupArray trackGroupArray =
new TrackGroupArray(
new TrackGroup(new Format.Builder().setSampleMimeType(MimeTypes.VIDEO_AV1).build()));
Allocator allocator =
new DefaultAllocator(/* trimOnReset= */ false, /* individualAllocationSize= */ 1024);
MediaPeriod childPeriod0 =
new FakeMediaPeriod(
trackGroupArray, allocator, /* singleSampleTimeUs= */ 0, new EventDispatcher());
MediaPeriod childPeriod1 =
new TimeOffsetMediaPeriod(
new FakeMediaPeriod(
trackGroupArray, allocator, /* singleSampleTimeUs= */ 300, new EventDispatcher()),
/* timeOffsetUs= */ -300);
MediaPeriod childPeriod2 =
new FakeMediaPeriod(
trackGroupArray, allocator, /* singleSampleTimeUs= */ -500, new EventDispatcher());
MergingMediaPeriod mergingMediaPeriod =
new MergingMediaPeriod(
new DefaultCompositeSequenceableLoaderFactory(),
/* periodTimeOffsetsUs= */ new long[] {0, 0, 500},
childPeriod0,
childPeriod1,
childPeriod2);
assertThat(mergingMediaPeriod.getChildPeriod(0)).isEqualTo(childPeriod0);
assertThat(mergingMediaPeriod.getChildPeriod(1)).isEqualTo(childPeriod1);
assertThat(mergingMediaPeriod.getChildPeriod(2)).isEqualTo(childPeriod2);
}
private MergingMediaPeriod prepareMergingPeriod(MergingPeriodDefinition... definitions)
throws Exception {
return prepareMergingPeriod(/* singleTrackGroup= */ false, definitions);

View File

@ -90,13 +90,12 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public final class PreloadMediaSourceTest {
private static final int LOADING_CHECK_INTERVAL_BYTES = 32;
private static final int LOADING_CHECK_INTERVAL_BYTES = 10 * 1024;
private static final int TARGET_PRELOAD_DURATION_US = 10000;
private Allocator allocator;
private BandwidthMeter bandwidthMeter;
private RenderersFactory renderersFactory;
private MediaItem mediaItem;
@Before
public void setUp() {
@ -113,10 +112,6 @@ public final class PreloadMediaSourceTest {
SystemClock.DEFAULT.createHandler(handler.getLooper(), /* callback= */ null),
audioListener)
};
mediaItem =
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/long_1080p_lowbitrate.mp4"))
.build();
}
@Test
@ -151,7 +146,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
@ -192,7 +191,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
@ -232,7 +235,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
@ -259,7 +266,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
AtomicReference<MediaSource> externalCallerMediaSourceReference = new AtomicReference<>();
MediaSource.MediaSourceCaller externalCaller =
@ -304,7 +315,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadMediaSourceReference.get() != null);
@ -373,7 +388,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
@ -453,7 +472,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
@ -560,7 +583,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
runMainLooperUntil(() -> preloadExceptionReference.get() != null);
@ -588,7 +615,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
FakeMediaSource wrappedMediaSource = mediaSourceFactory.getLastCreatedSource();
wrappedMediaSource.setAllowPreparation(false);
preloadMediaSource.preload(/* startPositionUs= */ 0L);
@ -622,7 +653,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
@ -692,7 +727,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
@ -769,7 +808,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
@ -833,7 +876,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
@ -876,7 +923,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
MediaSource.MediaSourceCaller externalCaller =
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);
@ -925,7 +976,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
MediaSource.MediaSourceCaller externalCaller =
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);
@ -976,7 +1031,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
AtomicBoolean externalCaller1SourceInfoRefreshedCalled = new AtomicBoolean();
AtomicBoolean externalCaller2SourceInfoRefreshedCalled = new AtomicBoolean();
MediaSource.MediaSourceCaller externalCaller1 =
@ -1031,7 +1090,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
preloadMediaSource.preload(/* startPositionUs= */ 0L);
shadowOf(Looper.getMainLooper()).idle();
preloadMediaSource.releasePreloadMediaSource();
@ -1077,7 +1140,11 @@ public final class PreloadMediaSourceTest {
getRendererCapabilities(renderersFactory),
allocator,
Util.getCurrentOrMainLooper());
PreloadMediaSource preloadMediaSource = preloadMediaSourceFactory.createMediaSource(mediaItem);
PreloadMediaSource preloadMediaSource =
preloadMediaSourceFactory.createMediaSource(
new MediaItem.Builder()
.setUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"))
.build());
AtomicBoolean externalCallerSourceInfoRefreshedCalled = new AtomicBoolean();
MediaSource.MediaSourceCaller externalCaller =
(source, timeline) -> externalCallerSourceInfoRefreshedCalled.set(true);

View File

@ -1122,7 +1122,7 @@ public final class DashMediaSource extends BaseMediaSource {
if (manifest != null) {
cmcdDataFactory.setIsLive(manifest.dynamic);
}
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
startLoading(
new ParsingLoadable<>(dataSource, dataSpec, C.DATA_TYPE_MANIFEST, manifestParser),

View File

@ -517,8 +517,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
.setPlaybackRate(loadingInfo.playbackSpeed)
.setIsLive(!playlist.hasEndTag)
.setDidRebuffer(loadingInfo.rebufferedSince(lastChunkRequestRealtimeMs))
.setIsBufferEmpty(queue.isEmpty())
.setChunkDurationUs(segmentBaseHolder.segmentBase.durationUs);
.setIsBufferEmpty(queue.isEmpty());
long nextMediaSequence =
segmentBaseHolder.partIndex == C.INDEX_UNSET
? segmentBaseHolder.mediaSequence + 1

View File

@ -112,7 +112,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
.setFlags(segmentBaseHolder.isPreload ? FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED : 0)
.build();
if (cmcdDataFactory != null) {
CmcdData cmcdData = cmcdDataFactory.createCmcdData();
CmcdData cmcdData =
cmcdDataFactory.setChunkDurationUs(mediaSegment.durationUs).createCmcdData();
dataSpec = cmcdData.addToDataSpec(dataSpec);
}

View File

@ -153,7 +153,7 @@ public final class DefaultHlsPlaylistTracker
new CmcdData.Factory(cmcdConfiguration, CmcdData.STREAMING_FORMAT_HLS)
.setObjectType(CmcdData.OBJECT_TYPE_MANIFEST)
.createCmcdData();
dataSpec = cmcdData.addToDataSpec(dataSpec);
cmcdData.addToDataSpec(dataSpec);
}
ParsingLoadable<HlsPlaylist> multivariantPlaylistLoadable =
new ParsingLoadable<>(
@ -796,7 +796,7 @@ public final class DefaultHlsPlaylistTracker
if (primaryMediaPlaylistSnapshot != null) {
cmcdDataFactory.setIsLive(!primaryMediaPlaylistSnapshot.hasEndTag);
}
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
ParsingLoadable<HlsPlaylist> mediaPlaylistLoadable =
new ParsingLoadable<>(

View File

@ -40,7 +40,6 @@ import androidx.media3.exoplayer.source.MediaSourceFactory;
import androidx.media3.exoplayer.source.SinglePeriodTimeline;
import androidx.media3.exoplayer.upstream.Allocator;
import androidx.media3.exoplayer.upstream.LoadErrorHandlingPolicy;
import com.google.common.base.Ascii;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import javax.net.SocketFactory;
@ -183,21 +182,13 @@ public final class RtspMediaSource extends BaseMediaSource {
checkNotNull(mediaItem.localConfiguration);
return new RtspMediaSource(
mediaItem,
shouldForceUseRtpTcp(mediaItem)
forceUseRtpTcp
? new TransferRtpDataChannelFactory(timeoutMs)
: new UdpDataSourceRtpDataChannelFactory(timeoutMs),
userAgent,
socketFactory,
debugLoggingEnabled);
}
private boolean shouldForceUseRtpTcp(MediaItem mediaItem) {
if (forceUseRtpTcp) {
return true;
}
@Nullable String scheme = checkNotNull(mediaItem.localConfiguration).uri.getScheme();
return scheme != null && Ascii.equalsIgnoreCase("rtspt", scheme);
}
}
/** Thrown when an exception or error is encountered during loading an RTSP stream. */
@ -246,7 +237,7 @@ public final class RtspMediaSource extends BaseMediaSource {
this.mediaItem = mediaItem;
this.rtpDataChannelFactory = rtpDataChannelFactory;
this.userAgent = userAgent;
this.uri = maybeConvertRtsptUriScheme(checkNotNull(mediaItem.localConfiguration).uri);
this.uri = checkNotNull(mediaItem.localConfiguration).uri;
this.socketFactory = socketFactory;
this.debugLoggingEnabled = debugLoggingEnabled;
this.timelineDurationUs = C.TIME_UNSET;
@ -271,8 +262,7 @@ public final class RtspMediaSource extends BaseMediaSource {
@Override
public boolean canUpdateMediaItem(MediaItem mediaItem) {
@Nullable MediaItem.LocalConfiguration newConfiguration = mediaItem.localConfiguration;
return newConfiguration != null
&& maybeConvertRtsptUriScheme(newConfiguration.uri).equals(this.uri);
return newConfiguration != null && newConfiguration.uri.equals(this.uri);
}
@Override
@ -319,14 +309,6 @@ public final class RtspMediaSource extends BaseMediaSource {
// Internal methods.
private static Uri maybeConvertRtsptUriScheme(Uri uri) {
@Nullable String scheme = uri.getScheme();
if (scheme == null || !Ascii.equalsIgnoreCase("rtspt", scheme)) {
return uri;
}
return Uri.parse("rtsp" + uri.toString().substring(5));
}
private void notifySourceInfoRefreshed() {
Timeline timeline =
new SinglePeriodTimeline(

View File

@ -66,18 +66,6 @@ public class RtspMediaSourceTest {
assertThat(canUpdateMediaItem).isFalse();
}
@Test
public void canUpdateMediaItem_withChangeToRtspFromRtspt_returnsTrue() {
MediaItem initialMediaItem = new MediaItem.Builder().setUri("rtspt://test.test").build();
MediaItem updatedMediaItem =
TestUtil.buildFullyCustomizedMediaItem().buildUpon().setUri("rtsp://test.test").build();
MediaSource mediaSource = buildMediaSource(initialMediaItem);
boolean canUpdateMediaItem = mediaSource.canUpdateMediaItem(updatedMediaItem);
assertThat(canUpdateMediaItem).isTrue();
}
@Test
public void updateMediaItem_createsTimelineWithUpdatedItem() throws Exception {
MediaItem initialMediaItem =

View File

@ -689,7 +689,7 @@ public final class SsMediaSource extends BaseMediaSource
if (manifest != null) {
cmcdDataFactory.setIsLive(manifest.isLive);
}
dataSpec = cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
cmcdDataFactory.createCmcdData().addToDataSpec(dataSpec);
}
ParsingLoadable<SsManifest> loadable =
new ParsingLoadable<>(manifestDataSource, dataSpec, C.DATA_TYPE_MANIFEST, manifestParser);

View File

@ -169,15 +169,12 @@ public final class Ac3Util {
*
* @param data The AC3SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The AC-3 format parsed from data in the header.
*/
public static Format parseAc3AnnexFFormat(
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData) {
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);
@ -211,15 +208,12 @@ public final class Ac3Util {
*
* @param data The EC3SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The E-AC-3 format parsed from data in the header.
*/
public static Format parseEAc3AnnexFFormat(
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData) {
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);

View File

@ -163,17 +163,14 @@ public final class Ac4Util {
*
* @param data The AC4SpecificBox to parse.
* @param trackId The track identifier to set on the format.
* @param language The language to set on the format, or {@code null} if unset.
* @param language The language to set on the format.
* @param drmInitData {@link DrmInitData} to be included in the format.
* @return The AC-4 format parsed from data in the header.
* @throws ParserException If an unsupported container feature is encountered while parsing AC-4
* Annex E.
*/
public static Format parseAc4AnnexEFormat(
ParsableByteArray data,
String trackId,
@Nullable String language,
@Nullable DrmInitData drmInitData)
ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData)
throws ParserException {
ParsableBitArray dataBitArray = new ParsableBitArray();
dataBitArray.reset(data);

View File

@ -310,13 +310,9 @@ public final class FlacExtractor implements Extractor {
currentFrameFirstSampleNumber = nextFrameFirstSampleNumber;
}
int remainingBufferCapacity = buffer.getData().length - buffer.limit();
if (buffer.bytesLeft() < FlacConstants.MAX_FRAME_HEADER_SIZE
&& remainingBufferCapacity < FlacConstants.MAX_FRAME_HEADER_SIZE) {
// We're running out of bytes to read before buffer.limit, and the next frame header may not
// fit in the rest of buffer.data beyond buffer.limit, so we move the bytes between
// buffer.position and buffer.limit to the start of buffer.data, and reset the position and
// limit.
if (buffer.bytesLeft() < FlacConstants.MAX_FRAME_HEADER_SIZE) {
// The next frame header may not fit in the rest of the buffer, so put the trailing bytes at
// the start of the buffer, and reset the position and limit.
int bytesLeft = buffer.bytesLeft();
System.arraycopy(
buffer.getData(), buffer.getPosition(), buffer.getData(), /* destPos= */ 0, bytesLeft);

View File

@ -36,7 +36,6 @@ import androidx.media3.common.util.ParsableBitArray;
import androidx.media3.common.util.ParsableByteArray;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.container.Mp4AlternateGroupData;
import androidx.media3.container.Mp4Box;
import androidx.media3.container.Mp4Box.LeafBox;
import androidx.media3.container.Mp4LocationData;
@ -55,7 +54,6 @@ import androidx.media3.extractor.VorbisUtil;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
@ -382,38 +380,21 @@ public final class BoxParser {
}
}
}
if (stsdData.format == null) {
return null;
}
Format format;
if (tkhdData.alternateGroup != 0) {
Mp4AlternateGroupData alternateGroupEntry =
new Mp4AlternateGroupData(tkhdData.alternateGroup);
format =
stsdData
.format
.buildUpon()
.setMetadata(
stsdData.format.metadata != null
? stsdData.format.metadata.copyWithAppendedEntries(alternateGroupEntry)
: new Metadata(alternateGroupEntry))
.build();
} else {
format = stsdData.format;
}
return new Track(
tkhdData.id,
trackType,
mdhdData.timescale,
movieTimescale,
durationUs,
mdhdData.mediaDurationUs,
format,
stsdData.requiredSampleTransformation,
stsdData.trackEncryptionBoxes,
stsdData.nalUnitLengthFieldLength,
editListDurations,
editListMediaTimes);
return stsdData.format == null
? null
: new Track(
tkhdData.id,
trackType,
mdhdData.timescale,
movieTimescale,
durationUs,
mdhdData.mediaDurationUs,
stsdData.format,
stsdData.requiredSampleTransformation,
stsdData.trackEncryptionBoxes,
stsdData.nalUnitLengthFieldLength,
editListDurations,
editListMediaTimes);
}
/**
@ -528,7 +509,6 @@ public final class BoxParser {
int[] flags;
long timestampTimeUnits = 0;
long duration;
long totalSize = 0;
if (rechunkFixedSizeSamples) {
long[] chunkOffsetsBytes = new long[chunkIterator.length];
@ -546,7 +526,6 @@ public final class BoxParser {
timestamps = rechunkedResults.timestamps;
flags = rechunkedResults.flags;
duration = rechunkedResults.duration;
totalSize = rechunkedResults.totalSize;
} else {
offsets = new long[sampleCount];
sizes = new int[sampleCount];
@ -589,7 +568,6 @@ public final class BoxParser {
offsets[i] = offset;
sizes[i] = sampleSizeBox.readNextSampleSize();
totalSize += sizes[i];
if (sizes[i] > maximumSize) {
maximumSize = sizes[i];
}
@ -661,20 +639,6 @@ public final class BoxParser {
+ (!isCttsValid ? ", ctts invalid" : ""));
}
}
if (track.mediaDurationUs > 0) {
long averageBitrate =
Util.scaleLargeValue(
totalSize * C.BITS_PER_BYTE,
C.MICROS_PER_SECOND,
track.mediaDurationUs,
RoundingMode.HALF_DOWN);
if (averageBitrate > 0 && averageBitrate < Integer.MAX_VALUE) {
Format format = track.format.buildUpon().setAverageBitrate((int) averageBitrate).build();
track = track.copyWithFormat(format);
}
}
long durationUs = Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, track.timescale);
if (track.editListDurations == null) {
@ -949,9 +913,7 @@ public final class BoxParser {
}
}
tkhd.skipBytes(10);
int alternateGroup = tkhd.readUnsignedShort();
tkhd.skipBytes(4);
tkhd.skipBytes(16);
int a00 = tkhd.readInt();
int a01 = tkhd.readInt();
tkhd.skipBytes(4);
@ -971,7 +933,7 @@ public final class BoxParser {
rotationDegrees = 0;
}
return new TkhdData(trackId, duration, alternateGroup, rotationDegrees);
return new TkhdData(trackId, duration, rotationDegrees);
}
/**
@ -1035,34 +997,22 @@ public final class BoxParser {
mediaDurationUs = Util.scaleLargeTimestamp(mediaDuration, C.MICROS_PER_SECOND, timescale);
}
}
String language = getLanguageFromCode(/* languageCode= */ mdhd.readUnsignedShort());
int languageCode = mdhd.readUnsignedShort();
String language =
""
+ (char) (((languageCode >> 10) & 0x1F) + 0x60)
+ (char) (((languageCode >> 5) & 0x1F) + 0x60)
+ (char) ((languageCode & 0x1F) + 0x60);
return new MdhdData(timescale, mediaDurationUs, language);
}
@Nullable
private static String getLanguageFromCode(int languageCode) {
char[] chars = {
(char) (((languageCode >> 10) & 0x1F) + 0x60),
(char) (((languageCode >> 5) & 0x1F) + 0x60),
(char) ((languageCode & 0x1F) + 0x60)
};
for (char c : chars) {
if (c < 'a' || c > 'z') {
return null;
}
}
return new String(chars);
}
/**
* Parses a stsd atom (defined in ISO/IEC 14496-12).
*
* @param stsd The stsd atom to decode.
* @param trackId The track's identifier in its container.
* @param rotationDegrees The rotation of the track in degrees.
* @param language The language of the track, or {@code null} if unset.
* @param language The language of the track.
* @param drmInitData {@link DrmInitData} to be included in the format, or {@code null}.
* @param isQuickTime True for QuickTime media. False otherwise.
* @return An object containing the parsed data.
@ -1071,7 +1021,7 @@ public final class BoxParser {
ParsableByteArray stsd,
int trackId,
int rotationDegrees,
@Nullable String language,
String language,
@Nullable DrmInitData drmInitData,
boolean isQuickTime)
throws ParserException {
@ -1107,7 +1057,6 @@ public final class BoxParser {
childStartPosition,
childAtomSize,
trackId,
language,
rotationDegrees,
drmInitData,
out,
@ -1176,7 +1125,7 @@ public final class BoxParser {
int position,
int atomSize,
int trackId,
@Nullable String language,
String language,
StsdData out) {
parent.setPosition(position + Mp4Box.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
@ -1225,7 +1174,6 @@ public final class BoxParser {
int position,
int size,
int trackId,
@Nullable String language,
int rotationDegrees,
@Nullable DrmInitData drmInitData,
StsdData out,
@ -1275,7 +1223,6 @@ public final class BoxParser {
@Nullable byte[] projectionData = null;
@C.StereoMode int stereoMode = Format.NO_VALUE;
@Nullable EsdsData esdsData = null;
@Nullable BtrtData btrtData = null;
int maxNumReorderSamples = Format.NO_VALUE;
int maxSubLayers = Format.NO_VALUE;
@Nullable NalUnitUtil.H265VpsData vpsData = null;
@ -1499,8 +1446,6 @@ public final class BoxParser {
if (initializationDataBytes != null) {
initializationData = ImmutableList.of(initializationDataBytes);
}
} else if (childAtomType == Mp4Box.TYPE_btrt) {
btrtData = parseBtrtFromParent(parent, childStartPosition);
} else if (childAtomType == Mp4Box.TYPE_pasp) {
pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
pixelWidthHeightRatioFromPasp = true;
@ -1598,7 +1543,6 @@ public final class BoxParser {
.setMaxNumReorderSamples(maxNumReorderSamples)
.setMaxSubLayers(maxSubLayers)
.setDrmInitData(drmInitData)
.setLanguage(language)
// Note that if either mdcv or clli are missing, we leave the corresponding HDR static
// metadata bytes with value zero. See [Internal ref: b/194535665].
.setColorInfo(
@ -1611,12 +1555,7 @@ public final class BoxParser {
.setChromaBitdepth(bitdepthChroma)
.build());
// Prefer btrtData over esdsData for video track.
if (btrtData != null) {
formatBuilder
.setAverageBitrate(Ints.saturatedCast(btrtData.avgBitrate))
.setPeakBitrate(Ints.saturatedCast(btrtData.maxBitrate));
} else if (esdsData != null) {
if (esdsData != null) {
formatBuilder
.setAverageBitrate(Ints.saturatedCast(esdsData.bitrate))
.setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate));
@ -1873,7 +1812,7 @@ public final class BoxParser {
int position,
int size,
int trackId,
@Nullable String language,
String language,
boolean isQuickTime,
@Nullable DrmInitData drmInitData,
StsdData out,
@ -1895,7 +1834,6 @@ public final class BoxParser {
@C.PcmEncoding int pcmEncoding = Format.NO_VALUE;
@Nullable String codecs = null;
@Nullable EsdsData esdsData = null;
@Nullable BtrtData btrtData = null;
if (quickTimeSoundDescriptionVersion == 0 || quickTimeSoundDescriptionVersion == 1) {
channelCount = parent.readUnsignedShort();
@ -2102,8 +2040,6 @@ public final class BoxParser {
}
}
}
} else if (childAtomType == Mp4Box.TYPE_btrt) {
btrtData = parseBtrtFromParent(parent, childPosition);
} else if (childAtomType == Mp4Box.TYPE_dac3) {
parent.setPosition(Mp4Box.HEADER_SIZE + childPosition);
out.format =
@ -2191,15 +2127,10 @@ public final class BoxParser {
.setDrmInitData(drmInitData)
.setLanguage(language);
// Prefer esdsData over btrtData for audio track.
if (esdsData != null) {
formatBuilder
.setAverageBitrate(Ints.saturatedCast(esdsData.bitrate))
.setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate));
} else if (btrtData != null) {
formatBuilder
.setAverageBitrate(Ints.saturatedCast(btrtData.avgBitrate))
.setPeakBitrate(Ints.saturatedCast(btrtData.maxBitrate));
}
out.format = formatBuilder.build();
@ -2290,20 +2221,6 @@ public final class BoxParser {
/* peakBitrate= */ peakBitrate > 0 ? peakBitrate : Format.NO_VALUE);
}
/**
* Returns bitrate data contained in a btrt box, as specified by Section 8.5.2.2 in ISO/IEC
* 14496-12:2012(E).
*/
private static BtrtData parseBtrtFromParent(ParsableByteArray parent, int position) {
parent.setPosition(position + Mp4Box.HEADER_SIZE);
parent.skipBytes(4); // bufferSizeDB
long maxBitrate = parent.readUnsignedInt();
long avgBitrate = parent.readUnsignedInt();
return new BtrtData(avgBitrate, maxBitrate);
}
/**
* Returns stereo video playback related meta data from the vexu box. See
* https://developer.apple.com/av-foundation/Stereo-Video-ISOBMFF-Extensions.pdf for ref.
@ -2564,13 +2481,11 @@ public final class BoxParser {
private final int id;
private final long duration;
private final int alternateGroup;
private final int rotationDegrees;
public TkhdData(int id, long duration, int alternateGroup, int rotationDegrees) {
public TkhdData(int id, long duration, int rotationDegrees) {
this.id = id;
this.duration = duration;
this.alternateGroup = alternateGroup;
this.rotationDegrees = rotationDegrees;
}
}
@ -2611,17 +2526,6 @@ public final class BoxParser {
}
}
/** Data parsed from btrt box. */
private static final class BtrtData {
private final long avgBitrate;
private final long maxBitrate;
public BtrtData(long avgBitrate, long maxBitrate) {
this.avgBitrate = avgBitrate;
this.maxBitrate = maxBitrate;
}
}
/** Data parsed from stri box. */
private static final class StriData {
private final boolean hasLeftEyeView;
@ -2648,9 +2552,9 @@ public final class BoxParser {
private static final class MdhdData {
private final long timescale;
private final long mediaDurationUs;
@Nullable private final String language;
private final String language;
public MdhdData(long timescale, long mediaDurationUs, @Nullable String language) {
public MdhdData(long timescale, long mediaDurationUs, String language) {
this.timescale = timescale;
this.mediaDurationUs = mediaDurationUs;
this.language = language;

View File

@ -35,7 +35,6 @@ import androidx.media3.common.util.Util;
public final long[] timestamps;
public final int[] flags;
public final long duration;
public final long totalSize;
private Results(
long[] offsets,
@ -43,15 +42,13 @@ import androidx.media3.common.util.Util;
int maximumSize,
long[] timestamps,
int[] flags,
long duration,
long totalSize) {
long duration) {
this.offsets = offsets;
this.sizes = sizes;
this.maximumSize = maximumSize;
this.timestamps = timestamps;
this.flags = flags;
this.duration = duration;
this.totalSize = totalSize;
}
}
@ -84,7 +81,6 @@ import androidx.media3.common.util.Util;
int maximumSize = 0;
long[] timestamps = new long[rechunkedSampleCount];
int[] flags = new int[rechunkedSampleCount];
int totalSize = 0;
int originalSampleIndex = 0;
int newSampleIndex = 0;
@ -97,7 +93,6 @@ import androidx.media3.common.util.Util;
offsets[newSampleIndex] = sampleOffset;
sizes[newSampleIndex] = fixedSampleSize * bufferSampleCount;
totalSize += sizes[newSampleIndex];
maximumSize = max(maximumSize, sizes[newSampleIndex]);
timestamps[newSampleIndex] = (timestampDeltaInTimeUnits * originalSampleIndex);
flags[newSampleIndex] = C.BUFFER_FLAG_KEY_FRAME;
@ -111,7 +106,7 @@ import androidx.media3.common.util.Util;
}
long duration = timestampDeltaInTimeUnits * originalSampleIndex;
return new Results(offsets, sizes, maximumSize, timestamps, flags, duration, totalSize);
return new Results(offsets, sizes, maximumSize, timestamps, flags, duration);
}
private FixedSampleSizeRechunker() {

View File

@ -84,22 +84,13 @@ import com.google.common.collect.ImmutableList;
private MetadataUtil() {}
/**
* Updates a {@link Format.Builder} to include metadata from the provided sources.
*
* @param trackType The {@link C.TrackType} of the track.
* @param mdtaMetadata The {@link Metadata} from the {@code mdta} box if present, otherwise null.
* @param formatBuilder A {@link Format.Builder} to append the metadata too.
* @param existingMetadata The {@link Format#metadata} from {@code formatBuilder}.
* @param additionalMetadata Additional metadata to append.
*/
/** Updates a {@link Format.Builder} to include metadata from the provided sources. */
public static void setFormatMetadata(
@C.TrackType int trackType,
int trackType,
@Nullable Metadata mdtaMetadata,
Format.Builder formatBuilder,
@Nullable Metadata existingMetadata,
@NullableType Metadata... additionalMetadata) {
Metadata formatMetadata = existingMetadata != null ? existingMetadata : new Metadata();
Metadata formatMetadata = new Metadata();
if (mdtaMetadata != null) {
for (int i = 0; i < mdtaMetadata.length(); i++) {

View File

@ -757,6 +757,12 @@ public final class Mp4Extractor implements Extractor, SeekMap {
roleFlags |=
firstVideoTrackIndex == C.INDEX_UNSET ? C.ROLE_FLAG_MAIN : C.ROLE_FLAG_ALTERNATE;
}
if (track.format.frameRate == Format.NO_VALUE
&& trackDurationUs > 0
&& trackSampleTable.sampleCount > 0) {
float frameRate = trackSampleTable.sampleCount / (trackDurationUs / 1000000f);
formatBuilder.setFrameRate(frameRate);
}
if (readingAuxiliaryTracks) {
roleFlags |= C.ROLE_FLAG_AUXILIARY;
formatBuilder.setAuxiliaryTrackType(auxiliaryTrackTypesForAuxiliaryTracks.get(i));
@ -769,7 +775,6 @@ public final class Mp4Extractor implements Extractor, SeekMap {
track.type,
mdtaMetadata,
formatBuilder,
track.format.metadata,
slowMotionMetadataEntries.isEmpty() ? null : new Metadata(slowMotionMetadataEntries),
udtaMetadata,
mvhdMetadata);

View File

@ -260,11 +260,6 @@ public final class Mp4ExtractorParameterizedTest {
assertExtractorBehavior("media/mp4/sample_2_byte_NAL_length.mp4");
}
@Test
public void mp4SampleWithBtrt() throws Exception {
assertExtractorBehavior("media/mp4/sample_with_btrt.mp4");
}
private void assertExtractorBehavior(String file) throws IOException {
ExtractorAsserts.AssertionConfig.Builder assertionConfigBuilder =
new ExtractorAsserts.AssertionConfig.Builder();

View File

@ -113,7 +113,10 @@ import androidx.media3.common.util.Util;
public PendingIntent createMediaActionPendingIntent(
MediaSession mediaSession, @Player.Command long command) {
int keyCode = toKeyCode(command);
Intent intent = getMediaButtonIntent(mediaSession, keyCode);
Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
intent.setData(mediaSession.getImpl().getUri());
intent.setComponent(new ComponentName(service, service.getClass()));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, keyCode));
if (Util.SDK_INT >= 26
&& command == COMMAND_PLAY_PAUSE
&& !mediaSession.getPlayer().getPlayWhenReady()) {
@ -127,26 +130,6 @@ import androidx.media3.common.util.Util;
}
}
@Override
public PendingIntent createNotificationDismissalIntent(MediaSession mediaSession) {
Intent intent =
getMediaButtonIntent(mediaSession, KEYCODE_MEDIA_STOP)
.putExtra(MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY, true);
return PendingIntent.getService(
service,
/* requestCode= */ KEYCODE_MEDIA_STOP,
intent,
Util.SDK_INT >= 23 ? PendingIntent.FLAG_IMMUTABLE : 0);
}
private Intent getMediaButtonIntent(MediaSession mediaSession, int mediaKeyCode) {
Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
intent.setData(mediaSession.getImpl().getUri());
intent.setComponent(new ComponentName(service, service.getClass()));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, mediaKeyCode));
return intent;
}
private int toKeyCode(@Player.Command long action) {
if (action == COMMAND_SEEK_TO_NEXT_MEDIA_ITEM || action == COMMAND_SEEK_TO_NEXT) {
return KEYCODE_MEDIA_NEXT;

View File

@ -22,6 +22,7 @@ import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS;
import static androidx.media3.common.Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM;
import static androidx.media3.common.Player.COMMAND_STOP;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
@ -378,7 +379,8 @@ public class DefaultMediaNotificationProvider implements MediaNotification.Provi
Notification notification =
builder
.setContentIntent(mediaSession.getSessionActivity())
.setDeleteIntent(actionFactory.createNotificationDismissalIntent(mediaSession))
.setDeleteIntent(
actionFactory.createMediaActionPendingIntent(mediaSession, COMMAND_STOP))
.setOnlyAlertOnce(true)
.setSmallIcon(smallIconResourceId)
.setStyle(mediaStyle)

View File

@ -898,8 +898,6 @@ import java.util.concurrent.TimeoutException;
return metadata.writer;
case MediaMetadataCompat.METADATA_KEY_COMPOSER:
return metadata.composer;
case MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE:
return metadata.subtitle;
default:
return null;
}

View File

@ -61,7 +61,6 @@ public final class MediaBrowser extends MediaController {
private Looper applicationLooper;
private @MonotonicNonNull BitmapLoader bitmapLoader;
private int maxCommandsForMediaItems;
private long platformSessionCallbackAggregationTimeoutMs;
/**
* Creates a builder for {@link MediaBrowser}.
@ -79,8 +78,6 @@ public final class MediaBrowser extends MediaController {
connectionHints = Bundle.EMPTY;
listener = new Listener() {};
applicationLooper = Util.getCurrentOrMainLooper();
platformSessionCallbackAggregationTimeoutMs =
DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS;
}
/**
@ -159,24 +156,6 @@ public final class MediaBrowser extends MediaController {
return this;
}
/**
* Sets the timeout after which updates from the platform session callbacks are applied to the
* browser, in milliseconds.
*
* <p>The default is 100ms.
*
* @param platformSessionCallbackAggregationTimeoutMs The timeout, in milliseconds.
* @return The builder to allow chaining.
*/
@UnstableApi
@CanIgnoreReturnValue
public Builder experimentalSetPlatformSessionCallbackAggregationTimeoutMs(
long platformSessionCallbackAggregationTimeoutMs) {
this.platformSessionCallbackAggregationTimeoutMs =
platformSessionCallbackAggregationTimeoutMs;
return this;
}
/**
* Builds a {@link MediaBrowser} asynchronously.
*
@ -217,8 +196,7 @@ public final class MediaBrowser extends MediaController {
applicationLooper,
holder,
bitmapLoader,
maxCommandsForMediaItems,
platformSessionCallbackAggregationTimeoutMs);
maxCommandsForMediaItems);
postOrRun(new Handler(applicationLooper), () -> holder.setController(browser));
return holder;
}
@ -288,8 +266,7 @@ public final class MediaBrowser extends MediaController {
Looper applicationLooper,
ConnectionCallback connectionCallback,
@Nullable BitmapLoader bitmapLoader,
int maxCommandsForMediaItems,
long platformSessionCallbackAggregationTimeoutMs) {
int maxCommandsForMediaItems) {
super(
context,
token,
@ -298,8 +275,7 @@ public final class MediaBrowser extends MediaController {
applicationLooper,
connectionCallback,
bitmapLoader,
maxCommandsForMediaItems,
platformSessionCallbackAggregationTimeoutMs);
maxCommandsForMediaItems);
}
@Override
@ -310,19 +286,12 @@ public final class MediaBrowser extends MediaController {
SessionToken token,
Bundle connectionHints,
Looper applicationLooper,
@Nullable BitmapLoader bitmapLoader,
long platformSessionCallbackAggregationTimeoutMs) {
@Nullable BitmapLoader bitmapLoader) {
MediaBrowserImpl impl;
if (token.isLegacySession()) {
impl =
new MediaBrowserImplLegacy(
context,
this,
token,
connectionHints,
applicationLooper,
checkNotNull(bitmapLoader),
platformSessionCallbackAggregationTimeoutMs);
context, this, token, connectionHints, applicationLooper, checkNotNull(bitmapLoader));
} else {
impl = new MediaBrowserImplBase(context, this, token, connectionHints, applicationLooper);
}

View File

@ -64,16 +64,8 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
SessionToken token,
Bundle connectionHints,
Looper applicationLooper,
BitmapLoader bitmapLoader,
long platformSessionCallbackAggregationTimeoutMs) {
super(
context,
instance,
token,
connectionHints,
applicationLooper,
bitmapLoader,
platformSessionCallbackAggregationTimeoutMs);
BitmapLoader bitmapLoader) {
super(context, instance, token, connectionHints, applicationLooper, bitmapLoader);
this.instance = instance;
commandButtonsForMediaItems = ImmutableMap.of();
}

View File

@ -201,8 +201,6 @@ public class MediaController implements Player {
"MediaController method is called from a wrong thread."
+ " See javadoc of MediaController for details.";
@UnstableApi protected static final long DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS = 100L;
/** A builder for {@link MediaController}. */
public static final class Builder {
@ -213,7 +211,6 @@ public class MediaController implements Player {
private Looper applicationLooper;
private @MonotonicNonNull BitmapLoader bitmapLoader;
private int maxCommandsForMediaItems;
private long platformSessionCallbackAggregationTimeoutMs;
/**
* Creates a builder for {@link MediaController}.
@ -245,8 +242,6 @@ public class MediaController implements Player {
connectionHints = Bundle.EMPTY;
listener = new Listener() {};
applicationLooper = Util.getCurrentOrMainLooper();
platformSessionCallbackAggregationTimeoutMs =
DEFAULT_PLATFORM_CALLBACK_AGGREGATION_TIMEOUT_MS;
}
/**
@ -325,24 +320,6 @@ public class MediaController implements Player {
return this;
}
/**
* Sets the timeout after which updates from the platform session callbacks are applied to the
* browser, in milliseconds.
*
* <p>The default is 100ms.
*
* @param platformSessionCallbackAggregationTimeoutMs The timeout, in milliseconds.
* @return tThe builder to allow chaining.
*/
@UnstableApi
@CanIgnoreReturnValue
public Builder experimentalSetPlatformSessionCallbackAggregationTimeoutMs(
long platformSessionCallbackAggregationTimeoutMs) {
this.platformSessionCallbackAggregationTimeoutMs =
platformSessionCallbackAggregationTimeoutMs;
return this;
}
/**
* Builds a {@link MediaController} asynchronously.
*
@ -384,8 +361,7 @@ public class MediaController implements Player {
applicationLooper,
holder,
bitmapLoader,
maxCommandsForMediaItems,
platformSessionCallbackAggregationTimeoutMs);
maxCommandsForMediaItems);
postOrRun(new Handler(applicationLooper), () -> holder.setController(controller));
return holder;
}
@ -577,8 +553,7 @@ public class MediaController implements Player {
Looper applicationLooper,
ConnectionCallback connectionCallback,
@Nullable BitmapLoader bitmapLoader,
int maxCommandsForMediaItems,
long platformSessionCallbackAggregationTimeoutMs) {
int maxCommandsForMediaItems) {
checkNotNull(context, "context must not be null");
checkNotNull(token, "token must not be null");
Log.i(
@ -601,14 +576,7 @@ public class MediaController implements Player {
this.connectionCallback = connectionCallback;
this.maxCommandsForMediaItems = maxCommandsForMediaItems;
impl =
createImpl(
context,
token,
connectionHints,
applicationLooper,
bitmapLoader,
platformSessionCallbackAggregationTimeoutMs);
impl = createImpl(context, token, connectionHints, applicationLooper, bitmapLoader);
impl.connect();
}
@ -619,17 +587,10 @@ public class MediaController implements Player {
SessionToken token,
Bundle connectionHints,
Looper applicationLooper,
@Nullable BitmapLoader bitmapLoader,
long platformSessionCallbackAggregationTimeoutMs) {
@Nullable BitmapLoader bitmapLoader) {
if (token.isLegacySession()) {
return new MediaControllerImplLegacy(
context,
this,
token,
connectionHints,
applicationLooper,
checkNotNull(bitmapLoader),
platformSessionCallbackAggregationTimeoutMs);
context, this, token, connectionHints, applicationLooper, checkNotNull(bitmapLoader));
} else {
return new MediaControllerImplBase(context, this, token, connectionHints, applicationLooper);
}

View File

@ -93,6 +93,8 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
private static final String TAG = "MCImplLegacy";
private static final long AGGREGATES_CALLBACKS_WITHIN_TIMEOUT_MS = 500L;
/* package */ final Context context;
private final MediaController instance;
@ -102,7 +104,6 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
private final BitmapLoader bitmapLoader;
private final ImmutableList<CommandButton> commandButtonsForMediaItems;
private final Bundle connectionHints;
private final long platformSessionCallbackAggregationTimeoutMs;
@Nullable private MediaControllerCompat controllerCompat;
@Nullable private MediaBrowserCompat browserCompat;
@ -121,8 +122,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
SessionToken token,
Bundle connectionHints,
Looper applicationLooper,
BitmapLoader bitmapLoader,
long platformSessionCallbackAggregationTimeoutMs) {
BitmapLoader bitmapLoader) {
// Initialize default values.
legacyPlayerInfo = new LegacyPlayerInfo();
pendingLegacyPlayerInfo = new LegacyPlayerInfo();
@ -140,7 +140,6 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
this.token = token;
this.connectionHints = connectionHints;
this.bitmapLoader = bitmapLoader;
this.platformSessionCallbackAggregationTimeoutMs = platformSessionCallbackAggregationTimeoutMs;
currentPositionMs = C.TIME_UNSET;
lastSetPlayWhenReadyCalledTimeMs = C.TIME_UNSET;
// Always empty. Only supported for a MediaBrowser connected to a MediaBrowserServiceCompat.
@ -1993,7 +1992,7 @@ import org.checkerframework.checker.initialization.qual.UnderInitialization;
return;
}
pendingChangesHandler.sendEmptyMessageDelayed(
MSG_HANDLE_PENDING_UPDATES, platformSessionCallbackAggregationTimeoutMs);
MSG_HANDLE_PENDING_UPDATES, AGGREGATES_CALLBACKS_WITHIN_TIMEOUT_MS);
}
}

View File

@ -31,17 +31,6 @@ import com.google.common.collect.ImmutableList;
/** A notification for media playbacks. */
public final class MediaNotification {
/**
* Event key to indicate a media notification was dismissed.
*
* <p>This event key can be used as an extras key for a boolean extra on a media button pending
* intent, and as as custom session command action to inform the media notification controller
* that a notification was dismissed.
*/
@UnstableApi
public static final String NOTIFICATION_DISMISSED_EVENT_KEY =
"androidx.media3.session.NOTIFICATION_DISMISSED_EVENT_KEY";
/**
* Creates {@linkplain NotificationCompat.Action actions} and {@linkplain PendingIntent pending
* intents} for notifications.
@ -110,20 +99,10 @@ public final class MediaNotification {
* Creates a {@link PendingIntent} for a media action that will be handled by the library.
*
* @param mediaSession The media session to which the action will be sent.
* @param command The {@link PendingIntent}.
* @param command The intent's command.
*/
PendingIntent createMediaActionPendingIntent(
MediaSession mediaSession, @Player.Command long command);
/**
* Creates a {@link PendingIntent} triggered when the notification is dismissed.
*
* @param mediaSession The media session for which the intent is created.
* @return The {@link PendingIntent}.
*/
default PendingIntent createNotificationDismissalIntent(MediaSession mediaSession) {
return createMediaActionPendingIntent(mediaSession, Player.COMMAND_STOP);
}
}
/**

View File

@ -17,7 +17,6 @@ package androidx.media3.session;
import static android.app.Service.STOP_FOREGROUND_DETACH;
import static android.app.Service.STOP_FOREGROUND_REMOVE;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.annotation.SuppressLint;
@ -60,15 +59,14 @@ import java.util.concurrent.TimeoutException;
private static final int MSG_USER_ENGAGED_TIMEOUT = 1;
private final MediaSessionService mediaSessionService;
private final MediaNotification.Provider mediaNotificationProvider;
private final MediaNotification.ActionFactory actionFactory;
private final NotificationManagerCompat notificationManagerCompat;
private final Handler mainHandler;
private final Executor mainExecutor;
private final Intent startSelfIntent;
private final Map<MediaSession, ControllerInfo> controllerMap;
private final Map<MediaSession, ListenableFuture<MediaController>> controllerMap;
private MediaNotification.Provider mediaNotificationProvider;
private int totalNotificationCount;
@Nullable private MediaNotification mediaNotification;
private boolean startedInForeground;
@ -106,7 +104,7 @@ import java.util.concurrent.TimeoutException;
.setListener(listener)
.setApplicationLooper(Looper.getMainLooper())
.buildAsync();
controllerMap.put(session, new ControllerInfo(controllerFuture));
controllerMap.put(session, controllerFuture);
controllerFuture.addListener(
() -> {
try {
@ -125,9 +123,9 @@ import java.util.concurrent.TimeoutException;
}
public void removeSession(MediaSession session) {
@Nullable ControllerInfo controllerInfo = controllerMap.remove(session);
if (controllerInfo != null) {
MediaController.releaseFuture(controllerInfo.controllerFuture);
@Nullable ListenableFuture<MediaController> future = controllerMap.remove(session);
if (future != null) {
MediaController.releaseFuture(future);
}
}
@ -147,15 +145,6 @@ import java.util.concurrent.TimeoutException;
});
}
/**
* Updates the media notification provider.
*
* @param mediaNotificationProvider The {@link MediaNotification.Provider}.
*/
public void setMediaNotificationProvider(MediaNotification.Provider mediaNotificationProvider) {
this.mediaNotificationProvider = mediaNotificationProvider;
}
/**
* Updates the notification.
*
@ -169,8 +158,19 @@ import java.util.concurrent.TimeoutException;
}
int notificationSequence = ++totalNotificationCount;
MediaController mediaNotificationController = null;
ListenableFuture<MediaController> controller = controllerMap.get(session);
if (controller != null && controller.isDone()) {
try {
mediaNotificationController = Futures.getDone(controller);
} catch (ExecutionException e) {
// Ignore.
}
}
ImmutableList<CommandButton> mediaButtonPreferences =
checkNotNull(getConnectedControllerForSession(session)).getMediaButtonPreferences();
mediaNotificationController != null
? mediaNotificationController.getMediaButtonPreferences()
: ImmutableList.of();
MediaNotification.Provider.Callback callback =
notification ->
mainExecutor.execute(
@ -261,13 +261,6 @@ import java.util.concurrent.TimeoutException;
}
}
private void onNotificationDismissed(MediaSession session) {
@Nullable ControllerInfo controllerInfo = controllerMap.get(session);
if (controllerInfo != null) {
controllerInfo.wasNotificationDismissed = true;
}
}
// POST_NOTIFICATIONS permission is not required for media session related notifications.
// https://developer.android.com/develop/ui/views/notifications/notification-permission#exemptions-media-sessions
@SuppressLint("MissingPermission")
@ -308,25 +301,17 @@ import java.util.concurrent.TimeoutException;
private boolean shouldShowNotification(MediaSession session) {
MediaController controller = getConnectedControllerForSession(session);
if (controller == null || controller.getCurrentTimeline().isEmpty()) {
return false;
}
ControllerInfo controllerInfo = checkNotNull(controllerMap.get(session));
if (controller.getPlaybackState() != Player.STATE_IDLE) {
// Playback restarted, reset previous notification dismissed flag.
controllerInfo.wasNotificationDismissed = false;
}
return !controllerInfo.wasNotificationDismissed;
return controller != null && !controller.getCurrentTimeline().isEmpty();
}
@Nullable
private MediaController getConnectedControllerForSession(MediaSession session) {
@Nullable ControllerInfo controllerInfo = controllerMap.get(session);
if (controllerInfo == null || !controllerInfo.controllerFuture.isDone()) {
ListenableFuture<MediaController> controller = controllerMap.get(session);
if (controller == null || !controller.isDone()) {
return null;
}
try {
return Futures.getDone(controllerInfo.controllerFuture);
return Futures.getDone(controller);
} catch (ExecutionException exception) {
// We should never reach this.
throw new IllegalStateException(exception);
@ -365,7 +350,8 @@ import java.util.concurrent.TimeoutException;
}
}
private final class MediaControllerListener implements MediaController.Listener, Player.Listener {
private static final class MediaControllerListener
implements MediaController.Listener, Player.Listener {
private final MediaSessionService mediaSessionService;
private final MediaSession session;
@ -395,17 +381,6 @@ import java.util.concurrent.TimeoutException;
session, /* startInForegroundWhenPaused= */ false);
}
@Override
public ListenableFuture<SessionResult> onCustomCommand(
MediaController controller, SessionCommand command, Bundle args) {
@SessionResult.Code int resultCode = SessionError.ERROR_NOT_SUPPORTED;
if (command.customAction.equals(MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY)) {
onNotificationDismissed(session);
resultCode = SessionResult.RESULT_SUCCESS;
}
return Futures.immediateFuture(new SessionResult(resultCode));
}
@Override
public void onDisconnected(MediaController controller) {
if (mediaSessionService.isSessionAdded(session)) {
@ -452,18 +427,6 @@ import java.util.concurrent.TimeoutException;
startedInForeground = false;
}
private static final class ControllerInfo {
public final ListenableFuture<MediaController> controllerFuture;
/** Indicates whether the user actively dismissed the notification. */
public boolean wasNotificationDismissed;
public ControllerInfo(ListenableFuture<MediaController> controllerFuture) {
this.controllerFuture = controllerFuture;
}
}
@RequiresApi(24)
private static class Api24 {

View File

@ -1322,14 +1322,10 @@ import org.checkerframework.checker.initialization.qual.Initialized;
return false;
}
// Send from media notification controller.
boolean isDismissNotificationEvent =
intent.getBooleanExtra(
MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY, /* defaultValue= */ false);
return applyMediaButtonKeyEvent(keyEvent, doubleTapCompleted, isDismissNotificationEvent);
return applyMediaButtonKeyEvent(keyEvent, doubleTapCompleted);
}
private boolean applyMediaButtonKeyEvent(
KeyEvent keyEvent, boolean doubleTapCompleted, boolean isDismissNotificationEvent) {
private boolean applyMediaButtonKeyEvent(KeyEvent keyEvent, boolean doubleTapCompleted) {
ControllerInfo controllerInfo = checkNotNull(instance.getMediaNotificationControllerInfo());
Runnable command;
int keyCode = keyEvent.getKeyCode();
@ -1379,15 +1375,6 @@ import org.checkerframework.checker.initialization.qual.Initialized;
postOrRun(
getApplicationHandler(),
() -> {
if (isDismissNotificationEvent) {
ListenableFuture<SessionResult> ignored =
sendCustomCommand(
controllerInfo,
new SessionCommand(
MediaNotification.NOTIFICATION_DISMISSED_EVENT_KEY,
/* extras= */ Bundle.EMPTY),
/* args= */ Bundle.EMPTY);
}
command.run();
sessionStub.getConnectedControllersManager().flushCommandQueue(controllerInfo);
});
@ -1915,10 +1902,7 @@ import org.checkerframework.checker.initialization.qual.Initialized;
playPauseTask =
() -> {
if (isMediaNotificationController(controllerInfo)) {
applyMediaButtonKeyEvent(
keyEvent,
/* doubleTapCompleted= */ false,
/* isDismissNotificationEvent= */ false);
applyMediaButtonKeyEvent(keyEvent, /* doubleTapCompleted= */ false);
} else {
sessionLegacyStub.handleMediaPlayPauseOnHandler(
checkNotNull(controllerInfo.getRemoteUserInfo()));

View File

@ -180,6 +180,9 @@ public abstract class MediaSessionService extends Service {
@GuardedBy("lock")
private @MonotonicNonNull MediaNotificationManager mediaNotificationManager;
@GuardedBy("lock")
private MediaNotification.@MonotonicNonNull Provider mediaNotificationProvider;
@GuardedBy("lock")
private @MonotonicNonNull DefaultActionFactory actionFactory;
@ -634,6 +637,8 @@ public abstract class MediaSessionService extends Service {
/**
* Sets the {@link MediaNotification.Provider} to customize notifications.
*
* <p>This should be called before {@link #onCreate()} returns.
*
* <p>This method can be called from any thread.
*/
@UnstableApi
@ -641,8 +646,7 @@ public abstract class MediaSessionService extends Service {
MediaNotification.Provider mediaNotificationProvider) {
checkNotNull(mediaNotificationProvider);
synchronized (lock) {
getMediaNotificationManager(/* initialMediaNotificationProvider= */ mediaNotificationProvider)
.setMediaNotificationProvider(mediaNotificationProvider);
this.mediaNotificationProvider = mediaNotificationProvider;
}
}
@ -675,23 +679,16 @@ public abstract class MediaSessionService extends Service {
}
private MediaNotificationManager getMediaNotificationManager() {
return getMediaNotificationManager(/* initialMediaNotificationProvider= */ null);
}
private MediaNotificationManager getMediaNotificationManager(
@Nullable MediaNotification.Provider initialMediaNotificationProvider) {
synchronized (lock) {
if (mediaNotificationManager == null) {
if (initialMediaNotificationProvider == null) {
if (mediaNotificationProvider == null) {
checkStateNotNull(getBaseContext(), "Accessing service context before onCreate()");
initialMediaNotificationProvider =
mediaNotificationProvider =
new DefaultMediaNotificationProvider.Builder(getApplicationContext()).build();
}
mediaNotificationManager =
new MediaNotificationManager(
/* mediaSessionService= */ this,
initialMediaNotificationProvider,
getActionFactory());
/* mediaSessionService= */ this, mediaNotificationProvider, getActionFactory());
}
return mediaNotificationManager;
}

View File

@ -710,9 +710,7 @@ import java.util.List;
if (isCommandAvailable(COMMAND_GET_TIMELINE)) {
return getCurrentTimeline();
} else if (isCommandAvailable(COMMAND_GET_CURRENT_MEDIA_ITEM)) {
return getCurrentTimeline().isEmpty()
? Timeline.EMPTY
: new CurrentMediaItemOnlyTimeline(this);
return new CurrentMediaItemOnlyTimeline(this);
}
return Timeline.EMPTY;
}

View File

@ -317,8 +317,7 @@ public final class MediaMetadataCompat implements Parcelable {
METADATA_KEY_ALBUM_ARTIST,
METADATA_KEY_WRITER,
METADATA_KEY_AUTHOR,
METADATA_KEY_COMPOSER,
METADATA_KEY_DISPLAY_SUBTITLE
METADATA_KEY_COMPOSER
};
private static final @BitmapKey String[] PREFERRED_BITMAP_ORDER = {

View File

@ -267,35 +267,74 @@ public final class LegacyConversionsTest {
}
@Test
public void
convertToMediaDescriptionCompat_withoutDisplayTitleWithSubtitle_subtitleUsedAsSubtitle() {
MediaMetadata metadata =
new MediaMetadata.Builder().setTitle("a_title").setSubtitle("a_subtitle").build();
MediaItem mediaItem =
new MediaItem.Builder().setMediaId("testId").setMediaMetadata(metadata).build();
MediaDescriptionCompat descriptionCompat =
LegacyConversions.convertToMediaDescriptionCompat(mediaItem, /* artworkBitmap= */ null);
assertThat(descriptionCompat.getTitle().toString()).isEqualTo("a_title");
assertThat(descriptionCompat.getSubtitle().toString()).isEqualTo("a_subtitle");
}
@Test
public void convertToMediaDescriptionCompat_withDisplayTitleAndSubtitle_subtitleUsedAsSubtitle() {
MediaMetadata metadata =
public void convertToMediaMetadataCompat_displayTitleAndTitleHandledCorrectly() {
MediaMetadata mediaMetadataWithTitleOnly =
new MediaMetadata.Builder()
.setDisplayTitle("a_display_title")
.setSubtitle("a_subtitle")
.setTitle("title")
.setSubtitle("subtitle")
.setDescription("description")
.setArtist("artist")
.setAlbumArtist("albumArtist")
.build();
MediaMetadata mediaMetadataWithDisplayTitleOnly =
new MediaMetadata.Builder()
.setDisplayTitle("displayTitle")
.setSubtitle("subtitle")
.setDescription("description")
.setArtist("artist")
.setAlbumArtist("albumArtist")
.build();
MediaMetadata mediaMetadataWithDisplayTitleAndTitle =
new MediaMetadata.Builder()
.setTitle("title")
.setDisplayTitle("displayTitle")
.setSubtitle("subtitle")
.setDescription("description")
.setArtist("artist")
.setAlbumArtist("albumArtist")
.build();
MediaItem mediaItem =
new MediaItem.Builder().setMediaId("testId").setMediaMetadata(metadata).build();
MediaDescriptionCompat descriptionCompat =
LegacyConversions.convertToMediaDescriptionCompat(mediaItem, /* artworkBitmap= */ null);
MediaDescriptionCompat mediaDescriptionCompatFromDisplayTitleAndTitle =
LegacyConversions.convertToMediaMetadataCompat(
mediaMetadataWithDisplayTitleAndTitle,
"mediaId",
/* mediaUri= */ null,
/* durationMs= */ 10_000L,
/* artworkBitmap= */ null)
.getDescription();
MediaDescriptionCompat mediaDescriptionCompatFromDisplayTitleOnly =
LegacyConversions.convertToMediaMetadataCompat(
mediaMetadataWithDisplayTitleOnly,
"mediaId",
/* mediaUri= */ null,
/* durationMs= */ 10_000L,
/* artworkBitmap= */ null)
.getDescription();
MediaDescriptionCompat mediaDescriptionCompatFromTitleOnly =
LegacyConversions.convertToMediaMetadataCompat(
mediaMetadataWithTitleOnly,
"mediaId",
/* mediaUri= */ null,
/* durationMs= */ 10_000L,
/* artworkBitmap= */ null)
.getDescription();
assertThat(descriptionCompat.getTitle().toString()).isEqualTo("a_display_title");
assertThat(descriptionCompat.getSubtitle().toString()).isEqualTo("a_subtitle");
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getTitle().toString())
.isEqualTo("displayTitle");
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getSubtitle().toString())
.isEqualTo("subtitle");
assertThat(mediaDescriptionCompatFromDisplayTitleAndTitle.getDescription().toString())
.isEqualTo("description");
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getTitle().toString())
.isEqualTo("displayTitle");
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getSubtitle().toString())
.isEqualTo("subtitle");
assertThat(mediaDescriptionCompatFromDisplayTitleOnly.getDescription().toString())
.isEqualTo("description");
assertThat(mediaDescriptionCompatFromTitleOnly.getTitle().toString()).isEqualTo("title");
assertThat(mediaDescriptionCompatFromTitleOnly.getSubtitle().toString()).isEqualTo("artist");
assertThat(mediaDescriptionCompatFromTitleOnly.getDescription().toString())
.isEqualTo("albumArtist");
}
@Test

View File

@ -504,36 +504,6 @@ public class MediaSessionServiceTest {
serviceController.destroy();
}
@Test
public void setMediaNotificationProvider_afterSetForegroundServiceTimeoutMs_usesCustomProvider()
throws TimeoutException {
Context context = ApplicationProvider.getApplicationContext();
ExoPlayer player = new TestExoPlayerBuilder(context).build();
MediaSession session = new MediaSession.Builder(context, player).build();
ServiceController<TestService> serviceController = Robolectric.buildService(TestService.class);
TestService service = serviceController.create().get();
service.setForegroundServiceTimeoutMs(100);
service.setMediaNotificationProvider(
new DefaultMediaNotificationProvider(
service,
/* notificationIdProvider= */ mediaSession -> 2000,
DefaultMediaNotificationProvider.DEFAULT_CHANNEL_ID,
DefaultMediaNotificationProvider.DEFAULT_CHANNEL_NAME_RESOURCE_ID));
service.addSession(session);
// Start a player to trigger notification creation.
player.setMediaItem(MediaItem.fromUri("asset:///media/mp4/sample.mp4"));
player.prepare();
player.play();
runMainLooperUntil(() -> notificationManager.getActiveNotifications().length == 1);
assertThat(getStatusBarNotification(/* notificationId= */ 2000)).isNotNull();
session.release();
player.release();
serviceController.destroy();
}
@Test
public void onStartCommand_mediaButtonEvent_pausedByMediaNotificationController()
throws InterruptedException {

View File

@ -22,8 +22,6 @@ import static org.mockito.Mockito.when;
import android.os.Bundle;
import android.os.Looper;
import androidx.media3.common.Player;
import androidx.media3.common.Timeline;
import androidx.media3.test.utils.FakeTimeline;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.junit.Before;
@ -59,52 +57,6 @@ public class PlayerWrapperTest {
when(player.getApplicationLooper()).thenReturn(Looper.myLooper());
}
@Test
public void
getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineAndGetCurrentMediaItem_isEmpty() {
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(false);
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
assertThat(currentTimeline.isEmpty()).isTrue();
}
@Test
public void getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineWhenEmpty_isEmpty() {
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(true);
when(player.getCurrentTimeline()).thenReturn(Timeline.EMPTY);
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
assertThat(currentTimeline.isEmpty()).isTrue();
}
@Test
public void
getCurrentTimelineWithCommandCheck_withoutCommandGetTimelineWhenMultipleItems_hasSingleItemTimeline() {
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(false);
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(true);
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
assertThat(currentTimeline.getWindowCount()).isEqualTo(1);
}
@Test
public void getCurrentTimelineWithCommandCheck_withCommandGetTimeline_returnOriginalTimeline() {
when(player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)).thenReturn(true);
when(player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)).thenReturn(false);
when(player.getCurrentTimeline()).thenReturn(new FakeTimeline(/* windowCount= */ 3));
Timeline currentTimeline = playerWrapper.getCurrentTimelineWithCommandCheck();
assertThat(currentTimeline.getWindowCount()).isEqualTo(3);
}
@Test
public void createSessionPositionInfoForBundling() {
int testAdGroupIndex = 12;

View File

@ -11,7 +11,6 @@ track 0:
sample count = 58
track duration = 2100700
format 0:
averageBitrate = 12593505
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -270,7 +269,6 @@ track 1:
sample count = 1
track duration = 466800
format 0:
averageBitrate = 2593047
id = 2
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -301,7 +299,6 @@ track 2:
sample count = 58
track duration = 2100700
format 0:
averageBitrate = 99395
id = 3
containerMimeType = video/mp4
sampleMimeType = application/microvideo-meta-stream
@ -544,7 +541,6 @@ track 3:
sample count = 1
track duration = 1133700
format 0:
averageBitrate = 416
id = 4
containerMimeType = video/mp4
sampleMimeType = application/motionphoto-image-meta

View File

@ -11,7 +11,6 @@ track 0:
sample count = 44
track duration = 2100700
format 0:
averageBitrate = 12593505
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -214,7 +213,6 @@ track 1:
sample count = 1
track duration = 466800
format 0:
averageBitrate = 2593047
id = 2
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -245,7 +243,6 @@ track 2:
sample count = 38
track duration = 2100700
format 0:
averageBitrate = 99395
id = 3
containerMimeType = video/mp4
sampleMimeType = application/microvideo-meta-stream
@ -408,7 +405,6 @@ track 3:
sample count = 1
track duration = 1133700
format 0:
averageBitrate = 416
id = 4
containerMimeType = video/mp4
sampleMimeType = application/motionphoto-image-meta

View File

@ -11,7 +11,6 @@ track 0:
sample count = 23
track duration = 2100700
format 0:
averageBitrate = 12593505
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -130,7 +129,6 @@ track 1:
sample count = 1
track duration = 466800
format 0:
averageBitrate = 2593047
id = 2
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -161,7 +159,6 @@ track 2:
sample count = 17
track duration = 2100700
format 0:
averageBitrate = 99395
id = 3
containerMimeType = video/mp4
sampleMimeType = application/microvideo-meta-stream
@ -240,7 +237,6 @@ track 3:
sample count = 1
track duration = 1133700
format 0:
averageBitrate = 416
id = 4
containerMimeType = video/mp4
sampleMimeType = application/motionphoto-image-meta

View File

@ -11,7 +11,6 @@ track 0:
sample count = 2
track duration = 2100700
format 0:
averageBitrate = 12593505
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -46,7 +45,6 @@ track 1:
sample count = 1
track duration = 466800
format 0:
averageBitrate = 2593047
id = 2
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -77,7 +75,6 @@ track 2:
sample count = 1
track duration = 2100700
format 0:
averageBitrate = 99395
id = 3
containerMimeType = video/mp4
sampleMimeType = application/microvideo-meta-stream
@ -92,7 +89,6 @@ track 3:
sample count = 1
track duration = 1133700
format 0:
averageBitrate = 416
id = 4
containerMimeType = video/mp4
sampleMimeType = application/motionphoto-image-meta

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -26,7 +25,6 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -26,7 +25,6 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -26,7 +25,6 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 867000
format 0:
averageBitrate = 35692
id = 1
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -26,7 +25,6 @@ track 0:
lumaBitdepth = 8
chromaBitdepth = 8
roleFlags = [main]
language = en
metadata = entries=[TSSE: description=null: values=[Lavf58.42.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 32, hash 1F3D6E87

View File

@ -11,7 +11,6 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -201,7 +200,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -217,7 +215,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 31
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -153,7 +152,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -169,7 +167,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 16
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -93,7 +92,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -109,7 +107,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -33,7 +32,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -49,7 +47,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -201,7 +200,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -217,7 +215,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 31
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -153,7 +152,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -169,7 +167,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 16
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -93,7 +92,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -109,7 +107,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 1
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -33,7 +32,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -49,7 +47,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -201,7 +200,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -217,7 +215,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -11,7 +11,6 @@ track 0:
sample count = 43
track duration = 1019300
format 0:
averageBitrate = 74502
peakBitrate = 200000
id = 1
containerMimeType = video/mp4
@ -21,7 +20,7 @@ track 0:
channelCount = 1
sampleRate = 44100
language = und
metadata = entries=[Mp4AlternateGroup: 1, TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 2, hash 5F7
sample 0:
@ -201,7 +200,6 @@ track 1:
sample count = 30
track duration = 1020100
format 0:
averageBitrate = 1026770
id = 2
containerMimeType = video/mp4
sampleMimeType = video/avc
@ -217,7 +215,6 @@ track 1:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf56.1.0], Mp4Timestamp: creation time=3790610215, modification time=3790610215, timescale=30000]
initializationData:
data = length 19, hash D3863A4C

View File

@ -20,7 +20,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

View File

@ -20,7 +20,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

View File

@ -20,7 +20,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

View File

@ -20,7 +20,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 27, hash 9F13E633
data = length 8, hash 94643657

View File

@ -21,7 +21,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

View File

@ -21,7 +21,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

View File

@ -21,7 +21,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

View File

@ -21,7 +21,6 @@ track 0:
colorTransfer = 3
lumaBitdepth = 8
chromaBitdepth = 8
language = und
initializationData:
data = length 2426, hash 25737613
sample 0:

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

View File

@ -11,8 +11,6 @@ track 0:
sample count = 30
track duration = 1000000
format 0:
averageBitrate = 155160
peakBitrate = 155160
id = 1
containerMimeType = video/mp4
sampleMimeType = video/hevc
@ -27,7 +25,6 @@ track 0:
colorRange = 2
lumaBitdepth = 8
chromaBitdepth = 8
language = und
metadata = entries=[TSSE: description=null: values=[Lavf60.16.100], Mp4Timestamp: creation time=0, modification time=0, timescale=1000]
initializationData:
data = length 84, hash 6FF5034A

Some files were not shown because too many files have changed in this diff Show More