Compare commits

..

No commits in common. "dae5ebb820571e8c1886df11d99c15e911eb20ee" and "4189a7c8bd5cdea8341a086380bab75380e2b64a" have entirely different histories.

10 changed files with 287 additions and 452 deletions

View File

@@ -171,6 +171,7 @@ public abstract class DecoderAudioRenderer<
private long largestQueuedPresentationTimeUs;
private long lastBufferInStreamPresentationTimeUs;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
public DecoderAudioRenderer() {
this(/* eventHandler= */ null, /* eventListener= */ null);
@@ -249,13 +250,17 @@
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported.
if (audioTrackBufferDurationUs == C.TIME_UNSET) {
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
bufferedDurationUs =
(long)
(bufferedDurationUs
@@ -314,6 +319,7 @@ public abstract class DecoderAudioRenderer<
try {
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@@ -595,6 +601,7 @@ public abstract class DecoderAudioRenderer<
outputStreamEnded = true;
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
}
private void flushDecoder() throws ExoPlaybackException {
@@ -670,6 +677,7 @@ public abstract class DecoderAudioRenderer<
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
@@ -699,6 +707,7 @@ public abstract class DecoderAudioRenderer<
setOutputStreamOffsetUs(C.TIME_UNSET);
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
setSourceDrmSession(null);
releaseDecoder();

View File

@@ -129,6 +129,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private int rendererPriority;
private boolean isStarted;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
/**
* @param context A context.
@@ -527,14 +528,18 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported.
if (audioTrackBufferDurationUs == C.TIME_UNSET) {
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
bufferedDurationUs =
(long)
(bufferedDurationUs
@@ -692,6 +697,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
}
@@ -716,6 +722,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSinkNeedsReset = true;
inputFormat = null;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
audioSink.flush();
} finally {
@@ -731,6 +738,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
protected void onReset() {
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
super.onReset();
} finally {
@@ -870,6 +878,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
}
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e,

View File

@@ -25,14 +25,17 @@ import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.NullableType;
import androidx.media3.exoplayer.LoadingInfo;
import androidx.media3.exoplayer.SeekParameters;
import androidx.media3.exoplayer.source.chunk.Chunk;
import androidx.media3.exoplayer.source.chunk.MediaChunk;
import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
import androidx.media3.exoplayer.trackselection.ForwardingTrackSelection;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
/** Merges multiple {@link MediaPeriod}s. */
/* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback {
@@ -137,8 +140,7 @@ import java.util.IdentityHashMap;
TrackGroup mergedTrackGroup = mergedTrackSelection.getTrackGroup();
TrackGroup childTrackGroup =
checkNotNull(childTrackGroupByMergedTrackGroup.get(mergedTrackGroup));
childSelections[j] =
new MergingMediaPeriodTrackSelection(mergedTrackSelection, childTrackGroup);
childSelections[j] = new ForwardingTrackSelection(mergedTrackSelection, childTrackGroup);
} else {
childSelections[j] = null;
}
@@ -311,47 +313,162 @@ import java.util.IdentityHashMap;
Assertions.checkNotNull(callback).onContinueLoadingRequested(this);
}
private static final class MergingMediaPeriodTrackSelection extends ForwardingTrackSelection {
private static final class ForwardingTrackSelection implements ExoTrackSelection {
private final ExoTrackSelection trackSelection;
private final TrackGroup trackGroup;
public MergingMediaPeriodTrackSelection(
ExoTrackSelection trackSelection, TrackGroup trackGroup) {
super(trackSelection);
public ForwardingTrackSelection(ExoTrackSelection trackSelection, TrackGroup trackGroup) {
this.trackSelection = trackSelection;
this.trackGroup = trackGroup;
}
@Override
public @Type int getType() {
return trackSelection.getType();
}
@Override
public TrackGroup getTrackGroup() {
return trackGroup;
}
@Override
public int length() {
return trackSelection.length();
}
@Override
public Format getFormat(int index) {
return trackGroup.getFormat(getWrappedInstance().getIndexInTrackGroup(index));
return trackGroup.getFormat(trackSelection.getIndexInTrackGroup(index));
}
@Override
public int getIndexInTrackGroup(int index) {
return trackSelection.getIndexInTrackGroup(index);
}
@Override
public int indexOf(Format format) {
return getWrappedInstance().indexOf(trackGroup.indexOf(format));
return trackSelection.indexOf(trackGroup.indexOf(format));
}
@Override
public int indexOf(int indexInTrackGroup) {
return trackSelection.indexOf(indexInTrackGroup);
}
@Override
public void enable() {
trackSelection.enable();
}
@Override
public void disable() {
trackSelection.disable();
}
@Override
public Format getSelectedFormat() {
return trackGroup.getFormat(getWrappedInstance().getSelectedIndexInTrackGroup());
return trackGroup.getFormat(trackSelection.getSelectedIndexInTrackGroup());
}
@Override
public boolean equals(@Nullable Object other) {
if (!super.equals(other) || !(other instanceof MergingMediaPeriodTrackSelection)) {
public int getSelectedIndexInTrackGroup() {
return trackSelection.getSelectedIndexInTrackGroup();
}
@Override
public int getSelectedIndex() {
return trackSelection.getSelectedIndex();
}
@Override
public @C.SelectionReason int getSelectionReason() {
return trackSelection.getSelectionReason();
}
@Nullable
@Override
public Object getSelectionData() {
return trackSelection.getSelectionData();
}
@Override
public void onPlaybackSpeed(float playbackSpeed) {
trackSelection.onPlaybackSpeed(playbackSpeed);
}
@Override
public void onDiscontinuity() {
trackSelection.onDiscontinuity();
}
@Override
public void onRebuffer() {
trackSelection.onRebuffer();
}
@Override
public void onPlayWhenReadyChanged(boolean playWhenReady) {
trackSelection.onPlayWhenReadyChanged(playWhenReady);
}
@Override
public void updateSelectedTrack(
long playbackPositionUs,
long bufferedDurationUs,
long availableDurationUs,
List<? extends MediaChunk> queue,
MediaChunkIterator[] mediaChunkIterators) {
trackSelection.updateSelectedTrack(
playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
}
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
}
@Override
public boolean shouldCancelChunkLoad(
long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
}
@Override
public boolean excludeTrack(int index, long exclusionDurationMs) {
return trackSelection.excludeTrack(index, exclusionDurationMs);
}
@Override
public boolean isTrackExcluded(int index, long nowMs) {
return trackSelection.isTrackExcluded(index, nowMs);
}
@Override
public long getLatestBitrateEstimate() {
return trackSelection.getLatestBitrateEstimate();
}
@Override
public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ForwardingTrackSelection)) {
return false;
}
MergingMediaPeriodTrackSelection that = (MergingMediaPeriodTrackSelection) other;
return trackGroup.equals(that.trackGroup);
ForwardingTrackSelection that = (ForwardingTrackSelection) o;
return trackSelection.equals(that.trackSelection) && trackGroup.equals(that.trackGroup);
}
@Override
public int hashCode() {
return 31 * super.hashCode() + trackGroup.hashCode();
int result = 17;
result = 31 * result + trackGroup.hashCode();
result = 31 * result + trackSelection.hashCode();
return result;
}
}
}

View File

@@ -1,191 +0,0 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer.trackselection;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.TrackGroup;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.source.chunk.Chunk;
import androidx.media3.exoplayer.source.chunk.MediaChunk;
import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
import java.util.List;
/** An {@link ExoTrackSelection} forwarding all calls to a wrapped instance. */
@UnstableApi
public class ForwardingTrackSelection implements ExoTrackSelection {
private final ExoTrackSelection trackSelection;
/**
* Creates the forwarding track selection.
*
* @param trackSelection The wrapped {@link ExoTrackSelection}.
*/
public ForwardingTrackSelection(ExoTrackSelection trackSelection) {
this.trackSelection = trackSelection;
}
/** Returns the wrapped {@link ExoTrackSelection}. */
public ExoTrackSelection getWrappedInstance() {
return trackSelection;
}
@Override
public void enable() {
trackSelection.enable();
}
@Override
public void disable() {
trackSelection.disable();
}
@Override
public Format getSelectedFormat() {
return trackSelection.getSelectedFormat();
}
@Override
public int getSelectedIndexInTrackGroup() {
return trackSelection.getSelectedIndexInTrackGroup();
}
@Override
public int getSelectedIndex() {
return trackSelection.getSelectedIndex();
}
@Override
public @C.SelectionReason int getSelectionReason() {
return trackSelection.getSelectionReason();
}
@Nullable
@Override
public Object getSelectionData() {
return trackSelection.getSelectionData();
}
@Override
public void onPlaybackSpeed(float playbackSpeed) {
trackSelection.onPlaybackSpeed(playbackSpeed);
}
@Override
public void onDiscontinuity() {
trackSelection.onDiscontinuity();
}
@Override
public void onRebuffer() {
trackSelection.onRebuffer();
}
@Override
public void onPlayWhenReadyChanged(boolean playWhenReady) {
trackSelection.onPlayWhenReadyChanged(playWhenReady);
}
@Override
public void updateSelectedTrack(
long playbackPositionUs,
long bufferedDurationUs,
long availableDurationUs,
List<? extends MediaChunk> queue,
MediaChunkIterator[] mediaChunkIterators) {
trackSelection.updateSelectedTrack(
playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
}
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
}
@Override
public boolean shouldCancelChunkLoad(
long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
}
@Override
public boolean excludeTrack(int index, long exclusionDurationMs) {
return trackSelection.excludeTrack(index, exclusionDurationMs);
}
@Override
public boolean isTrackExcluded(int index, long nowMs) {
return trackSelection.isTrackExcluded(index, nowMs);
}
@Override
public long getLatestBitrateEstimate() {
return trackSelection.getLatestBitrateEstimate();
}
@Override
public @Type int getType() {
return trackSelection.getType();
}
@Override
public TrackGroup getTrackGroup() {
return trackSelection.getTrackGroup();
}
@Override
public int length() {
return trackSelection.length();
}
@Override
public Format getFormat(int index) {
return trackSelection.getFormat(index);
}
@Override
public int getIndexInTrackGroup(int index) {
return trackSelection.getIndexInTrackGroup(index);
}
@Override
public int indexOf(Format format) {
return trackSelection.indexOf(format);
}
@Override
public int indexOf(int indexInTrackGroup) {
return trackSelection.indexOf(indexInTrackGroup);
}
@Override
public int hashCode() {
return trackSelection.hashCode();
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof ForwardingTrackSelection)) {
return false;
}
ForwardingTrackSelection other = (ForwardingTrackSelection) obj;
return trackSelection.equals(other.trackSelection);
}
}

View File

@@ -2473,22 +2473,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
}
}
@Override
protected boolean flushOrReleaseCodec() {
MediaCodecInfo codecInfo = getCodecInfo();
if (videoSink != null
&& codecInfo != null
&& (codecInfo.name.equals("c2.mtk.avc.decoder")
|| codecInfo.name.equals("c2.mtk.hevc.decoder"))) {
// Flushing a c2.mtk decoder that outputs to a SurfaceTexture often fails and leaves
// the SurfaceTexture's BufferQueue in an unrecoverable state. Release the codec instead.
// See b/362904942 for more details.
releaseCodec();
return true;
}
return super.flushOrReleaseCodec();
}
/**
* Returns whether the device is known to do post processing by default that isn't compatible with
* ExoPlayer.

View File

@@ -512,6 +512,65 @@ public class DecoderAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
ForwardingAudioSink forwardingAudioSink =
new ForwardingAudioSink(mockAudioSink) {
@Override
public void playToEndOfStream() throws WriteException {
super.playToEndOfStream();
hasCalledPlayToEndOfStream.set(true);
}
};
audioRenderer = createAudioRenderer(forwardingAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
audioRenderer.setCurrentStreamFinal();
while (!hasCalledPlayToEndOfStream.get()) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);

View File

@@ -1026,6 +1026,78 @@ public class MediaCodecAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
throws Exception {
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
new DefaultMediaCodecAdapterFactory(
ApplicationProvider.getApplicationContext(),
() -> {
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
return callbackThread;
},
() -> {
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
return queueingThread;
}),
mediaCodecSelector,
/* enableDecoderFallback= */ false,
new Handler(Looper.getMainLooper()),
audioRendererEventListener,
audioSink) {
@Override
protected void renderToEndOfStream() throws ExoPlaybackException {
super.renderToEndOfStream();
hasCalledRenderToEndOfStream.set(true);
}
};
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
mediaCodecAudioRenderer.start();
mediaCodecAudioRenderer.setCurrentStreamFinal();
while (!hasCalledRenderToEndOfStream.get()) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);

View File

@@ -1,38 +0,0 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer.trackselection;
import static androidx.media3.test.utils.TestUtil.assertForwardingClassForwardsAllMethods;
import static androidx.media3.test.utils.TestUtil.assertSubclassOverridesAllMethods;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link ForwardingTrackSelection}. */
@RunWith(AndroidJUnit4.class)
public class ForwardingTrackSelectionTest {
@Test
public void overridesAllMethods() throws NoSuchMethodException {
assertSubclassOverridesAllMethods(ExoTrackSelection.class, ForwardingTrackSelection.class);
}
@Test
public void forwardsAllMethods() throws Exception {
assertForwardingClassForwardsAllMethods(ExoTrackSelection.class, ForwardingTrackSelection::new);
}
}

View File

@@ -46,7 +46,6 @@ import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** A default implementation of {@link Codec.EncoderFactory}. */
@@ -396,33 +395,6 @@ public final class DefaultEncoderFactory implements Codec.EncoderFactory {
}
}
int maxBFrames = supportedVideoEncoderSettings.maxBFrames;
if (SDK_INT >= 29 && maxBFrames != VideoEncoderSettings.NO_VALUE) {
mediaFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);
}
int numNonBidirectionalTemporalLayers =
supportedVideoEncoderSettings.numNonBidirectionalTemporalLayers;
int numBidirectionalTemporalLayers =
supportedVideoEncoderSettings.numBidirectionalTemporalLayers;
if (SDK_INT >= 25 && numNonBidirectionalTemporalLayers >= 0) {
String temporalSchema;
if (numNonBidirectionalTemporalLayers == 0) {
temporalSchema = "none";
} else if (numBidirectionalTemporalLayers > 0) {
temporalSchema =
String.format(
Locale.ROOT,
"android.generic.%d+%d",
numNonBidirectionalTemporalLayers,
numBidirectionalTemporalLayers);
} else {
temporalSchema =
String.format(Locale.ROOT, "android.generic.%d", numNonBidirectionalTemporalLayers);
}
mediaFormat.setString(MediaFormat.KEY_TEMPORAL_LAYERING, temporalSchema);
}
return new DefaultCodec(
context,
encoderSupportedFormat,

View File

@@ -337,164 +337,6 @@ public class DefaultEncoderFactoryTest {
.isFalse();
}
@Test
@Config(sdk = 29)
public void createForVideoEncoding_withMaxBFrames_configuresEncoderWithMaxBFrames()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(videoEncoder.getConfigurationMediaFormat().getInteger(MediaFormat.KEY_MAX_B_FRAMES))
.isEqualTo(3);
}
@Test
@Config(sdk = 23)
public void createForVideoEncoding_withMaxBFramesOnApi23_doesNotConfigureMaxBFrames()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
.isFalse();
}
@Test
@Config(sdk = 29)
public void createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureMaxBFrames()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
.isFalse();
}
@Config(sdk = 29)
@Test
public void
createForVideoEncoding_withTemporalLayeringSchemaWithZeroLayers_configuresEncoderWithTemporalLayeringSchema()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder()
.setTemporalLayers(
/* numNonBidirectionalLayers= */ 0, /* numBidirectionalLayers= */ 0)
.build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(
videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
.isEqualTo("none");
}
@Config(sdk = 29)
@Test
public void
createForVideoEncoding_withTemporalLayeringSchemaWithoutBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder()
.setTemporalLayers(
/* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 0)
.build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(
videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
.isEqualTo("android.generic.1");
}
@Config(sdk = 29)
@Test
public void
createForVideoEncoding_withTemporalLayeringSchemaWithBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder()
.setTemporalLayers(
/* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
.build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(
videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
.isEqualTo("android.generic.1+2");
}
@Config(sdk = 23)
@Test
public void
createForVideoEncoding_withTemporalLayeringSchemaOnApi23_doesNotConfigureTemporalLayeringSchema()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder()
.setTemporalLayers(
/* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
.build())
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(
videoEncoder
.getConfigurationMediaFormat()
.containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
.isFalse();
}
@Config(sdk = 29)
@Test
public void
createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureTemporalLayeringSchema()
throws Exception {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
DefaultCodec videoEncoder =
new DefaultEncoderFactory.Builder(context)
.build()
.createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
assertThat(
videoEncoder
.getConfigurationMediaFormat()
.containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
.isFalse();
}
@Test
public void createForVideoEncoding_withNoAvailableEncoderFromEncoderSelector_throws() {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);