Compare commits

...

6 Commits

Author SHA1 Message Date
Googler
dae5ebb820 Update DefaultEncoderFactory to set GOP parameters
If set on the requested VideoEncoderSettings, then
these parameters are passed into the MediaFormat
used by the DefaultEncoderFactory to configure the
underlying codec.

PiperOrigin-RevId: 751059914
2025-04-24 11:09:49 -07:00
dancho
910b6ab884 Do not flush c2.mtk decoders when outputting to a videoSink
Flushing a c2.mtk decoder that outputs to a SurfaceTexture often
fails and leaves the SurfaceTexture's BufferQueue in an unrecoverable
state. Release the codec instead.

PiperOrigin-RevId: 751006875
2025-04-24 08:39:46 -07:00
michaelkatz
2e20d35c3d Set static interval as default for dynamic scheduling with audio
Applications providing custom AudioSink implementations should have the dynamic scheduling for audio playback fall back to the static interval if they do not support AudioSink#getAudioTrackBufferSizeUs().

This CL specifically removes the bypass of the static default when rendering to end of stream. For scenarios with an intermediary layer between MediaCodecAudioRenderer and DefaultAudioSink, there can be potential underruns even though the MediaCodecAudioRenderer has nothing else to write.

PiperOrigin-RevId: 750986145
2025-04-24 07:26:04 -07:00
Copybara-Service
6bc54ff699 Merge pull request #2366 from colinkho:custom-updateselectedtrack
PiperOrigin-RevId: 750982824
2025-04-24 07:14:31 -07:00
tonihei
6ce9fdee8c Add test and formatting fixes 2025-04-24 12:52:22 +01:00
Colin Kho
25feac7664 Add ForwardingTrackSelection to allow access to the underlying instance 2025-04-24 12:26:31 +01:00
10 changed files with 452 additions and 287 deletions

View File

@ -171,7 +171,6 @@ public abstract class DecoderAudioRenderer<
private long largestQueuedPresentationTimeUs;
private long lastBufferInStreamPresentationTimeUs;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
public DecoderAudioRenderer() {
this(/* eventHandler= */ null, /* eventListener= */ null);
@ -250,17 +249,13 @@ public abstract class DecoderAudioRenderer<
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
// Return default if getAudioTrackBufferSizeUs is unsupported.
if (audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
bufferedDurationUs =
(long)
(bufferedDurationUs
@ -319,7 +314,6 @@ public abstract class DecoderAudioRenderer<
try {
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@ -601,7 +595,6 @@ public abstract class DecoderAudioRenderer<
outputStreamEnded = true;
audioSink.playToEndOfStream();
nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
isRendereringToEndOfStream = true;
}
private void flushDecoder() throws ExoPlaybackException {
@ -677,7 +670,6 @@ public abstract class DecoderAudioRenderer<
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
@ -707,7 +699,6 @@ public abstract class DecoderAudioRenderer<
setOutputStreamOffsetUs(C.TIME_UNSET);
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
setSourceDrmSession(null);
releaseDecoder();

View File

@ -129,7 +129,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private int rendererPriority;
private boolean isStarted;
private long nextBufferToWritePresentationTimeUs;
private boolean isRendereringToEndOfStream;
/**
* @param context A context.
@ -528,18 +527,14 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
// Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
// to end of stream.
if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
// Return default if getAudioTrackBufferSizeUs is unsupported.
if (audioTrackBufferDurationUs == C.TIME_UNSET) {
return super.getDurationToProgressUs(
positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
}
// Compare written, yet-to-play content duration against the audio track buffer size.
long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
long bufferedDurationUs =
audioTrackBufferDurationUs != C.TIME_UNSET
? min(audioTrackBufferDurationUs, writtenDurationUs)
: writtenDurationUs;
long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
bufferedDurationUs =
(long)
(bufferedDurationUs
@ -697,7 +692,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
currentPositionUs = positionUs;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
hasPendingReportedSkippedSilence = false;
allowPositionDiscontinuity = true;
}
@ -722,7 +716,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSinkNeedsReset = true;
inputFormat = null;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
audioSink.flush();
} finally {
@ -738,7 +731,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
protected void onReset() {
hasPendingReportedSkippedSilence = false;
nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
isRendereringToEndOfStream = false;
try {
super.onReset();
} finally {
@ -878,7 +870,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
}
isRendereringToEndOfStream = true;
} catch (AudioSink.WriteException e) {
throw createRendererException(
e,

View File

@ -25,17 +25,14 @@ import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.NullableType;
import androidx.media3.exoplayer.LoadingInfo;
import androidx.media3.exoplayer.SeekParameters;
import androidx.media3.exoplayer.source.chunk.Chunk;
import androidx.media3.exoplayer.source.chunk.MediaChunk;
import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
import androidx.media3.exoplayer.trackselection.ForwardingTrackSelection;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
/** Merges multiple {@link MediaPeriod}s. */
/* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback {
@ -140,7 +137,8 @@ import java.util.List;
TrackGroup mergedTrackGroup = mergedTrackSelection.getTrackGroup();
TrackGroup childTrackGroup =
checkNotNull(childTrackGroupByMergedTrackGroup.get(mergedTrackGroup));
childSelections[j] = new ForwardingTrackSelection(mergedTrackSelection, childTrackGroup);
childSelections[j] =
new MergingMediaPeriodTrackSelection(mergedTrackSelection, childTrackGroup);
} else {
childSelections[j] = null;
}
@ -313,162 +311,47 @@ import java.util.List;
Assertions.checkNotNull(callback).onContinueLoadingRequested(this);
}
private static final class ForwardingTrackSelection implements ExoTrackSelection {
private final ExoTrackSelection trackSelection;
private static final class MergingMediaPeriodTrackSelection extends ForwardingTrackSelection {
private final TrackGroup trackGroup;
public ForwardingTrackSelection(ExoTrackSelection trackSelection, TrackGroup trackGroup) {
this.trackSelection = trackSelection;
public MergingMediaPeriodTrackSelection(
ExoTrackSelection trackSelection, TrackGroup trackGroup) {
super(trackSelection);
this.trackGroup = trackGroup;
}
@Override
public @Type int getType() {
return trackSelection.getType();
}
@Override
public TrackGroup getTrackGroup() {
return trackGroup;
}
@Override
public int length() {
return trackSelection.length();
}
@Override
public Format getFormat(int index) {
return trackGroup.getFormat(trackSelection.getIndexInTrackGroup(index));
}
@Override
public int getIndexInTrackGroup(int index) {
return trackSelection.getIndexInTrackGroup(index);
return trackGroup.getFormat(getWrappedInstance().getIndexInTrackGroup(index));
}
@Override
public int indexOf(Format format) {
return trackSelection.indexOf(trackGroup.indexOf(format));
}
@Override
public int indexOf(int indexInTrackGroup) {
return trackSelection.indexOf(indexInTrackGroup);
}
@Override
public void enable() {
trackSelection.enable();
}
@Override
public void disable() {
trackSelection.disable();
return getWrappedInstance().indexOf(trackGroup.indexOf(format));
}
@Override
public Format getSelectedFormat() {
return trackGroup.getFormat(trackSelection.getSelectedIndexInTrackGroup());
return trackGroup.getFormat(getWrappedInstance().getSelectedIndexInTrackGroup());
}
@Override
public int getSelectedIndexInTrackGroup() {
return trackSelection.getSelectedIndexInTrackGroup();
}
@Override
public int getSelectedIndex() {
return trackSelection.getSelectedIndex();
}
@Override
public @C.SelectionReason int getSelectionReason() {
return trackSelection.getSelectionReason();
}
@Nullable
@Override
public Object getSelectionData() {
return trackSelection.getSelectionData();
}
@Override
public void onPlaybackSpeed(float playbackSpeed) {
trackSelection.onPlaybackSpeed(playbackSpeed);
}
@Override
public void onDiscontinuity() {
trackSelection.onDiscontinuity();
}
@Override
public void onRebuffer() {
trackSelection.onRebuffer();
}
@Override
public void onPlayWhenReadyChanged(boolean playWhenReady) {
trackSelection.onPlayWhenReadyChanged(playWhenReady);
}
@Override
public void updateSelectedTrack(
long playbackPositionUs,
long bufferedDurationUs,
long availableDurationUs,
List<? extends MediaChunk> queue,
MediaChunkIterator[] mediaChunkIterators) {
trackSelection.updateSelectedTrack(
playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
}
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
}
@Override
public boolean shouldCancelChunkLoad(
long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
}
@Override
public boolean excludeTrack(int index, long exclusionDurationMs) {
return trackSelection.excludeTrack(index, exclusionDurationMs);
}
@Override
public boolean isTrackExcluded(int index, long nowMs) {
return trackSelection.isTrackExcluded(index, nowMs);
}
@Override
public long getLatestBitrateEstimate() {
return trackSelection.getLatestBitrateEstimate();
}
@Override
public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ForwardingTrackSelection)) {
public boolean equals(@Nullable Object other) {
if (!super.equals(other) || !(other instanceof MergingMediaPeriodTrackSelection)) {
return false;
}
ForwardingTrackSelection that = (ForwardingTrackSelection) o;
return trackSelection.equals(that.trackSelection) && trackGroup.equals(that.trackGroup);
MergingMediaPeriodTrackSelection that = (MergingMediaPeriodTrackSelection) other;
return trackGroup.equals(that.trackGroup);
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + trackGroup.hashCode();
result = 31 * result + trackSelection.hashCode();
return result;
return 31 * super.hashCode() + trackGroup.hashCode();
}
}
}

View File

@ -0,0 +1,191 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer.trackselection;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.TrackGroup;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.exoplayer.source.chunk.Chunk;
import androidx.media3.exoplayer.source.chunk.MediaChunk;
import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
import java.util.List;
/** An {@link ExoTrackSelection} forwarding all calls to a wrapped instance. */
@UnstableApi
public class ForwardingTrackSelection implements ExoTrackSelection {

  // The delegate that receives every forwarded call.
  private final ExoTrackSelection trackSelection;

  /**
   * Creates the forwarding track selection.
   *
   * @param trackSelection The wrapped {@link ExoTrackSelection}.
   */
  public ForwardingTrackSelection(ExoTrackSelection trackSelection) {
    this.trackSelection = trackSelection;
  }

  /** Returns the wrapped {@link ExoTrackSelection}. */
  public ExoTrackSelection getWrappedInstance() {
    return trackSelection;
  }

  @Override
  public void enable() {
    trackSelection.enable();
  }

  @Override
  public void disable() {
    trackSelection.disable();
  }

  @Override
  public Format getSelectedFormat() {
    return trackSelection.getSelectedFormat();
  }

  @Override
  public int getSelectedIndexInTrackGroup() {
    return trackSelection.getSelectedIndexInTrackGroup();
  }

  @Override
  public int getSelectedIndex() {
    return trackSelection.getSelectedIndex();
  }

  @Override
  public @C.SelectionReason int getSelectionReason() {
    return trackSelection.getSelectionReason();
  }

  @Nullable
  @Override
  public Object getSelectionData() {
    return trackSelection.getSelectionData();
  }

  @Override
  public void onPlaybackSpeed(float playbackSpeed) {
    trackSelection.onPlaybackSpeed(playbackSpeed);
  }

  @Override
  public void onDiscontinuity() {
    trackSelection.onDiscontinuity();
  }

  @Override
  public void onRebuffer() {
    trackSelection.onRebuffer();
  }

  @Override
  public void onPlayWhenReadyChanged(boolean playWhenReady) {
    trackSelection.onPlayWhenReadyChanged(playWhenReady);
  }

  @Override
  public void updateSelectedTrack(
      long playbackPositionUs,
      long bufferedDurationUs,
      long availableDurationUs,
      List<? extends MediaChunk> queue,
      MediaChunkIterator[] mediaChunkIterators) {
    trackSelection.updateSelectedTrack(
        playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
  }

  @Override
  public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
    return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
  }

  @Override
  public boolean shouldCancelChunkLoad(
      long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
    return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
  }

  @Override
  public boolean excludeTrack(int index, long exclusionDurationMs) {
    return trackSelection.excludeTrack(index, exclusionDurationMs);
  }

  @Override
  public boolean isTrackExcluded(int index, long nowMs) {
    return trackSelection.isTrackExcluded(index, nowMs);
  }

  @Override
  public long getLatestBitrateEstimate() {
    return trackSelection.getLatestBitrateEstimate();
  }

  @Override
  public @Type int getType() {
    return trackSelection.getType();
  }

  @Override
  public TrackGroup getTrackGroup() {
    return trackSelection.getTrackGroup();
  }

  @Override
  public int length() {
    return trackSelection.length();
  }

  @Override
  public Format getFormat(int index) {
    return trackSelection.getFormat(index);
  }

  @Override
  public int getIndexInTrackGroup(int index) {
    return trackSelection.getIndexInTrackGroup(index);
  }

  @Override
  public int indexOf(Format format) {
    return trackSelection.indexOf(format);
  }

  @Override
  public int indexOf(int indexInTrackGroup) {
    return trackSelection.indexOf(indexInTrackGroup);
  }

  // Delegates to the wrapped selection's hash code. This is consistent with equals(): equal
  // wrappers wrap equal delegates, so they produce equal hashes.
  @Override
  public int hashCode() {
    return trackSelection.hashCode();
  }

  // Two forwarding selections are equal iff their wrapped instances are equal. Note that a wrapper
  // is never equal to the bare (unwrapped) selection it forwards to, since the instanceof check
  // rejects non-wrapper objects.
  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof ForwardingTrackSelection)) {
      return false;
    }
    ForwardingTrackSelection other = (ForwardingTrackSelection) obj;
    return trackSelection.equals(other.trackSelection);
  }
}

View File

@ -2473,6 +2473,22 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
}
}
/**
 * Flushes or releases the codec when the renderer is reset.
 *
 * <p>When outputting to a {@code videoSink} with a MediaTek {@code c2.mtk} AVC/HEVC decoder, the
 * codec is released rather than flushed, because flushing such decoders often fails and corrupts
 * the output SurfaceTexture's BufferQueue (see b/362904942). All other cases defer to the
 * superclass behavior.
 *
 * @return {@code true} if the codec was released here; otherwise the superclass result.
 */
@Override
protected boolean flushOrReleaseCodec() {
  MediaCodecInfo codecInfo = getCodecInfo();
  if (videoSink != null
      && codecInfo != null
      && (codecInfo.name.equals("c2.mtk.avc.decoder")
          || codecInfo.name.equals("c2.mtk.hevc.decoder"))) {
    // Flushing a c2.mtk decoder that outputs to a SurfaceTexture often fails and leaves
    // the SurfaceTexture's BufferQueue in an unrecoverable state. Release the codec instead.
    // See b/362904942 for more details.
    releaseCodec();
    return true;
  }
  return super.flushOrReleaseCodec();
}
/**
* Returns whether the device is known to do post processing by default that isn't compatible with
* ExoPlayer.

View File

@ -512,65 +512,6 @@ public class DecoderAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
throws Exception {
when(mockAudioSink.isEnded()).thenReturn(true);
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
ForwardingAudioSink forwardingAudioSink =
new ForwardingAudioSink(mockAudioSink) {
@Override
public void playToEndOfStream() throws WriteException {
super.playToEndOfStream();
hasCalledPlayToEndOfStream.set(true);
}
};
audioRenderer = createAudioRenderer(forwardingAudioSink);
audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ FORMAT,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
audioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {FORMAT},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
audioRenderer.start();
audioRenderer.setCurrentStreamFinal();
while (!hasCalledPlayToEndOfStream.get()) {
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
}
audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
long durationToProgressUs =
audioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);

View File

@ -1026,78 +1026,6 @@ public class MediaCodecAudioRendererTest {
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void
getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
throws Exception {
AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
mediaCodecAudioRenderer =
new MediaCodecAudioRenderer(
ApplicationProvider.getApplicationContext(),
new DefaultMediaCodecAdapterFactory(
ApplicationProvider.getApplicationContext(),
() -> {
callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
return callbackThread;
},
() -> {
queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
return queueingThread;
}),
mediaCodecSelector,
/* enableDecoderFallback= */ false,
new Handler(Looper.getMainLooper()),
audioRendererEventListener,
audioSink) {
@Override
protected void renderToEndOfStream() throws ExoPlaybackException {
super.renderToEndOfStream();
hasCalledRenderToEndOfStream.set(true);
}
};
mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DRM_UNSUPPORTED,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ AUDIO_AAC,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
END_OF_STREAM_ITEM));
fakeSampleStream.writeData(/* startPositionUs= */ 0);
mediaCodecAudioRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {AUDIO_AAC},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ false,
/* startPositionUs= */ 0,
/* offsetUs= */ 0,
new MediaSource.MediaPeriodId(new Object()));
mediaCodecAudioRenderer.start();
mediaCodecAudioRenderer.setCurrentStreamFinal();
while (!hasCalledRenderToEndOfStream.get()) {
mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
maybeIdleAsynchronousMediaCodecAdapterThreads();
}
long durationToProgressUs =
mediaCodecAudioRenderer.getDurationToProgressUs(
/* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
assertThat(durationToProgressUs).isEqualTo(25_000L);
}
@Test
public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);

View File

@ -0,0 +1,38 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer.trackselection;
import static androidx.media3.test.utils.TestUtil.assertForwardingClassForwardsAllMethods;
import static androidx.media3.test.utils.TestUtil.assertSubclassOverridesAllMethods;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link ForwardingTrackSelection}. */
@RunWith(AndroidJUnit4.class)
public class ForwardingTrackSelectionTest {

  // Guards against a new ExoTrackSelection method silently falling through to a default or
  // inherited implementation instead of being explicitly overridden by the forwarding class.
  @Test
  public void overridesAllMethods() throws NoSuchMethodException {
    assertSubclassOverridesAllMethods(ExoTrackSelection.class, ForwardingTrackSelection.class);
  }

  // Verifies that every ExoTrackSelection method invokes the corresponding method on the wrapped
  // instance passed to the constructor.
  @Test
  public void forwardsAllMethods() throws Exception {
    assertForwardingClassForwardsAllMethods(ExoTrackSelection.class, ForwardingTrackSelection::new);
  }
}

View File

@ -46,6 +46,7 @@ import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** A default implementation of {@link Codec.EncoderFactory}. */
@ -395,6 +396,33 @@ public final class DefaultEncoderFactory implements Codec.EncoderFactory {
}
}
int maxBFrames = supportedVideoEncoderSettings.maxBFrames;
if (SDK_INT >= 29 && maxBFrames != VideoEncoderSettings.NO_VALUE) {
mediaFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);
}
int numNonBidirectionalTemporalLayers =
supportedVideoEncoderSettings.numNonBidirectionalTemporalLayers;
int numBidirectionalTemporalLayers =
supportedVideoEncoderSettings.numBidirectionalTemporalLayers;
if (SDK_INT >= 25 && numNonBidirectionalTemporalLayers >= 0) {
String temporalSchema;
if (numNonBidirectionalTemporalLayers == 0) {
temporalSchema = "none";
} else if (numBidirectionalTemporalLayers > 0) {
temporalSchema =
String.format(
Locale.ROOT,
"android.generic.%d+%d",
numNonBidirectionalTemporalLayers,
numBidirectionalTemporalLayers);
} else {
temporalSchema =
String.format(Locale.ROOT, "android.generic.%d", numNonBidirectionalTemporalLayers);
}
mediaFormat.setString(MediaFormat.KEY_TEMPORAL_LAYERING, temporalSchema);
}
return new DefaultCodec(
context,
encoderSupportedFormat,

View File

@ -337,6 +337,164 @@ public class DefaultEncoderFactoryTest {
.isFalse();
}
@Test
@Config(sdk = 29)
public void createForVideoEncoding_withMaxBFrames_configuresEncoderWithMaxBFrames()
    throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // On API 29+ the requested maxBFrames value should be applied to the codec's MediaFormat.
  assertThat(videoEncoder.getConfigurationMediaFormat().getInteger(MediaFormat.KEY_MAX_B_FRAMES))
      .isEqualTo(3);
}
@Test
@Config(sdk = 23)
public void createForVideoEncoding_withMaxBFramesOnApi23_doesNotConfigureMaxBFrames()
    throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // Below API 29 the KEY_MAX_B_FRAMES key must not be set, even when requested.
  assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
      .isFalse();
}
@Test
@Config(sdk = 29)
public void createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureMaxBFrames()
    throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // When maxBFrames is left at its default (NO_VALUE), the key must not appear in the format.
  assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
      .isFalse();
}
@Config(sdk = 29)
@Test
public void
    createForVideoEncoding_withTemporalLayeringSchemaWithZeroLayers_configuresEncoderWithTemporalLayeringSchema()
        throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder()
                  .setTemporalLayers(
                      /* numNonBidirectionalLayers= */ 0, /* numBidirectionalLayers= */ 0)
                  .build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // Zero non-bidirectional layers maps to the explicit "none" temporal-layering schema.
  assertThat(
          videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
      .isEqualTo("none");
}
@Config(sdk = 29)
@Test
public void
    createForVideoEncoding_withTemporalLayeringSchemaWithoutBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
        throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder()
                  .setTemporalLayers(
                      /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 0)
                  .build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // With no bidirectional layers, the schema omits the "+N" suffix.
  assertThat(
          videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
      .isEqualTo("android.generic.1");
}
@Config(sdk = 29)
@Test
public void
    createForVideoEncoding_withTemporalLayeringSchemaWithBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
        throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder()
                  .setTemporalLayers(
                      /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
                  .build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // Both layer counts present: the schema uses the "android.generic.N+M" form.
  assertThat(
          videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
      .isEqualTo("android.generic.1+2");
}
@Config(sdk = 23)
@Test
public void
    createForVideoEncoding_withTemporalLayeringSchemaOnApi23_doesNotConfigureTemporalLayeringSchema()
        throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .setRequestedVideoEncoderSettings(
              new VideoEncoderSettings.Builder()
                  .setTemporalLayers(
                      /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
                  .build())
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // Below API 25, temporal layering must not be set even when layers are requested.
  assertThat(
          videoEncoder
              .getConfigurationMediaFormat()
              .containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
      .isFalse();
}
@Config(sdk = 29)
@Test
public void
    createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureTemporalLayeringSchema()
        throws Exception {
  Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);

  DefaultCodec videoEncoder =
      new DefaultEncoderFactory.Builder(context)
          .build()
          .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);

  // Default settings request no temporal layers, so the key must be absent.
  assertThat(
          videoEncoder
              .getConfigurationMediaFormat()
              .containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
      .isFalse();
}
@Test
public void createForVideoEncoding_withNoAvailableEncoderFromEncoderSelector_throws() {
Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);