mirror of https://github.com/androidx/media.git
synced 2025-04-29 22:36:54 +08:00

Compare commits: 6 commits, 4189a7c8bd ... dae5ebb820

SHA1:
dae5ebb820
910b6ab884
2e20d35c3d
6bc54ff699
6ce9fdee8c
25feac7664
@@ -171,7 +171,6 @@ public abstract class DecoderAudioRenderer<
   private long largestQueuedPresentationTimeUs;
   private long lastBufferInStreamPresentationTimeUs;
   private long nextBufferToWritePresentationTimeUs;
-  private boolean isRendereringToEndOfStream;

   public DecoderAudioRenderer() {
     this(/* eventHandler= */ null, /* eventListener= */ null);
@@ -250,17 +249,13 @@ public abstract class DecoderAudioRenderer<
       return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
     }
     long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
-    // Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
-    // to end of stream.
-    if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
+    // Return default if getAudioTrackBufferSizeUs is unsupported.
+    if (audioTrackBufferDurationUs == C.TIME_UNSET) {
       return super.getDurationToProgressUs(positionUs, elapsedRealtimeUs);
     }
     // Compare written, yet-to-play content duration against the audio track buffer size.
     long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
-    long bufferedDurationUs =
-        audioTrackBufferDurationUs != C.TIME_UNSET
-            ? min(audioTrackBufferDurationUs, writtenDurationUs)
-            : writtenDurationUs;
+    long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
     bufferedDurationUs =
         (long)
             (bufferedDurationUs
@@ -319,7 +314,6 @@ public abstract class DecoderAudioRenderer<
     try {
       audioSink.playToEndOfStream();
       nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
-      isRendereringToEndOfStream = true;
     } catch (AudioSink.WriteException e) {
       throw createRendererException(
           e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
@@ -601,7 +595,6 @@ public abstract class DecoderAudioRenderer<
     outputStreamEnded = true;
     audioSink.playToEndOfStream();
     nextBufferToWritePresentationTimeUs = lastBufferInStreamPresentationTimeUs;
-    isRendereringToEndOfStream = true;
   }

   private void flushDecoder() throws ExoPlaybackException {
@@ -677,7 +670,6 @@ public abstract class DecoderAudioRenderer<

     currentPositionUs = positionUs;
     nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
-    isRendereringToEndOfStream = false;
     hasPendingReportedSkippedSilence = false;
     allowPositionDiscontinuity = true;
     inputStreamEnded = false;
@@ -707,7 +699,6 @@ public abstract class DecoderAudioRenderer<
     setOutputStreamOffsetUs(C.TIME_UNSET);
     hasPendingReportedSkippedSilence = false;
     nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
-    isRendereringToEndOfStream = false;
     try {
       setSourceDrmSession(null);
       releaseDecoder();
@@ -129,7 +129,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   private int rendererPriority;
   private boolean isStarted;
   private long nextBufferToWritePresentationTimeUs;
-  private boolean isRendereringToEndOfStream;

   /**
    * @param context A context.
@@ -528,18 +527,14 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
           positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
     }
     long audioTrackBufferDurationUs = audioSink.getAudioTrackBufferSizeUs();
-    // Return default if getAudioTrackBufferSizeUs is unsupported and not in the midst of rendering
-    // to end of stream.
-    if (!isRendereringToEndOfStream && audioTrackBufferDurationUs == C.TIME_UNSET) {
+    // Return default if getAudioTrackBufferSizeUs is unsupported.
+    if (audioTrackBufferDurationUs == C.TIME_UNSET) {
       return super.getDurationToProgressUs(
           positionUs, elapsedRealtimeUs, isOnBufferAvailableListenerRegistered);
     }
     // Compare written, yet-to-play content duration against the audio track buffer size.
     long writtenDurationUs = (nextBufferToWritePresentationTimeUs - positionUs);
-    long bufferedDurationUs =
-        audioTrackBufferDurationUs != C.TIME_UNSET
-            ? min(audioTrackBufferDurationUs, writtenDurationUs)
-            : writtenDurationUs;
+    long bufferedDurationUs = min(audioTrackBufferDurationUs, writtenDurationUs);
     bufferedDurationUs =
         (long)
             (bufferedDurationUs
@@ -697,7 +692,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media

     currentPositionUs = positionUs;
     nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
-    isRendereringToEndOfStream = false;
     hasPendingReportedSkippedSilence = false;
     allowPositionDiscontinuity = true;
   }
@@ -722,7 +716,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
     audioSinkNeedsReset = true;
     inputFormat = null;
     nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
-    isRendereringToEndOfStream = false;
     try {
       audioSink.flush();
     } finally {
@@ -738,7 +731,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   protected void onReset() {
     hasPendingReportedSkippedSilence = false;
     nextBufferToWritePresentationTimeUs = C.TIME_UNSET;
-    isRendereringToEndOfStream = false;
     try {
       super.onReset();
     } finally {
@@ -878,7 +870,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
       if (getLastBufferInStreamPresentationTimeUs() != C.TIME_UNSET) {
         nextBufferToWritePresentationTimeUs = getLastBufferInStreamPresentationTimeUs();
       }
-      isRendereringToEndOfStream = true;
     } catch (AudioSink.WriteException e) {
       throw createRendererException(
           e,
@@ -25,17 +25,14 @@ import androidx.media3.common.util.Assertions;
 import androidx.media3.common.util.NullableType;
 import androidx.media3.exoplayer.LoadingInfo;
 import androidx.media3.exoplayer.SeekParameters;
-import androidx.media3.exoplayer.source.chunk.Chunk;
-import androidx.media3.exoplayer.source.chunk.MediaChunk;
-import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
 import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
+import androidx.media3.exoplayer.trackselection.ForwardingTrackSelection;
 import com.google.common.collect.Lists;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.IdentityHashMap;
-import java.util.List;

 /** Merges multiple {@link MediaPeriod}s. */
 /* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback {
@@ -140,7 +137,8 @@ import java.util.List;
         TrackGroup mergedTrackGroup = mergedTrackSelection.getTrackGroup();
         TrackGroup childTrackGroup =
             checkNotNull(childTrackGroupByMergedTrackGroup.get(mergedTrackGroup));
-        childSelections[j] = new ForwardingTrackSelection(mergedTrackSelection, childTrackGroup);
+        childSelections[j] =
+            new MergingMediaPeriodTrackSelection(mergedTrackSelection, childTrackGroup);
       } else {
         childSelections[j] = null;
       }
@@ -313,162 +311,47 @@ import java.util.List;
     Assertions.checkNotNull(callback).onContinueLoadingRequested(this);
   }

-  private static final class ForwardingTrackSelection implements ExoTrackSelection {
+  private static final class MergingMediaPeriodTrackSelection extends ForwardingTrackSelection {

-    private final ExoTrackSelection trackSelection;
     private final TrackGroup trackGroup;

-    public ForwardingTrackSelection(ExoTrackSelection trackSelection, TrackGroup trackGroup) {
-      this.trackSelection = trackSelection;
+    public MergingMediaPeriodTrackSelection(
+        ExoTrackSelection trackSelection, TrackGroup trackGroup) {
+      super(trackSelection);
       this.trackGroup = trackGroup;
     }

-    @Override
-    public @Type int getType() {
-      return trackSelection.getType();
-    }
-
     @Override
     public TrackGroup getTrackGroup() {
       return trackGroup;
     }

-    @Override
-    public int length() {
-      return trackSelection.length();
-    }
-
     @Override
     public Format getFormat(int index) {
-      return trackGroup.getFormat(trackSelection.getIndexInTrackGroup(index));
-    }
-
-    @Override
-    public int getIndexInTrackGroup(int index) {
-      return trackSelection.getIndexInTrackGroup(index);
+      return trackGroup.getFormat(getWrappedInstance().getIndexInTrackGroup(index));
     }

     @Override
     public int indexOf(Format format) {
-      return trackSelection.indexOf(trackGroup.indexOf(format));
-    }
-
-    @Override
-    public int indexOf(int indexInTrackGroup) {
-      return trackSelection.indexOf(indexInTrackGroup);
-    }
-
-    @Override
-    public void enable() {
-      trackSelection.enable();
-    }
-
-    @Override
-    public void disable() {
-      trackSelection.disable();
+      return getWrappedInstance().indexOf(trackGroup.indexOf(format));
     }

     @Override
     public Format getSelectedFormat() {
-      return trackGroup.getFormat(trackSelection.getSelectedIndexInTrackGroup());
+      return trackGroup.getFormat(getWrappedInstance().getSelectedIndexInTrackGroup());
     }

     @Override
-    public int getSelectedIndexInTrackGroup() {
-      return trackSelection.getSelectedIndexInTrackGroup();
-    }
-
-    @Override
-    public int getSelectedIndex() {
-      return trackSelection.getSelectedIndex();
-    }
-
-    @Override
-    public @C.SelectionReason int getSelectionReason() {
-      return trackSelection.getSelectionReason();
-    }
-
-    @Nullable
-    @Override
-    public Object getSelectionData() {
-      return trackSelection.getSelectionData();
-    }
-
-    @Override
-    public void onPlaybackSpeed(float playbackSpeed) {
-      trackSelection.onPlaybackSpeed(playbackSpeed);
-    }
-
-    @Override
-    public void onDiscontinuity() {
-      trackSelection.onDiscontinuity();
-    }
-
-    @Override
-    public void onRebuffer() {
-      trackSelection.onRebuffer();
-    }
-
-    @Override
-    public void onPlayWhenReadyChanged(boolean playWhenReady) {
-      trackSelection.onPlayWhenReadyChanged(playWhenReady);
-    }
-
-    @Override
-    public void updateSelectedTrack(
-        long playbackPositionUs,
-        long bufferedDurationUs,
-        long availableDurationUs,
-        List<? extends MediaChunk> queue,
-        MediaChunkIterator[] mediaChunkIterators) {
-      trackSelection.updateSelectedTrack(
-          playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
-    }
-
-    @Override
-    public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
-      return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
-    }
-
-    @Override
-    public boolean shouldCancelChunkLoad(
-        long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
-      return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
-    }
-
-    @Override
-    public boolean excludeTrack(int index, long exclusionDurationMs) {
-      return trackSelection.excludeTrack(index, exclusionDurationMs);
-    }
-
-    @Override
-    public boolean isTrackExcluded(int index, long nowMs) {
-      return trackSelection.isTrackExcluded(index, nowMs);
-    }
-
-    @Override
-    public long getLatestBitrateEstimate() {
-      return trackSelection.getLatestBitrateEstimate();
-    }
-
-    @Override
-    public boolean equals(@Nullable Object o) {
-      if (this == o) {
-        return true;
-      }
-      if (!(o instanceof ForwardingTrackSelection)) {
+    public boolean equals(@Nullable Object other) {
+      if (!super.equals(other) || !(other instanceof MergingMediaPeriodTrackSelection)) {
         return false;
       }
-      ForwardingTrackSelection that = (ForwardingTrackSelection) o;
-      return trackSelection.equals(that.trackSelection) && trackGroup.equals(that.trackGroup);
+      MergingMediaPeriodTrackSelection that = (MergingMediaPeriodTrackSelection) other;
+      return trackGroup.equals(that.trackGroup);
     }

     @Override
     public int hashCode() {
-      int result = 17;
-      result = 31 * result + trackGroup.hashCode();
-      result = 31 * result + trackSelection.hashCode();
-      return result;
+      return 31 * super.hashCode() + trackGroup.hashCode();
     }
   }
 }
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.exoplayer.trackselection;
+
+import androidx.annotation.Nullable;
+import androidx.media3.common.C;
+import androidx.media3.common.Format;
+import androidx.media3.common.TrackGroup;
+import androidx.media3.common.util.UnstableApi;
+import androidx.media3.exoplayer.source.chunk.Chunk;
+import androidx.media3.exoplayer.source.chunk.MediaChunk;
+import androidx.media3.exoplayer.source.chunk.MediaChunkIterator;
+import java.util.List;
+
+/** An {@link ExoTrackSelection} forwarding all calls to a wrapped instance. */
+@UnstableApi
+public class ForwardingTrackSelection implements ExoTrackSelection {
+
+  private final ExoTrackSelection trackSelection;
+
+  /**
+   * Creates the forwarding track selection.
+   *
+   * @param trackSelection The wrapped {@link ExoTrackSelection}.
+   */
+  public ForwardingTrackSelection(ExoTrackSelection trackSelection) {
+    this.trackSelection = trackSelection;
+  }
+
+  /** Returns the wrapped {@link ExoTrackSelection}. */
+  public ExoTrackSelection getWrappedInstance() {
+    return trackSelection;
+  }
+
+  @Override
+  public void enable() {
+    trackSelection.enable();
+  }
+
+  @Override
+  public void disable() {
+    trackSelection.disable();
+  }
+
+  @Override
+  public Format getSelectedFormat() {
+    return trackSelection.getSelectedFormat();
+  }
+
+  @Override
+  public int getSelectedIndexInTrackGroup() {
+    return trackSelection.getSelectedIndexInTrackGroup();
+  }
+
+  @Override
+  public int getSelectedIndex() {
+    return trackSelection.getSelectedIndex();
+  }
+
+  @Override
+  public @C.SelectionReason int getSelectionReason() {
+    return trackSelection.getSelectionReason();
+  }
+
+  @Nullable
+  @Override
+  public Object getSelectionData() {
+    return trackSelection.getSelectionData();
+  }
+
+  @Override
+  public void onPlaybackSpeed(float playbackSpeed) {
+    trackSelection.onPlaybackSpeed(playbackSpeed);
+  }
+
+  @Override
+  public void onDiscontinuity() {
+    trackSelection.onDiscontinuity();
+  }
+
+  @Override
+  public void onRebuffer() {
+    trackSelection.onRebuffer();
+  }
+
+  @Override
+  public void onPlayWhenReadyChanged(boolean playWhenReady) {
+    trackSelection.onPlayWhenReadyChanged(playWhenReady);
+  }
+
+  @Override
+  public void updateSelectedTrack(
+      long playbackPositionUs,
+      long bufferedDurationUs,
+      long availableDurationUs,
+      List<? extends MediaChunk> queue,
+      MediaChunkIterator[] mediaChunkIterators) {
+    trackSelection.updateSelectedTrack(
+        playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators);
+  }
+
+  @Override
+  public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
+    return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
+  }
+
+  @Override
+  public boolean shouldCancelChunkLoad(
+      long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
+    return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
+  }
+
+  @Override
+  public boolean excludeTrack(int index, long exclusionDurationMs) {
+    return trackSelection.excludeTrack(index, exclusionDurationMs);
+  }
+
+  @Override
+  public boolean isTrackExcluded(int index, long nowMs) {
+    return trackSelection.isTrackExcluded(index, nowMs);
+  }
+
+  @Override
+  public long getLatestBitrateEstimate() {
+    return trackSelection.getLatestBitrateEstimate();
+  }
+
+  @Override
+  public @Type int getType() {
+    return trackSelection.getType();
+  }
+
+  @Override
+  public TrackGroup getTrackGroup() {
+    return trackSelection.getTrackGroup();
+  }
+
+  @Override
+  public int length() {
+    return trackSelection.length();
+  }
+
+  @Override
+  public Format getFormat(int index) {
+    return trackSelection.getFormat(index);
+  }
+
+  @Override
+  public int getIndexInTrackGroup(int index) {
+    return trackSelection.getIndexInTrackGroup(index);
+  }
+
+  @Override
+  public int indexOf(Format format) {
+    return trackSelection.indexOf(format);
+  }
+
+  @Override
+  public int indexOf(int indexInTrackGroup) {
+    return trackSelection.indexOf(indexInTrackGroup);
+  }
+
+  @Override
+  public int hashCode() {
+    return trackSelection.hashCode();
+  }
+
+  @Override
+  public boolean equals(@Nullable Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (!(obj instanceof ForwardingTrackSelection)) {
+      return false;
+    }
+    ForwardingTrackSelection other = (ForwardingTrackSelection) obj;
+    return trackSelection.equals(other.trackSelection);
+  }
+}
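The new ForwardingTrackSelection above is a plain delegating base class: callers subclass it, override only the methods whose behaviour should differ, and reach the wrapped selection through getWrappedInstance(). MergingMediaPeriodTrackSelection in the MergingMediaPeriod hunk earlier is the in-tree example of this; the sketch below shows the same shape with an illustrative subclass (the class name and field here are hypothetical and not part of the change).

// Illustrative sketch only: a selection that reports a remapped TrackGroup while
// forwarding every other call to the wrapped ExoTrackSelection.
/* package */ final class RemappedTrackGroupSelection extends ForwardingTrackSelection {

  // Hypothetical field: the group this selection should report instead of the wrapped one.
  private final TrackGroup remappedTrackGroup;

  public RemappedTrackGroupSelection(ExoTrackSelection wrapped, TrackGroup remappedTrackGroup) {
    super(wrapped);
    this.remappedTrackGroup = remappedTrackGroup;
  }

  @Override
  public TrackGroup getTrackGroup() {
    return remappedTrackGroup;
  }

  @Override
  public Format getFormat(int index) {
    // Resolve the format against the remapped group using the wrapped selection's indices.
    return remappedTrackGroup.getFormat(getWrappedInstance().getIndexInTrackGroup(index));
  }
}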
@@ -2473,6 +2473,22 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
     }
   }

+  @Override
+  protected boolean flushOrReleaseCodec() {
+    MediaCodecInfo codecInfo = getCodecInfo();
+    if (videoSink != null
+        && codecInfo != null
+        && (codecInfo.name.equals("c2.mtk.avc.decoder")
+            || codecInfo.name.equals("c2.mtk.hevc.decoder"))) {
+      // Flushing a c2.mtk decoder that outputs to a SurfaceTexture often fails and leaves
+      // the SurfaceTexture's BufferQueue in an unrecoverable state. Release the codec instead.
+      // See b/362904942 for more details.
+      releaseCodec();
+      return true;
+    }
+    return super.flushOrReleaseCodec();
+  }
+
   /**
    * Returns whether the device is known to do post processing by default that isn't compatible with
    * ExoPlayer.
@@ -512,65 +512,6 @@ public class DecoderAudioRendererTest {
     assertThat(durationToProgressUs).isEqualTo(25_000L);
   }

-  @Test
-  public void
-      getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterProcessEndOfStream_returnsCalculatedDuration()
-          throws Exception {
-    when(mockAudioSink.isEnded()).thenReturn(true);
-    when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
-    when(mockAudioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
-    when(mockAudioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
-    AtomicBoolean hasCalledPlayToEndOfStream = new AtomicBoolean();
-    ForwardingAudioSink forwardingAudioSink =
-        new ForwardingAudioSink(mockAudioSink) {
-          @Override
-          public void playToEndOfStream() throws WriteException {
-            super.playToEndOfStream();
-            hasCalledPlayToEndOfStream.set(true);
-          }
-        };
-    audioRenderer = createAudioRenderer(forwardingAudioSink);
-    audioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
-    FakeSampleStream fakeSampleStream =
-        new FakeSampleStream(
-            new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
-            /* mediaSourceEventDispatcher= */ null,
-            DrmSessionManager.DRM_UNSUPPORTED,
-            new DrmSessionEventListener.EventDispatcher(),
-            /* initialFormat= */ FORMAT,
-            ImmutableList.of(
-                oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 50000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 100000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 150000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 200000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 250000, C.BUFFER_FLAG_KEY_FRAME),
-                END_OF_STREAM_ITEM));
-    fakeSampleStream.writeData(/* startPositionUs= */ 0);
-    audioRenderer.enable(
-        RendererConfiguration.DEFAULT,
-        new Format[] {FORMAT},
-        fakeSampleStream,
-        /* positionUs= */ 0,
-        /* joining= */ false,
-        /* mayRenderStartOfStream= */ true,
-        /* startPositionUs= */ 0,
-        /* offsetUs= */ 0,
-        new MediaSource.MediaPeriodId(new Object()));
-    audioRenderer.start();
-    audioRenderer.setCurrentStreamFinal();
-    while (!hasCalledPlayToEndOfStream.get()) {
-      audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
-    }
-    audioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
-
-    long durationToProgressUs =
-        audioRenderer.getDurationToProgressUs(
-            /* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
-
-    assertThat(durationToProgressUs).isEqualTo(25_000L);
-  }
-
   @Test
   public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
     when(mockAudioSink.handleBuffer(any(), anyLong(), anyInt())).thenReturn(true);
@@ -1026,78 +1026,6 @@ public class MediaCodecAudioRendererTest {
     assertThat(durationToProgressUs).isEqualTo(25_000L);
   }

-  @Test
-  public void
-      getDurationToProgressUs_withAudioTrackBufferDurationUnsupportedAfterRenderToEndOfStream_returnsCalculatedDuration()
-          throws Exception {
-    AtomicBoolean hasCalledRenderToEndOfStream = new AtomicBoolean();
-    mediaCodecAudioRenderer =
-        new MediaCodecAudioRenderer(
-            ApplicationProvider.getApplicationContext(),
-            new DefaultMediaCodecAdapterFactory(
-                ApplicationProvider.getApplicationContext(),
-                () -> {
-                  callbackThread = new HandlerThread("MCARTest:MediaCodecAsyncAdapter");
-                  return callbackThread;
-                },
-                () -> {
-                  queueingThread = new HandlerThread("MCARTest:MediaCodecQueueingThread");
-                  return queueingThread;
-                }),
-            mediaCodecSelector,
-            /* enableDecoderFallback= */ false,
-            new Handler(Looper.getMainLooper()),
-            audioRendererEventListener,
-            audioSink) {
-          @Override
-          protected void renderToEndOfStream() throws ExoPlaybackException {
-            super.renderToEndOfStream();
-            hasCalledRenderToEndOfStream.set(true);
-          }
-        };
-    mediaCodecAudioRenderer.init(/* index= */ 0, PlayerId.UNSET, Clock.DEFAULT);
-    when(audioSink.getAudioTrackBufferSizeUs()).thenReturn(C.TIME_UNSET);
-    when(audioSink.getPlaybackParameters()).thenReturn(PlaybackParameters.DEFAULT);
-    FakeSampleStream fakeSampleStream =
-        new FakeSampleStream(
-            new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
-            /* mediaSourceEventDispatcher= */ null,
-            DrmSessionManager.DRM_UNSUPPORTED,
-            new DrmSessionEventListener.EventDispatcher(),
-            /* initialFormat= */ AUDIO_AAC,
-            ImmutableList.of(
-                oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 50_000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 100_000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 150_000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 200_000, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 250_000, C.BUFFER_FLAG_KEY_FRAME),
-                END_OF_STREAM_ITEM));
-    fakeSampleStream.writeData(/* startPositionUs= */ 0);
-    mediaCodecAudioRenderer.enable(
-        RendererConfiguration.DEFAULT,
-        new Format[] {AUDIO_AAC},
-        fakeSampleStream,
-        /* positionUs= */ 0,
-        /* joining= */ false,
-        /* mayRenderStartOfStream= */ false,
-        /* startPositionUs= */ 0,
-        /* offsetUs= */ 0,
-        new MediaSource.MediaPeriodId(new Object()));
-    mediaCodecAudioRenderer.start();
-    mediaCodecAudioRenderer.setCurrentStreamFinal();
-    while (!hasCalledRenderToEndOfStream.get()) {
-      mediaCodecAudioRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
-      maybeIdleAsynchronousMediaCodecAdapterThreads();
-    }
-
-    long durationToProgressUs =
-        mediaCodecAudioRenderer.getDurationToProgressUs(
-            /* positionUs= */ 200_000L, SystemClock.elapsedRealtime() * 1000);
-
-    assertThat(durationToProgressUs).isEqualTo(25_000L);
-  }
-
   @Test
   public void getDurationToProgressUs_afterResetPosition_returnsDefaultDuration() throws Exception {
     FakeClock fakeClock = new FakeClock(/* initialTimeMs= */ 100, /* isAutoAdvancing= */ true);
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.exoplayer.trackselection;
+
+import static androidx.media3.test.utils.TestUtil.assertForwardingClassForwardsAllMethods;
+import static androidx.media3.test.utils.TestUtil.assertSubclassOverridesAllMethods;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+/** Unit test for {@link ForwardingTrackSelection}. */
+@RunWith(AndroidJUnit4.class)
+public class ForwardingTrackSelectionTest {
+
+  @Test
+  public void overridesAllMethods() throws NoSuchMethodException {
+    assertSubclassOverridesAllMethods(ExoTrackSelection.class, ForwardingTrackSelection.class);
+  }
+
+  @Test
+  public void forwardsAllMethods() throws Exception {
+    assertForwardingClassForwardsAllMethods(ExoTrackSelection.class, ForwardingTrackSelection::new);
+  }
+}
@@ -46,6 +46,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.errorprone.annotations.CanIgnoreReturnValue;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import org.checkerframework.checker.nullness.qual.RequiresNonNull;

 /** A default implementation of {@link Codec.EncoderFactory}. */
@@ -395,6 +396,33 @@ public final class DefaultEncoderFactory implements Codec.EncoderFactory {
       }
     }

+    int maxBFrames = supportedVideoEncoderSettings.maxBFrames;
+    if (SDK_INT >= 29 && maxBFrames != VideoEncoderSettings.NO_VALUE) {
+      mediaFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);
+    }
+
+    int numNonBidirectionalTemporalLayers =
+        supportedVideoEncoderSettings.numNonBidirectionalTemporalLayers;
+    int numBidirectionalTemporalLayers =
+        supportedVideoEncoderSettings.numBidirectionalTemporalLayers;
+    if (SDK_INT >= 25 && numNonBidirectionalTemporalLayers >= 0) {
+      String temporalSchema;
+      if (numNonBidirectionalTemporalLayers == 0) {
+        temporalSchema = "none";
+      } else if (numBidirectionalTemporalLayers > 0) {
+        temporalSchema =
+            String.format(
+                Locale.ROOT,
+                "android.generic.%d+%d",
+                numNonBidirectionalTemporalLayers,
+                numBidirectionalTemporalLayers);
+      } else {
+        temporalSchema =
+            String.format(Locale.ROOT, "android.generic.%d", numNonBidirectionalTemporalLayers);
+      }
+      mediaFormat.setString(MediaFormat.KEY_TEMPORAL_LAYERING, temporalSchema);
+    }
+
     return new DefaultCodec(
         context,
         encoderSupportedFormat,
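As a quick reference for the temporal-layering string assembled above, the sketch below restates the same derivation as a standalone helper written purely for illustration (DefaultEncoderFactory builds the string inline, as shown in the hunk); the example outputs match the values asserted by the tests that follow.

// Illustrative sketch only; mirrors the schema derivation in the hunk above.
//   0 non-bidirectional layers                   -> "none"
//   1 non-bidirectional, 0 bidirectional layers  -> "android.generic.1"
//   1 non-bidirectional, 2 bidirectional layers  -> "android.generic.1+2"
final class TemporalSchemaSketch {
  static String temporalSchema(int numNonBidirectionalLayers, int numBidirectionalLayers) {
    if (numNonBidirectionalLayers == 0) {
      return "none";
    }
    if (numBidirectionalLayers > 0) {
      return String.format(
          java.util.Locale.ROOT,
          "android.generic.%d+%d",
          numNonBidirectionalLayers,
          numBidirectionalLayers);
    }
    return String.format(
        java.util.Locale.ROOT, "android.generic.%d", numNonBidirectionalLayers);
  }
}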
@@ -337,6 +337,164 @@ public class DefaultEncoderFactoryTest {
         .isFalse();
   }

+  @Test
+  @Config(sdk = 29)
+  public void createForVideoEncoding_withMaxBFrames_configuresEncoderWithMaxBFrames()
+      throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(videoEncoder.getConfigurationMediaFormat().getInteger(MediaFormat.KEY_MAX_B_FRAMES))
+        .isEqualTo(3);
+  }
+
+  @Test
+  @Config(sdk = 23)
+  public void createForVideoEncoding_withMaxBFramesOnApi23_doesNotConfigureMaxBFrames()
+      throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder().setMaxBFrames(3).build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
+        .isFalse();
+  }
+
+  @Test
+  @Config(sdk = 29)
+  public void createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureMaxBFrames()
+      throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(videoEncoder.getConfigurationMediaFormat().containsKey(MediaFormat.KEY_MAX_B_FRAMES))
+        .isFalse();
+  }
+
+  @Config(sdk = 29)
+  @Test
+  public void
+      createForVideoEncoding_withTemporalLayeringSchemaWithZeroLayers_configuresEncoderWithTemporalLayeringSchema()
+          throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder()
+                    .setTemporalLayers(
+                        /* numNonBidirectionalLayers= */ 0, /* numBidirectionalLayers= */ 0)
+                    .build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(
+            videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
+        .isEqualTo("none");
+  }
+
+  @Config(sdk = 29)
+  @Test
+  public void
+      createForVideoEncoding_withTemporalLayeringSchemaWithoutBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
+          throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder()
+                    .setTemporalLayers(
+                        /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 0)
+                    .build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(
+            videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
+        .isEqualTo("android.generic.1");
+  }
+
+  @Config(sdk = 29)
+  @Test
+  public void
+      createForVideoEncoding_withTemporalLayeringSchemaWithBidirectionalLayers_configuresEncoderWithTemporalLayeringSchema()
+          throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder()
+                    .setTemporalLayers(
+                        /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
+                    .build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(
+            videoEncoder.getConfigurationMediaFormat().getString(MediaFormat.KEY_TEMPORAL_LAYERING))
+        .isEqualTo("android.generic.1+2");
+  }
+
+  @Config(sdk = 23)
+  @Test
+  public void
+      createForVideoEncoding_withTemporalLayeringSchemaOnApi23_doesNotConfigureTemporalLayeringSchema()
+          throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder()
+                    .setTemporalLayers(
+                        /* numNonBidirectionalLayers= */ 1, /* numBidirectionalLayers= */ 2)
+                    .build())
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(
+            videoEncoder
+                .getConfigurationMediaFormat()
+                .containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
+        .isFalse();
+  }
+
+  @Config(sdk = 29)
+  @Test
+  public void
+      createForVideoEncoding_withDefaultEncoderSettings_doesNotConfigureTemporalLayeringSchema()
+          throws Exception {
+    Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);
+
+    DefaultCodec videoEncoder =
+        new DefaultEncoderFactory.Builder(context)
+            .build()
+            .createForVideoEncoding(requestedVideoFormat, /* logSessionId= */ null);
+
+    assertThat(
+            videoEncoder
+                .getConfigurationMediaFormat()
+                .containsKey(MediaFormat.KEY_TEMPORAL_LAYERING))
+        .isFalse();
+  }
+
   @Test
   public void createForVideoEncoding_withNoAvailableEncoderFromEncoderSelector_throws() {
     Format requestedVideoFormat = createVideoFormat(MimeTypes.VIDEO_H264, 1920, 1080, 30);