Compare commits

...

6 Commits

Author SHA1 Message Date
kimvde
0ad7ceadb3 Remove references to Renderers from VideoSink
Remaining references will be removed in follow-up changes

PiperOrigin-RevId: 740362812
2025-03-25 08:43:44 -07:00
ibaker
4338355422 Use dumper.addIfNonDefault in CapturingAudioSink
If encoded audio is dumped, some of these values may not be set.

Also add a related TODO

PiperOrigin-RevId: 740357230
2025-03-25 08:24:44 -07:00
michaelkatz
6e510c26df Make any error during pre-warming disable and reset pre-warming
For now, any error that occurs during pre-warming, even a recoverable one, disables pre-warming until the next media item transition.

PiperOrigin-RevId: 740349517
2025-03-25 08:02:42 -07:00
kimvde
2642d895bd Remove VideoSink.onRendererEnabled
This is part of the effort to make VideoSink independent from renderers.

PiperOrigin-RevId: 740344126
2025-03-25 07:44:50 -07:00
michaelkatz
a220b0cb5e Add support for RTSPT scheme in RtspMediaSource.Factory
If the `RtspMediaSource.Factory` is provided a `MediaItem` containing a URI with the scheme `rtspt`, the factory will create its `RtspMediaSource` configured to use TCP.
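A minimal usage sketch (the stream URL is hypothetical; the factory call is the one shown in the diff below):

```java
import androidx.media3.common.MediaItem;
import androidx.media3.exoplayer.rtsp.RtspMediaSource;

// The "rtspt" scheme requests RTP-over-TCP instead of the default UDP.
MediaItem mediaItem = MediaItem.fromUri("rtspt://example.com:554/stream");
RtspMediaSource mediaSource = new RtspMediaSource.Factory().createMediaSource(mediaItem);
```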

Issue: androidx/media#1484
PiperOrigin-RevId: 740340604
2025-03-25 07:32:58 -07:00
ibaker
0cba160c22 Add non-raw MIME type to audio sink dumps
We currently don't have audio sink dumps for non-raw audio, but this
change will make it clearer if we do in the future (e.g. for testing
audio offload).

PiperOrigin-RevId: 740330118
2025-03-25 06:52:32 -07:00
15 changed files with 349 additions and 203 deletions

View File

@ -54,6 +54,9 @@
* DASH extension:
* Smooth Streaming extension:
* RTSP extension:
* Add support for URI with RTSPT scheme as a way to configure the RTSP
session to use TCP
([#1484](https://github.com/androidx/media/issues/1484)).
* Decoder extensions (FFmpeg, VP9, AV1, etc.):
* MIDI extension:
* Leanback extension:

View File

@ -2570,7 +2570,8 @@ public final class Util {
*/
public static @ContentType int inferContentType(Uri uri) {
@Nullable String scheme = uri.getScheme();
if (scheme != null && Ascii.equalsIgnoreCase("rtsp", scheme)) {
if (scheme != null
&& (Ascii.equalsIgnoreCase("rtsp", scheme) || Ascii.equalsIgnoreCase("rtspt", scheme))) {
return C.CONTENT_TYPE_RTSP;
}
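A quick check of the new behavior (hypothetical URIs; scheme matching is case-insensitive, so `RTSPT` also matches):

```java
import android.net.Uri;
import androidx.media3.common.C;
import androidx.media3.common.util.Util;

// Both now resolve to C.CONTENT_TYPE_RTSP.
int tcpType = Util.inferContentType(Uri.parse("rtspt://example.com/stream"));
int udpType = Util.inferContentType(Uri.parse("rtsp://example.com/stream"));
```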

View File

@ -741,25 +741,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
e = e.copyWithMediaPeriodId(readingPeriod.info.id);
}
}
if (e.isRecoverable
&& (pendingRecoverableRendererError == null
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
// If pendingRecoverableRendererError != null and error was
// ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED then upon retry, renderer will attempt with
// offload disabled.
Log.w(TAG, "Recoverable renderer error", e);
if (pendingRecoverableRendererError != null) {
pendingRecoverableRendererError.addSuppressed(e);
e = pendingRecoverableRendererError;
} else {
pendingRecoverableRendererError = e;
}
// Given that the player is now in an unhandled exception state, the error needs to be
// recovered or the player stopped before any other message is handled.
handler.sendMessageAtFrontOfQueue(
handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
} else if (e.type == ExoPlaybackException.TYPE_RENDERER
if (e.type == ExoPlaybackException.TYPE_RENDERER
&& e.mediaPeriodId != null
&& isRendererPrewarmingMediaPeriod(e.rendererIndex, e.mediaPeriodId)) {
// TODO(b/380273486): Investigate recovery for pre-warming renderer errors
@ -778,6 +760,24 @@ import java.util.concurrent.atomic.AtomicBoolean;
maybeContinueLoading();
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
} else if (e.isRecoverable
&& (pendingRecoverableRendererError == null
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_INIT_FAILED
|| e.errorCode == PlaybackException.ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED)) {
// If pendingRecoverableRendererError != null and error was
// ERROR_CODE_AUDIO_TRACK_OFFLOAD_WRITE_FAILED then upon retry, renderer will attempt with
// offload disabled.
Log.w(TAG, "Recoverable renderer error", e);
if (pendingRecoverableRendererError != null) {
pendingRecoverableRendererError.addSuppressed(e);
e = pendingRecoverableRendererError;
} else {
pendingRecoverableRendererError = e;
}
// Given that the player is now in an unhandled exception state, the error needs to be
// recovered or the player stopped before any other message is handled.
handler.sendMessageAtFrontOfQueue(
handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e));
} else {
if (pendingRecoverableRendererError != null) {
pendingRecoverableRendererError.addSuppressed(e);

View File

@ -79,23 +79,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoFrameMetadataListener = (presentationTimeUs, releaseTimeNs, format, mediaFormat) -> {};
}
/**
* {@inheritDoc}
*
* <p>This method will always throw an {@link UnsupportedOperationException}.
*/
@Override
public void onRendererEnabled(boolean mayRenderStartOfStream) {
throw new UnsupportedOperationException();
}
@Override
public void onRendererStarted() {
public void onStarted() {
videoFrameReleaseControl.onStarted();
}
@Override
public void onRendererStopped() {
public void onStopped() {
videoFrameReleaseControl.onStopped();
}
@ -131,8 +121,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public boolean isReady(boolean rendererOtherwiseReady) {
return videoFrameReleaseControl.isReady(rendererOtherwiseReady);
public boolean isReady(boolean otherwiseReady) {
return videoFrameReleaseControl.isReady(otherwiseReady);
}
@Override
@ -197,16 +187,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoFrameReleaseControl.setChangeFrameRateStrategy(changeFrameRateStrategy);
}
/**
* {@inheritDoc}
*
* <p>This method will always throw an {@link UnsupportedOperationException}.
*/
@Override
public void enableMayRenderStartOfStream() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
@ -233,6 +213,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
@Override
public void allowReleaseFirstFrameBeforeStarted() {
videoFrameReleaseControl.allowReleaseFirstFrameBeforeStarted();
}
@Override
public boolean handleInputFrame(
long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {

View File

@ -190,6 +190,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
private boolean codecHandlesHdr10PlusOutOfBandMetadata;
private @MonotonicNonNull VideoSink videoSink;
private boolean hasSetVideoSink;
private @VideoSink.FirstFrameReleaseInstruction int nextVideoSinkFirstFrameReleaseInstruction;
private @MonotonicNonNull List<Effect> videoEffects;
@Nullable private Surface displaySurface;
@Nullable private PlaceholderSurface placeholderSurface;
@ -928,7 +929,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
if (videoEffects != null) {
videoSink.setVideoEffects(videoEffects);
}
videoSink.onRendererEnabled(mayRenderStartOfStream);
nextVideoSinkFirstFrameReleaseInstruction =
mayRenderStartOfStream
? RELEASE_FIRST_FRAME_IMMEDIATELY
: RELEASE_FIRST_FRAME_WHEN_STARTED;
@Nullable WakeupListener wakeupListener = getWakeupListener();
if (wakeupListener != null) {
videoSink.setWakeupListener(wakeupListener);
@ -956,7 +960,13 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
@Override
public void enableMayRenderStartOfStream() {
if (videoSink != null) {
videoSink.enableMayRenderStartOfStream();
if (nextVideoSinkFirstFrameReleaseInstruction == RELEASE_FIRST_FRAME_IMMEDIATELY
|| nextVideoSinkFirstFrameReleaseInstruction == RELEASE_FIRST_FRAME_WHEN_STARTED) {
// The first stream change hasn't been queued to the sink.
nextVideoSinkFirstFrameReleaseInstruction = RELEASE_FIRST_FRAME_IMMEDIATELY;
} else {
videoSink.allowReleaseFirstFrameBeforeStarted();
}
} else {
videoFrameReleaseControl.allowReleaseFirstFrameBeforeStarted();
}
@ -1039,7 +1049,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
totalVideoFrameProcessingOffsetUs = 0;
videoFrameProcessingOffsetCount = 0;
if (videoSink != null) {
videoSink.onRendererStarted();
videoSink.onStarted();
} else {
videoFrameReleaseControl.onStarted();
}
@ -1050,7 +1060,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
maybeNotifyDroppedFrames();
maybeNotifyVideoFrameProcessingOffset();
if (videoSink != null) {
videoSink.onRendererStopped();
videoSink.onStopped();
} else {
videoFrameReleaseControl.onStopped();
}
@ -1642,7 +1652,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
.setWidth(width)
.setHeight(height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
.build(),
nextVideoSinkFirstFrameReleaseInstruction);
nextVideoSinkFirstFrameReleaseInstruction =
RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
} else {
videoFrameReleaseControl.setFrameRate(format.frameRate);
}
@ -1656,13 +1669,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
* <p>The default implementation applies this renderer's video effects.
*/
protected void changeVideoSinkInputStream(
VideoSink videoSink, @VideoSink.InputType int inputType, Format format) {
VideoSink videoSink,
@VideoSink.InputType int inputType,
Format format,
@VideoSink.FirstFrameReleaseInstruction int firstFrameReleaseInstruction) {
List<Effect> videoEffectsToApply = videoEffects != null ? videoEffects : ImmutableList.of();
videoSink.onInputStreamChanged(
inputType,
format,
getOutputStreamStartPositionUs(),
RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED,
firstFrameReleaseInstruction,
videoEffectsToApply);
}

View File

@ -22,7 +22,6 @@ import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.contains;
import static androidx.media3.common.util.Util.getMaxPendingFramesCountForMediaCodecDecoders;
import static androidx.media3.exoplayer.video.VideoSink.INPUT_TYPE_SURFACE;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.content.Context;
@ -281,12 +280,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final Context context;
/**
* A queue of unprocessed input frame start positions. Each position is associated with the
* timestamp from which it should be applied.
*/
private final TimedValueQueue<Long> streamStartPositionsUs;
private final VideoGraph.Factory videoGraphFactory;
private final SparseArray<InputVideoSink> inputVideoSinks;
private final List<Effect> compositionEffects;
@ -297,12 +290,18 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private final CopyOnWriteArraySet<PlaybackVideoGraphWrapper.Listener> listeners;
private final boolean requestOpenGlToneMapping;
/**
* A queue of unprocessed stream changes. Each stream change is associated with the timestamp from
* which it should be applied.
*/
private TimedValueQueue<StreamChangeInfo> pendingStreamChanges;
private Format videoGraphOutputFormat;
private @MonotonicNonNull HandlerWrapper handler;
private @MonotonicNonNull VideoGraph videoGraph;
private @MonotonicNonNull VideoFrameMetadataListener videoFrameMetadataListener;
private long outputStreamStartPositionUs;
private @VideoSink.FirstFrameReleaseInstruction int nextFirstOutputFrameReleaseInstruction;
private @VideoSink.FirstFrameReleaseInstruction int outputStreamFirstFrameReleaseInstruction;
@Nullable private Pair<Surface, Size> currentSurfaceAndSize;
private int pendingFlushCount;
private @State int state;
@ -331,7 +330,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
private PlaybackVideoGraphWrapper(Builder builder) {
context = builder.context;
streamStartPositionsUs = new TimedValueQueue<>();
pendingStreamChanges = new TimedValueQueue<>();
videoGraphFactory = checkStateNotNull(builder.videoGraphFactory);
inputVideoSinks = new SparseArray<>();
compositionEffects = builder.compositionEffects;
@ -432,13 +431,13 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// We forward output size changes to the sink even if we are still flushing.
videoGraphOutputFormat =
videoGraphOutputFormat.buildUpon().setWidth(width).setHeight(height).build();
onOutputStreamChanged(nextFirstOutputFrameReleaseInstruction);
onOutputStreamChanged();
}
@Override
public void onOutputFrameRateChanged(float frameRate) {
videoGraphOutputFormat = videoGraphOutputFormat.buildUpon().setFrameRate(frameRate).build();
onOutputStreamChanged(nextFirstOutputFrameReleaseInstruction);
onOutputStreamChanged();
}
@Override
@ -469,13 +468,11 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
// The frame presentation time is relative to the start of the Composition and without the
// renderer offset
lastOutputBufferPresentationTimeUs = bufferPresentationTimeUs;
Long newOutputStreamStartPositionUs =
streamStartPositionsUs.pollFloor(bufferPresentationTimeUs);
if (newOutputStreamStartPositionUs != null
&& newOutputStreamStartPositionUs != outputStreamStartPositionUs) {
outputStreamStartPositionUs = newOutputStreamStartPositionUs;
onOutputStreamChanged(nextFirstOutputFrameReleaseInstruction);
nextFirstOutputFrameReleaseInstruction = RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
StreamChangeInfo streamChangeInfo = pendingStreamChanges.pollFloor(bufferPresentationTimeUs);
if (streamChangeInfo != null) {
outputStreamStartPositionUs = streamChangeInfo.startPositionUs;
outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction;
onOutputStreamChanged();
}
boolean isLastFrame =
finalBufferPresentationTimeUs != C.TIME_UNSET
@ -595,9 +592,8 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
}
private boolean isReady(boolean rendererOtherwiseReady) {
return defaultVideoSink.isReady(
/* rendererOtherwiseReady= */ rendererOtherwiseReady && pendingFlushCount == 0);
private boolean isReady(boolean otherwiseReady) {
return defaultVideoSink.isReady(/* otherwiseReady= */ otherwiseReady && pendingFlushCount == 0);
}
private boolean isEnded() {
@ -623,13 +619,15 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
pendingFlushCount++;
defaultVideoSink.flush(resetPosition);
while (streamStartPositionsUs.size() > 1) {
streamStartPositionsUs.pollFirst();
while (pendingStreamChanges.size() > 1) {
pendingStreamChanges.pollFirst();
}
if (streamStartPositionsUs.size() == 1) {
// Use the latest startPositionUs if none is passed after flushing.
outputStreamStartPositionUs = checkNotNull(streamStartPositionsUs.pollFirst());
onOutputStreamChanged(nextFirstOutputFrameReleaseInstruction);
if (pendingStreamChanges.size() == 1) {
// Use the latest stream change info if none is passed after flushing.
StreamChangeInfo streamChangeInfo = checkNotNull(pendingStreamChanges.pollFirst());
outputStreamStartPositionUs = streamChangeInfo.startPositionUs;
outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction;
onOutputStreamChanged();
}
lastOutputBufferPresentationTimeUs = C.TIME_UNSET;
finalBufferPresentationTimeUs = C.TIME_UNSET;
@ -667,13 +665,12 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
return inputColorInfo;
}
private void onOutputStreamChanged(
@VideoSink.FirstFrameReleaseInstruction int firstFrameReleaseInstruction) {
private void onOutputStreamChanged() {
defaultVideoSink.onInputStreamChanged(
INPUT_TYPE_SURFACE,
videoGraphOutputFormat,
outputStreamStartPositionUs,
firstFrameReleaseInstruction,
outputStreamFirstFrameReleaseInstruction,
/* videoEffects= */ ImmutableList.of());
}
@ -713,21 +710,13 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
@Override
public void onRendererEnabled(boolean mayRenderStartOfStream) {
nextFirstOutputFrameReleaseInstruction =
mayRenderStartOfStream
? RELEASE_FIRST_FRAME_IMMEDIATELY
: RELEASE_FIRST_FRAME_WHEN_STARTED;
public void onStarted() {
defaultVideoSink.onStarted();
}
@Override
public void onRendererStarted() {
defaultVideoSink.onRendererStarted();
}
@Override
public void onRendererStopped() {
defaultVideoSink.onRendererStopped();
public void onStopped() {
defaultVideoSink.onStopped();
}
@Override
@ -770,9 +759,9 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
@Override
public boolean isReady(boolean rendererOtherwiseReady) {
public boolean isReady(boolean otherwiseReady) {
return PlaybackVideoGraphWrapper.this.isReady(
/* rendererOtherwiseReady= */ rendererOtherwiseReady && isInitialized());
/* otherwiseReady= */ otherwiseReady && isInitialized());
}
@Override
@ -808,12 +797,8 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@FirstFrameReleaseInstruction int firstFrameReleaseInstruction,
List<Effect> videoEffects) {
checkState(isInitialized());
switch (inputType) {
case INPUT_TYPE_SURFACE:
case INPUT_TYPE_BITMAP:
break;
default:
throw new UnsupportedOperationException("Unsupported input type " + inputType);
if (inputType != INPUT_TYPE_SURFACE && inputType != INPUT_TYPE_BITMAP) {
throw new UnsupportedOperationException("Unsupported input type " + inputType);
}
setPendingVideoEffects(videoEffects);
this.inputType = inputType;
@ -822,11 +807,56 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
hasSignaledEndOfCurrentInputStream = false;
registerInputStream(format);
// Input timestamps should always be positive because they are offset by ExoPlayer. Adding a
// position to the queue with timestamp 0 should therefore always apply it as long as it is
// the only position in the queue.
streamStartPositionsUs.add(
lastBufferPresentationTimeUs == C.TIME_UNSET ? 0 : lastBufferPresentationTimeUs + 1,
startPositionUs);
// stream change info to the queue with timestamp 0 should therefore always apply it as long
// as it is the only one in the queue.
long fromTimestampUs =
lastBufferPresentationTimeUs == C.TIME_UNSET ? 0 : lastBufferPresentationTimeUs + 1;
pendingStreamChanges.add(
fromTimestampUs,
new StreamChangeInfo(startPositionUs, firstFrameReleaseInstruction, fromTimestampUs));
}
@Override
public void allowReleaseFirstFrameBeforeStarted() {
// We know that this sink is connected to renderers. Each renderer will first queue a stream
// change that has firstFrameReleaseInstruction set to either RELEASE_FIRST_FRAME_IMMEDIATELY
// or RELEASE_FIRST_FRAME_WHEN_STARTED, and then queue stream changes that have
// firstFrameReleaseInstruction set to RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED.
// When a renderer queues the first stream change, all previous streams should have been fully
// processed.
// We want to release the first frame immediately if the firstFrameReleaseInstruction of the
// first stream change queued by the current renderer was RELEASE_FIRST_FRAME_WHEN_STARTED and
// the first frame hasn't been released yet.
if (pendingStreamChanges.size() == 0) {
// All the stream changes have already been processed by the VideoGraph. Delegate to the
// downstream component.
defaultVideoSink.allowReleaseFirstFrameBeforeStarted();
return;
}
TimedValueQueue<StreamChangeInfo> newPendingStreamChanges = new TimedValueQueue<>();
boolean isFirstStreamChange = true;
while (pendingStreamChanges.size() > 0) {
StreamChangeInfo streamChangeInfo = checkNotNull(pendingStreamChanges.pollFirst());
if (isFirstStreamChange) {
if (streamChangeInfo.firstFrameReleaseInstruction == RELEASE_FIRST_FRAME_IMMEDIATELY
|| streamChangeInfo.firstFrameReleaseInstruction
== RELEASE_FIRST_FRAME_WHEN_STARTED) {
// The first stream change hasn't been processed by the VideoGraph yet.
streamChangeInfo =
new StreamChangeInfo(
streamChangeInfo.startPositionUs,
RELEASE_FIRST_FRAME_IMMEDIATELY,
streamChangeInfo.fromTimestampUs);
} else {
// The first stream change has already been processed by the VideoGraph. Delegate to the
// downstream component.
defaultVideoSink.allowReleaseFirstFrameBeforeStarted();
}
isFirstStreamChange = false;
}
newPendingStreamChanges.add(streamChangeInfo.fromTimestampUs, streamChangeInfo);
}
pendingStreamChanges = newPendingStreamChanges;
}
@Override
@ -883,13 +913,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
defaultVideoSink.setChangeFrameRateStrategy(changeFrameRateStrategy);
}
@Override
public void enableMayRenderStartOfStream() {
if (nextFirstOutputFrameReleaseInstruction == RELEASE_FIRST_FRAME_WHEN_STARTED) {
nextFirstOutputFrameReleaseInstruction = RELEASE_FIRST_FRAME_IMMEDIATELY;
}
}
@Override
public boolean handleInputFrame(
long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
@ -1060,6 +1083,21 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
}
}
private static final class StreamChangeInfo {
public final long startPositionUs;
public final @VideoSink.FirstFrameReleaseInstruction int firstFrameReleaseInstruction;
public final long fromTimestampUs;
public StreamChangeInfo(
long startPositionUs,
@VideoSink.FirstFrameReleaseInstruction int firstFrameReleaseInstruction,
long fromTimestampUs) {
this.startPositionUs = startPositionUs;
this.firstFrameReleaseInstruction = firstFrameReleaseInstruction;
this.fromTimestampUs = fromTimestampUs;
}
}
/** Delays reflection for loading a {@link VideoGraph.Factory SingleInputVideoGraph} instance. */
private static final class ReflectiveSingleInputVideoGraphFactory implements VideoGraph.Factory {

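For context, the pending stream change queue relies on `TimedValueQueue` semantics: each value applies from its timestamp onward, and `pollFloor` returns the entry with the greatest timestamp at or below the query, discarding earlier ones. A small sketch using only the API calls visible in this diff:

```java
import androidx.media3.common.util.TimedValueQueue;

TimedValueQueue<String> pending = new TimedValueQueue<>();
pending.add(/* timestamp= */ 0, "stream A"); // applies from 0 us
pending.add(/* timestamp= */ 1_000_000, "stream B"); // applies from 1 s

// Returns "stream A", the entry with the largest timestamp <= 500_000.
String active = pending.pollFloor(500_000);
// "stream B" stays queued until a buffer at or past its timestamp arrives.
```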
View File

@ -36,12 +36,7 @@ import java.lang.annotation.Target;
import java.util.List;
import java.util.concurrent.Executor;
/**
* A sink that consumes decoded video frames and images from video and image {@linkplain
* androidx.media3.exoplayer.Renderer renderers}.
*
* <p>Multiple renderers can feed the same sink, but not in parallel.
*/
/** A sink that consumes decoded video frames and images. */
@UnstableApi
public interface VideoSink {
@ -154,14 +149,11 @@ public interface VideoSink {
*/
int RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED = 2;
/** Called when the {@link Renderer} currently feeding this sink is enabled. */
void onRendererEnabled(boolean mayRenderStartOfStream);
/** Called when rendering starts. */
void onStarted();
/** Called when the {@link Renderer} currently feeding this sink is started. */
void onRendererStarted();
/** Called when the {@link Renderer} currently feeding this sink is stopped. */
void onRendererStopped();
/** Called when rendering stops. */
void onStopped();
/**
* Sets a {@link Listener} on this sink. Callbacks are triggered on the supplied {@link Executor}.
@ -199,12 +191,11 @@ public interface VideoSink {
* Returns whether the video sink is able to immediately render media to its output surface from
* the current position.
*
* <p>The renderer should be {@linkplain Renderer#isReady() ready} if and only if the video sink
* is ready.
* <p>The caller should be ready if and only if the video sink is ready.
*
* @param rendererOtherwiseReady Whether the renderer is ready except for the video sink.
* @param otherwiseReady Whether the caller is ready except for the video sink.
*/
boolean isReady(boolean rendererOtherwiseReady);
boolean isReady(boolean otherwiseReady);
/** Signals the end of the current input stream. */
void signalEndOfCurrentInputStream();
@ -261,15 +252,6 @@ public interface VideoSink {
*/
void setChangeFrameRateStrategy(@C.VideoChangeFrameRateStrategy int changeFrameRateStrategy);
/**
* Enables this video sink to render the start of the stream to its output surface even if the
* renderer is not {@linkplain #onRendererStarted() started} yet.
*
* <p>This is used to update the value of {@code mayRenderStartOfStream} passed to {@link
* #onRendererEnabled(boolean)}.
*/
void enableMayRenderStartOfStream();
/**
* Informs the video sink that a new input stream will be queued with the given effects.
*
@ -290,6 +272,15 @@ public interface VideoSink {
@FirstFrameReleaseInstruction int firstFrameReleaseInstruction,
List<Effect> videoEffects);
/**
* Allows the sink to release the first frame even if rendering is not {@linkplain #onStarted()
* started}.
*
* <p>This is used to update the {@link FirstFrameReleaseInstruction} of the {@linkplain
* #onInputStreamChanged(int, Format, long, int, List) stream} that is currently being processed.
*/
void allowReleaseFirstFrameBeforeStarted();
/**
* Handles a video input frame.
*

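Taken together, the renamed callbacks imply a call sequence like the following sketch (the sink and format are placeholders; only methods from this interface are used):

```java
import androidx.media3.common.Format;
import androidx.media3.exoplayer.video.VideoSink;
import com.google.common.collect.ImmutableList;

void feed(VideoSink videoSink, Format format) {
  // Queue a stream whose first frame should wait until rendering is started.
  videoSink.onInputStreamChanged(
      VideoSink.INPUT_TYPE_SURFACE,
      format,
      /* startPositionUs= */ 0,
      VideoSink.RELEASE_FIRST_FRAME_WHEN_STARTED,
      /* videoEffects= */ ImmutableList.of());
  // Optionally lift that restriction for the current stream.
  videoSink.allowReleaseFirstFrameBeforeStarted();
  videoSink.onStarted();
  // ... handle input frames ...
  videoSink.onStopped();
}
```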
View File

@ -1524,6 +1524,91 @@ public class ExoPlayerWithPrewarmingRenderersTest {
assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_ENABLED);
}
@Test
public void
play_recoverableErrorWithPrimaryRendererDuringPrewarming_doesNotResetSecondaryRenderer()
throws Exception {
Clock fakeClock = new FakeClock(/* isAutoAdvancing= */ true);
Player.Listener listener = mock(Player.Listener.class);
AtomicBoolean shouldPrimaryRendererThrowRecoverable = new AtomicBoolean(false);
ExoPlayer player =
new TestExoPlayerBuilder(context)
.setClock(fakeClock)
.setRenderersFactory(
new FakeRenderersFactorySupportingSecondaryVideoRenderer(fakeClock) {
@Override
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput) {
HandlerWrapper clockAwareHandler =
clock.createHandler(eventHandler.getLooper(), /* callback= */ null);
return new Renderer[] {
new FakeVideoRenderer(clockAwareHandler, videoRendererEventListener) {
@Override
public void render(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
if (!shouldPrimaryRendererThrowRecoverable.get()) {
super.render(positionUs, elapsedRealtimeUs);
} else {
shouldPrimaryRendererThrowRecoverable.set(false);
throw createRendererException(
new MediaCodecRenderer.DecoderInitializationException(
new Format.Builder().build(),
new IllegalArgumentException(),
false,
0),
this.getFormatHolder().format,
true,
PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
}
}
},
new FakeAudioRenderer(clockAwareHandler, audioRendererEventListener)
};
}
})
.build();
player.addListener(listener);
Renderer videoRenderer = player.getRenderer(/* index= */ 0);
Renderer secondaryVideoRenderer = player.getSecondaryRenderer(/* index= */ 0);
// Set a playlist that allows a new renderer to be enabled early.
player.setMediaSources(
ImmutableList.of(
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
new FakeBlockingMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT),
new FakeMediaSource(new FakeTimeline(), ExoPlayerTestRunner.VIDEO_FORMAT)));
player.prepare();
// Play a bit until the second renderer is pre-warming.
player.play();
advance(player)
.untilBackgroundThreadCondition(
() -> secondaryVideoRenderer.getState() == Renderer.STATE_ENABLED);
@Renderer.State int videoState1 = videoRenderer.getState();
@Renderer.State int secondaryVideoState1 = secondaryVideoRenderer.getState();
advance(player)
.untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_ENABLED);
@Renderer.State int videoState2 = videoRenderer.getState();
@Renderer.State int secondaryVideoState2 = secondaryVideoRenderer.getState();
shouldPrimaryRendererThrowRecoverable.set(true);
advance(player)
.untilBackgroundThreadCondition(() -> videoRenderer.getState() == Renderer.STATE_DISABLED);
@Renderer.State int videoState3 = videoRenderer.getState();
@Renderer.State int secondaryVideoState3 = secondaryVideoRenderer.getState();
player.release();
verify(listener).onPositionDiscontinuity(any(), any(), anyInt());
assertThat(videoState1).isEqualTo(Renderer.STATE_STARTED);
assertThat(secondaryVideoState1).isEqualTo(Renderer.STATE_ENABLED);
assertThat(videoState2).isEqualTo(Renderer.STATE_ENABLED);
assertThat(secondaryVideoState2).isEqualTo(Renderer.STATE_STARTED);
assertThat(videoState3).isEqualTo(Renderer.STATE_DISABLED);
assertThat(secondaryVideoState3).isEqualTo(Renderer.STATE_STARTED);
}
/** {@link FakeMediaSource} that prevents any reading of samples off the sample queue. */
private static final class FakeBlockingMediaSource extends FakeMediaSource {

View File

@ -145,6 +145,7 @@ public final class OggOpusPlaybackTest {
}
}
// TODO: b/406216855 - Remove this and use CapturingAudioSink instead.
private static final class DumpingAudioSink extends ForwardingAudioSink
implements Dumper.Dumpable {
/** All handleBuffer interactions recorded with this audio sink. */

View File

@ -40,6 +40,7 @@ import androidx.media3.exoplayer.source.MediaSourceFactory;
import androidx.media3.exoplayer.source.SinglePeriodTimeline;
import androidx.media3.exoplayer.upstream.Allocator;
import androidx.media3.exoplayer.upstream.LoadErrorHandlingPolicy;
import com.google.common.base.Ascii;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import javax.net.SocketFactory;
@ -182,13 +183,21 @@ public final class RtspMediaSource extends BaseMediaSource {
checkNotNull(mediaItem.localConfiguration);
return new RtspMediaSource(
mediaItem,
forceUseRtpTcp
shouldForceUseRtpTcp(mediaItem)
? new TransferRtpDataChannelFactory(timeoutMs)
: new UdpDataSourceRtpDataChannelFactory(timeoutMs),
userAgent,
socketFactory,
debugLoggingEnabled);
}
private boolean shouldForceUseRtpTcp(MediaItem mediaItem) {
if (forceUseRtpTcp) {
return true;
}
@Nullable String scheme = checkNotNull(mediaItem.localConfiguration).uri.getScheme();
return scheme != null && Ascii.equalsIgnoreCase("rtspt", scheme);
}
}
/** Thrown when an exception or error is encountered during loading an RTSP stream. */
@ -237,7 +246,7 @@ public final class RtspMediaSource extends BaseMediaSource {
this.mediaItem = mediaItem;
this.rtpDataChannelFactory = rtpDataChannelFactory;
this.userAgent = userAgent;
this.uri = checkNotNull(mediaItem.localConfiguration).uri;
this.uri = maybeConvertRtsptUriScheme(checkNotNull(mediaItem.localConfiguration).uri);
this.socketFactory = socketFactory;
this.debugLoggingEnabled = debugLoggingEnabled;
this.timelineDurationUs = C.TIME_UNSET;
@ -262,7 +271,8 @@ public final class RtspMediaSource extends BaseMediaSource {
@Override
public boolean canUpdateMediaItem(MediaItem mediaItem) {
@Nullable MediaItem.LocalConfiguration newConfiguration = mediaItem.localConfiguration;
return newConfiguration != null && newConfiguration.uri.equals(this.uri);
return newConfiguration != null
&& maybeConvertRtsptUriScheme(newConfiguration.uri).equals(this.uri);
}
@Override
@ -309,6 +319,14 @@ public final class RtspMediaSource extends BaseMediaSource {
// Internal methods.
private static Uri maybeConvertRtsptUriScheme(Uri uri) {
@Nullable String scheme = uri.getScheme();
if (scheme == null || !Ascii.equalsIgnoreCase("rtspt", scheme)) {
return uri;
}
return Uri.parse("rtsp" + uri.toString().substring(5));
}
private void notifySourceInfoRefreshed() {
Timeline timeline =
new SinglePeriodTimeline(

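The conversion keeps everything after the scheme intact; a quick illustration with a hypothetical URL, mirroring the substring logic in `maybeConvertRtsptUriScheme` above:

```java
import android.net.Uri;

// "rtspt://..." becomes "rtsp://..."; the 5-character scheme prefix is
// replaced regardless of its case.
Uri input = Uri.parse("rtspt://example.com:554/cam1");
Uri converted = Uri.parse("rtsp" + input.toString().substring(5));
// converted.toString() -> "rtsp://example.com:554/cam1"
```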
View File

@ -66,6 +66,18 @@ public class RtspMediaSourceTest {
assertThat(canUpdateMediaItem).isFalse();
}
@Test
public void canUpdateMediaItem_withChangeToRtspFromRtspt_returnsTrue() {
MediaItem initialMediaItem = new MediaItem.Builder().setUri("rtspt://test.test").build();
MediaItem updatedMediaItem =
TestUtil.buildFullyCustomizedMediaItem().buildUpon().setUri("rtsp://test.test").build();
MediaSource mediaSource = buildMediaSource(initialMediaItem);
boolean canUpdateMediaItem = mediaSource.canUpdateMediaItem(updatedMediaItem);
assertThat(canUpdateMediaItem).isTrue();
}
@Test
public void updateMediaItem_createsTimelineWithUpdatedItem() throws Exception {
MediaItem initialMediaItem =

View File

@ -80,12 +80,7 @@ public class CapturingAudioSink extends ForwardingAudioSink implements Dumper.Du
public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
throws ConfigurationException {
this.format = inputFormat;
interceptedData.add(
new DumpableConfiguration(
inputFormat.pcmEncoding,
inputFormat.channelCount,
inputFormat.sampleRate,
outputChannels));
interceptedData.add(new DumpableConfiguration(inputFormat, outputChannels));
super.configure(inputFormat, specifiedBufferSize, outputChannels);
}
@ -158,29 +153,26 @@ public class CapturingAudioSink extends ForwardingAudioSink implements Dumper.Du
private static final class DumpableConfiguration implements Dumper.Dumpable {
private final @C.PcmEncoding int inputPcmEncoding;
private final int inputChannelCount;
private final int inputSampleRate;
private final Format inputFormat;
@Nullable private final int[] outputChannels;
public DumpableConfiguration(
@C.PcmEncoding int inputPcmEncoding,
int inputChannelCount,
int inputSampleRate,
@Nullable int[] outputChannels) {
this.inputPcmEncoding = inputPcmEncoding;
this.inputChannelCount = inputChannelCount;
this.inputSampleRate = inputSampleRate;
public DumpableConfiguration(Format inputFormat, @Nullable int[] outputChannels) {
this.inputFormat = inputFormat;
this.outputChannels = outputChannels;
}
@Override
public void dump(Dumper dumper) {
dumper.startBlock("config");
if (inputFormat.sampleMimeType != null
&& !inputFormat.sampleMimeType.equals(MimeTypes.AUDIO_RAW)) {
dumper.add("mimeType", inputFormat.sampleMimeType);
}
dumper
.startBlock("config")
.add("pcmEncoding", inputPcmEncoding)
.add("channelCount", inputChannelCount)
.add("sampleRate", inputSampleRate);
.addIfNonDefault("pcmEncoding", inputFormat.pcmEncoding, Format.NO_VALUE)
.addIfNonDefault("channelCount", inputFormat.channelCount, Format.NO_VALUE)
.addIfNonDefault("sampleRate", inputFormat.sampleRate, Format.NO_VALUE);
if (outputChannels != null) {
dumper.add("outputChannels", Arrays.toString(outputChannels));
}

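To make the motivating commit message concrete: an encoded-audio `Format` typically leaves the PCM-only fields unset, so `addIfNonDefault` omits them from the dump. A sketch with an assumed AC-3 input format:

```java
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;

// pcmEncoding stays at Format.NO_VALUE here, so the dump would contain the
// mimeType, channelCount and sampleRate entries but no pcmEncoding line.
Format encodedFormat =
    new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_AC3)
        .setChannelCount(6)
        .setSampleRate(48_000)
        .build();
```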
View File

@ -82,18 +82,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public void onRendererEnabled(boolean mayRenderStartOfStream) {
executeOrDelay(videoSink -> videoSink.onRendererEnabled(mayRenderStartOfStream));
public void onStarted() {
executeOrDelay(VideoSink::onStarted);
}
@Override
public void onRendererStarted() {
executeOrDelay(VideoSink::onRendererStarted);
}
@Override
public void onRendererStopped() {
executeOrDelay(VideoSink::onRendererStopped);
public void onStopped() {
executeOrDelay(VideoSink::onStopped);
}
@Override
@ -138,10 +133,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* is {@code null}.
*/
@Override
public boolean isReady(boolean rendererOtherwiseReady) {
public boolean isReady(boolean otherwiseReady) {
// Return true if the VideoSink is null to indicate that the renderer can be started. Indeed,
// for prewarming, a VideoSink is set on the BufferingVideoSink when the renderer is started.
return videoSink == null || videoSink.isReady(rendererOtherwiseReady);
return videoSink == null || videoSink.isReady(otherwiseReady);
}
@Override
@ -213,11 +208,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
executeOrDelay(videoSink -> videoSink.setChangeFrameRateStrategy(changeFrameRateStrategy));
}
@Override
public void enableMayRenderStartOfStream() {
executeOrDelay(VideoSink::enableMayRenderStartOfStream);
}
@Override
public void onInputStreamChanged(
@InputType int inputType,
@ -231,6 +221,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
inputType, format, startPositionUs, firstFrameReleaseInstruction, videoEffects));
}
@Override
public void allowReleaseFirstFrameBeforeStarted() {
executeOrDelay(VideoSink::allowReleaseFirstFrameBeforeStarted);
}
/**
* {@inheritDoc}
*

View File

@ -24,7 +24,9 @@ import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.exoplayer.DefaultRenderersFactory.DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS;
import static androidx.media3.exoplayer.DefaultRenderersFactory.MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_IMMEDIATELY;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
import static androidx.media3.exoplayer.video.VideoSink.RELEASE_FIRST_FRAME_WHEN_STARTED;
import android.content.Context;
import android.graphics.Bitmap;
@ -435,12 +437,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
protected void changeVideoSinkInputStream(
VideoSink videoSink, @VideoSink.InputType int inputType, Format format) {
VideoSink videoSink,
@VideoSink.InputType int inputType,
Format format,
@VideoSink.FirstFrameReleaseInstruction int firstFrameReleaseInstruction) {
videoSink.onInputStreamChanged(
inputType,
format,
getOutputStreamStartPositionUs(),
RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED,
firstFrameReleaseInstruction,
pendingEffects);
}
@ -493,6 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private boolean inputStreamPending;
private long streamStartPositionUs;
private boolean mayRenderStartOfStream;
private @VideoSink.FirstFrameReleaseInstruction int nextFirstFrameReleaseInstruction;
private long offsetToCompositionTimeUs;
public SequenceImageRenderer(
@ -513,7 +519,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throws ExoPlaybackException {
super.onEnabled(joining, mayRenderStartOfStream);
this.mayRenderStartOfStream = mayRenderStartOfStream;
videoSink.onRendererEnabled(mayRenderStartOfStream);
nextFirstFrameReleaseInstruction =
mayRenderStartOfStream
? RELEASE_FIRST_FRAME_IMMEDIATELY
: RELEASE_FIRST_FRAME_WHEN_STARTED;
// TODO: b/328444280 - Do not set a listener on VideoSink, but MediaCodecVideoRenderer must
// unregister itself as a listener too.
videoSink.setListener(VideoSink.Listener.NO_OP, /* executor= */ (runnable) -> {});
@ -531,7 +540,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (mayRenderStartOfStream) {
// The image renderer is not playing after a video. We must wait until the first frame is
// rendered.
return videoSink.isReady(/* rendererOtherwiseReady= */ super.isReady());
return videoSink.isReady(/* otherwiseReady= */ super.isReady());
} else {
// The image renderer is playing after a video. We don't need to wait until the first frame
// is rendered.
@ -557,7 +566,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
protected void onStarted() throws ExoPlaybackException {
super.onStarted();
videoSink.onRendererStarted();
videoSink.onStarted();
}
@Override
@ -576,7 +585,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
protected void onStopped() {
super.onStopped();
videoSink.onRendererStopped();
videoSink.onStopped();
}
@Override
@ -630,8 +639,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
.setFrameRate(/* frameRate= */ DEFAULT_FRAME_RATE)
.build(),
streamStartPositionUs,
RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED,
nextFirstFrameReleaseInstruction,
videoEffects);
nextFirstFrameReleaseInstruction = RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED;
inputStreamPending = false;
}
if (!videoSink.handleInputBitmap(outputImage, checkStateNotNull(timestampIterator))) {

View File

@ -40,26 +40,25 @@ public class BufferingVideoSinkTest {
VideoSink videoSinkMock = mock(VideoSink.class);
bufferingVideoSink.setVideoSink(videoSinkMock);
bufferingVideoSink.onRendererEnabled(/* mayRenderStartOfStream= */ true);
bufferingVideoSink.onRendererStarted();
bufferingVideoSink.onStarted();
bufferingVideoSink.flush(/* resetPosition= */ true);
InOrder inOrder = Mockito.inOrder(videoSinkMock);
inOrder.verify(videoSinkMock).onRendererEnabled(/* mayRenderStartOfStream= */ true);
inOrder.verify(videoSinkMock).onRendererStarted();
inOrder.verify(videoSinkMock).onStarted();
inOrder.verify(videoSinkMock).flush(/* resetPosition= */ true);
}
@Test
public void setVideoSink_executesPendingOperations() {
BufferingVideoSink bufferingVideoSink = new BufferingVideoSink(context);
VideoSink videoSinkMock = mock(VideoSink.class);
bufferingVideoSink.onRendererEnabled(/* mayRenderStartOfStream= */ true);
bufferingVideoSink.onRendererStarted();
bufferingVideoSink.onStarted();
bufferingVideoSink.flush(/* resetPosition= */ true);
bufferingVideoSink.setVideoSink(videoSinkMock);
InOrder inOrder = Mockito.inOrder(videoSinkMock);
inOrder.verify(videoSinkMock).onRendererEnabled(/* mayRenderStartOfStream= */ true);
inOrder.verify(videoSinkMock).onRendererStarted();
inOrder.verify(videoSinkMock).onStarted();
inOrder.verify(videoSinkMock).flush(/* resetPosition= */ true);
}
@Test
@ -69,11 +68,11 @@ public class BufferingVideoSinkTest {
bufferingVideoSink.setVideoSink(videoSinkMock);
bufferingVideoSink.setVideoSink(null);
bufferingVideoSink.onRendererEnabled(/* mayRenderStartOfStream= */ true);
bufferingVideoSink.onRendererStarted();
bufferingVideoSink.onStarted();
bufferingVideoSink.flush(/* resetPosition= */ true);
verify(videoSinkMock, never()).onRendererEnabled(/* mayRenderStartOfStream= */ true);
verify(videoSinkMock, never()).onRendererStarted();
verify(videoSinkMock, never()).onStarted();
verify(videoSinkMock, never()).flush(/* resetPosition= */ true);
}
@Test
@ -81,12 +80,12 @@ public class BufferingVideoSinkTest {
BufferingVideoSink bufferingVideoSink = new BufferingVideoSink(context);
VideoSink videoSinkMock = mock(VideoSink.class);
bufferingVideoSink.onRendererEnabled(/* mayRenderStartOfStream= */ true);
bufferingVideoSink.onRendererStarted();
bufferingVideoSink.onStarted();
bufferingVideoSink.flush(/* resetPosition= */ true);
bufferingVideoSink.clearPendingOperations();
bufferingVideoSink.setVideoSink(videoSinkMock);
verify(videoSinkMock, never()).onRendererEnabled(/* mayRenderStartOfStream= */ true);
verify(videoSinkMock, never()).onRendererStarted();
verify(videoSinkMock, never()).onStarted();
verify(videoSinkMock, never()).flush(/* resetPosition= */ true);
}
}