Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)

Compare commits: 8 commits, 96222478cd ... 9785c1fd92
Commits in this comparison (SHA1):
9785c1fd92
feae2dadeb
1f3a5e7d1c
ee611a1ab8
f755c7081f
036bed3632
8327a2a52d
18e9a3fe36
@@ -36,6 +36,9 @@
 * DataSource:
 * Audio:
   * Allow constant power upmixing/downmixing in DefaultAudioMixer.
+  * Fix offload issue where position might get stuck when playing a playlist
+    of short content
+    ([#1920](https://github.com/androidx/media/issues/1920)).
 * Video:
   * Add experimental `ExoPlayer` API to include the
     `MediaCodec.BUFFER_FLAG_DECODE_ONLY` flag when queuing decode-only input
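Illustrative sketch (not part of the diff above): the release note refers to an experimental `ExoPlayer` API whose entry point is not named in this compare, so the snippet below only shows the underlying platform flag it forwards, `MediaCodec.BUFFER_FLAG_DECODE_ONLY` (available from API 34). The helper class and method names are hypothetical.

    import android.media.MediaCodec;

    final class DecodeOnlyQueueingSketch {
      // Queues one input buffer flagged as decode-only: the frame is decoded (so it can serve
      // as a reference) but is not surfaced as output, which is useful when pre-rolling to a
      // seek position.
      static void queueDecodeOnlySample(
          MediaCodec codec, int inputIndex, int sampleSize, long presentationTimeUs) {
        codec.queueInputBuffer(
            inputIndex, /* offset= */ 0, sampleSize, presentationTimeUs,
            MediaCodec.BUFFER_FLAG_DECODE_ONLY);
      }
    }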
@@ -63,7 +63,7 @@ public class HttpEngineDataSourceContractTest extends DataSourceContractTest {
         new HttpEngine.Builder(ApplicationProvider.getApplicationContext()).build();
     return new HttpEngineDataSource.Factory(httpEngine, executorService)
         // Ensure that 'resource not found' tests fail fast (b/403179253).
-        .setConnectionTimeoutMs(400)
+        .setConnectionTimeoutMs(600)
         .setReadTimeoutMs(400)
         .createDataSource();
   }
@@ -2820,6 +2820,10 @@ import java.util.Objects;
   private void maybeUpdateOffloadScheduling() {
     // If playing period is audio-only with offload mode preference to enable, then offload
     // scheduling should be enabled.
+    if (queue.getPlayingPeriod() != queue.getReadingPeriod()) {
+      // Do not enable offload scheduling when starting to process the next media item.
+      return;
+    }
     @Nullable MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
     if (playingPeriodHolder != null) {
       TrackSelectorResult trackSelectorResult = playingPeriodHolder.getTrackSelectorResult();
@@ -102,6 +102,11 @@ public final class DefaultAudioSink implements AudioSink {
     /** Returns a new {@link AudioTrack} for the given parameters. */
     AudioTrack getAudioTrack(
         AudioTrackConfig audioTrackConfig, AudioAttributes audioAttributes, int audioSessionId);
+
+    /** Returns the channel mask config for the given channel count. */
+    default int getAudioTrackChannelConfig(int channelCount) {
+      return Util.getAudioTrackChannelConfig(channelCount);
+    }
   }

   /**
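Customization sketch (not part of the diff): the new default method makes the channel mask a per-provider decision. The wrapper below is a hypothetical example of overriding it while delegating AudioTrack creation; the nesting and name of the provider interface (assumed here to be DefaultAudioSink.AudioTrackProvider) and the packages of the parameter types are assumptions inferred from the hunk above.

    import android.media.AudioFormat;
    import android.media.AudioTrack;
    import androidx.media3.common.AudioAttributes;
    import androidx.media3.exoplayer.audio.AudioSink.AudioTrackConfig;
    import androidx.media3.exoplayer.audio.DefaultAudioSink;

    // Hypothetical provider that forces a stereo output mask but reuses an existing provider
    // for AudioTrack creation.
    final class ForcedStereoAudioTrackProvider implements DefaultAudioSink.AudioTrackProvider {
      private final DefaultAudioSink.AudioTrackProvider delegate;

      ForcedStereoAudioTrackProvider(DefaultAudioSink.AudioTrackProvider delegate) {
        this.delegate = delegate;
      }

      @Override
      public AudioTrack getAudioTrack(
          AudioTrackConfig audioTrackConfig, AudioAttributes audioAttributes, int audioSessionId) {
        return delegate.getAudioTrack(audioTrackConfig, audioAttributes, audioSessionId);
      }

      @Override
      public int getAudioTrackChannelConfig(int channelCount) {
        // Example policy: always request a stereo channel mask.
        return AudioFormat.CHANNEL_OUT_STEREO;
      }
    }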
@@ -731,7 +736,10 @@ public final class DefaultAudioSink implements AudioSink {
       outputMode = OUTPUT_MODE_PCM;
       outputEncoding = outputFormat.encoding;
       outputSampleRate = outputFormat.sampleRate;
-      outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount);
+      outputChannelConfig =
+          audioTrackProvider.getAudioTrackChannelConfig(outputFormat.channelCount);
       outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount);
       enableAudioTrackPlaybackParams = preferAudioTrackPlaybackParams;
     } else {
@@ -748,7 +756,10 @@ public final class DefaultAudioSink implements AudioSink {
       outputMode = OUTPUT_MODE_OFFLOAD;
       outputEncoding =
           MimeTypes.getEncoding(checkNotNull(inputFormat.sampleMimeType), inputFormat.codecs);
-      outputChannelConfig = Util.getAudioTrackChannelConfig(inputFormat.channelCount);
+      outputChannelConfig =
+          audioTrackProvider.getAudioTrackChannelConfig(inputFormat.channelCount);
       // Offload requires AudioTrack playback parameters to apply speed changes quickly.
       enableAudioTrackPlaybackParams = true;
       enableOffloadGapless = audioOffloadSupport.isGaplessSupported;
@@ -78,12 +78,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public void onStarted() {
+  public void startRendering() {
     videoFrameReleaseControl.onStarted();
   }

   @Override
-  public void onStopped() {
+  public void stopRendering() {
     videoFrameReleaseControl.onStopped();
   }

@@ -153,6 +153,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
    */
   private static final long OFFSET_FROM_PERIOD_END_TO_TREAT_AS_LAST_US = 100_000L;

+  /**
+   * The offset from {@link #getLastResetPositionUs()} in microseconds, before which input buffers
+   * are not allowed to be dropped.
+   *
+   * <p>This value must be greater than the pre-roll distance used by common audio codecs, such as
+   * 80ms used by Opus <a
+   * href="https://opus-codec.org/docs/opus_in_isobmff.html#4.3.6.2">Encapsulation of Opus in ISO
+   * Base Media File Format</a>
+   */
+  private static final long OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US = 200_000L;
+
   /**
    * The maximum number of consecutive dropped input buffers that allow discarding frame headers.
    *
@@ -644,7 +655,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
       boolean treatDroppedBuffersAsSkipped)
       throws ExoPlaybackException {
     if (minEarlyUsToDropDecoderInput != C.TIME_UNSET) {
-      shouldDropDecoderInputBuffers = earlyUs < minEarlyUsToDropDecoderInput;
+      // TODO: b/161996553 - Remove the isAwayFromLastResetPosition check when audio pre-rolling
+      // is implemented correctly. Audio codecs such as Opus require pre-roll samples to be decoded
+      // and discarded on a seek. Depending on the audio decoder, the positionUs may jump forward
+      // by the pre-roll duration. Do not drop more frames than necessary when this happens.
+      boolean isAwayFromLastResetPosition =
+          positionUs
+              > getLastResetPositionUs()
+                  + OFFSET_FROM_RESET_POSITION_TO_ALLOW_INPUT_BUFFER_DROPPING_US;
+      shouldDropDecoderInputBuffers =
+          isAwayFromLastResetPosition && earlyUs < minEarlyUsToDropDecoderInput;
     }
     return shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastFrame)
         && maybeDropBuffersToKeyframe(positionUs, treatDroppedBuffersAsSkipped);
@@ -1058,7 +1078,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
     totalVideoFrameProcessingOffsetUs = 0;
     videoFrameProcessingOffsetCount = 0;
     if (videoSink != null) {
-      videoSink.onStarted();
+      videoSink.startRendering();
     } else {
       videoFrameReleaseControl.onStarted();
     }
@@ -1069,7 +1089,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
     maybeNotifyDroppedFrames();
     maybeNotifyVideoFrameProcessingOffset();
     if (videoSink != null) {
-      videoSink.onStopped();
+      videoSink.stopRendering();
     } else {
       videoFrameReleaseControl.onStopped();
     }
@@ -701,13 +701,13 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
     }

     @Override
-    public void onStarted() {
-      defaultVideoSink.onStarted();
+    public void startRendering() {
+      defaultVideoSink.startRendering();
     }

     @Override
-    public void onStopped() {
-      defaultVideoSink.onStopped();
+    public void stopRendering() {
+      defaultVideoSink.stopRendering();
     }

     @Override
@@ -139,11 +139,11 @@ public interface VideoSink {
    */
   int RELEASE_FIRST_FRAME_WHEN_PREVIOUS_STREAM_PROCESSED = 2;

-  /** Called when rendering starts. */
-  void onStarted();
+  /** Starts rendering to the output surface. */
+  void startRendering();

-  /** Called when rendering stops. */
-  void onStopped();
+  /** Stops rendering to the output surface. */
+  void stopRendering();

   /**
    * Sets a {@link Listener} on this sink. Callbacks are triggered on the supplied {@link Executor}.
@@ -263,8 +263,8 @@ public interface VideoSink {
       List<Effect> videoEffects);

   /**
-   * Allows the sink to release the first frame even if rendering is not {@linkplain #onStarted()
-   * started}.
+   * Allows the sink to release the first frame even if rendering is not {@linkplain
+   * #startRendering() started}.
    *
    * <p>This is used to update the {@link FirstFrameReleaseInstruction} of the {@linkplain
    * #onInputStreamChanged(int, Format, long, int, List) stream} that is currently being processed.
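Migration sketch (not part of the diff): callers that previously forwarded renderer lifecycle events to VideoSink.onStarted()/onStopped() now call the renamed methods, mirroring the renderer hunks elsewhere in this compare. The wrapper class below and the way its videoSink field is populated are placeholders.

    import androidx.media3.exoplayer.BaseRenderer;
    import androidx.media3.exoplayer.ExoPlaybackException;
    import androidx.media3.exoplayer.video.VideoSink;

    abstract class VideoSinkDrivingRenderer extends BaseRenderer {
      // Assumed to be initialized elsewhere; placeholder for this sketch.
      protected VideoSink videoSink;

      VideoSinkDrivingRenderer(int trackType) {
        super(trackType);
      }

      @Override
      protected void onStarted() throws ExoPlaybackException {
        videoSink.startRendering(); // previously videoSink.onStarted()
      }

      @Override
      protected void onStopped() {
        videoSink.stopRendering(); // previously videoSink.onStopped()
      }
    }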
@@ -11787,6 +11787,54 @@ public final class ExoPlayerTest {
     player.release();
   }

+  @Test
+  public void enablingOffload_withFastReadingPeriodAdvancement_playerDoesNotSleep()
+      throws Exception {
+    FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO);
+    AtomicInteger sleepingForOffloadCounter = new AtomicInteger();
+    ExoPlayer player =
+        parameterizeTestExoPlayerBuilder(
+                new TestExoPlayerBuilder(context).setRenderers(sleepRenderer))
+            .build();
+    ExoPlayer.AudioOffloadListener listener =
+        new ExoPlayer.AudioOffloadListener() {
+          @Override
+          public void onSleepingForOffloadChanged(boolean sleepingForOffload) {
+            if (sleepingForOffload) {
+              sleepingForOffloadCounter.getAndIncrement();
+            }
+          }
+        };
+    player.addAudioOffloadListener(listener);
+    // Set a playlist of multiple, short audio-only items such that the reading period quickly
+    // advances past the playing period.
+    Timeline timeline = new FakeTimeline();
+    player.setMediaSources(
+        ImmutableList.of(
+            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
+            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT),
+            new FakeMediaSource(timeline, ExoPlayerTestRunner.AUDIO_FORMAT)));
+    player.setTrackSelectionParameters(
+        player
+            .getTrackSelectionParameters()
+            .buildUpon()
+            .setAudioOffloadPreferences(
+                new AudioOffloadPreferences.Builder()
+                    .setAudioOffloadMode(AudioOffloadPreferences.AUDIO_OFFLOAD_MODE_REQUIRED)
+                    .build())
+            .build());
+    player.prepare();
+    player.play();
+    advance(player).untilStartOfMediaItem(/* mediaItemIndex= */ 1);
+
+    sleepRenderer.sleepOnNextRender();
+    runUntilPlaybackState(player, Player.STATE_ENDED);
+
+    assertThat(sleepingForOffloadCounter.get()).isEqualTo(0);
+
+    player.release();
+  }
+
   @Test
   public void wakeupListenerWhileSleepingForOffload_isWokenUp_renderingResumes() throws Exception {
     FakeSleepRenderer sleepRenderer = new FakeSleepRenderer(C.TRACK_TYPE_AUDIO).sleepOnNextRender();
@@ -29,6 +29,7 @@ import androidx.media3.exoplayer.DecoderCounters;
 import androidx.media3.exoplayer.DefaultRenderersFactory;
 import androidx.media3.exoplayer.ExoPlayer;
 import androidx.media3.exoplayer.Renderer;
+import androidx.media3.exoplayer.analytics.AnalyticsListener;
 import androidx.media3.exoplayer.audio.AudioRendererEventListener;
 import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter;
 import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
@@ -103,6 +104,14 @@ public class ParseAv1SampleDependenciesPlaybackTest {
         new ExoPlayer.Builder(applicationContext, renderersFactory)
             .setClock(new FakeClock(/* isAutoAdvancing= */ true))
             .build();
+    player.addAnalyticsListener(
+        new AnalyticsListener() {
+          @Override
+          public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {
+            // Input buffers near the reset position should not be dropped.
+            assertThat(eventTime.currentPlaybackPositionMs).isAtLeast(200);
+          }
+        });
     Surface surface = new Surface(new SurfaceTexture(/* texName= */ 1));
     player.setVideoSurface(surface);
     player.setMediaItem(MediaItem.fromUri(TEST_MP4_URI));
@@ -121,7 +130,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
     // Which input buffer is dropped first depends on the number of MediaCodec buffer slots.
     // This means the asserts cannot be isEqualTo.
     assertThat(decoderCounters.maxConsecutiveDroppedBufferCount).isAtMost(2);
-    assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(8);
+    assertThat(decoderCounters.droppedInputBufferCount).isAtLeast(4);
   }

   private static final class CapturingRenderersFactoryWithLateThresholdToDropDecoderInputUs
@@ -155,7 +164,6 @@ public class ParseAv1SampleDependenciesPlaybackTest {
             /* enableDecoderFallback= */ false,
             eventHandler,
             videoRendererEventListener,
-            DefaultRenderersFactory.MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY,
             /* parseAv1SampleDependencies= */ true,
             /* lateThresholdToDropDecoderInputUs= */ -100_000_000L)
       };
@@ -173,7 +181,6 @@ public class ParseAv1SampleDependenciesPlaybackTest {
         boolean enableDecoderFallback,
         @Nullable Handler eventHandler,
         @Nullable VideoRendererEventListener eventListener,
-        int maxDroppedFramesToNotify,
         boolean parseAv1SampleDependencies,
         long lateThresholdToDropDecoderInputUs) {
       super(
@@ -184,7 +191,7 @@ public class ParseAv1SampleDependenciesPlaybackTest {
              .setEnableDecoderFallback(enableDecoderFallback)
              .setEventHandler(eventHandler)
              .setEventListener(eventListener)
-             .setMaxDroppedFramesToNotify(maxDroppedFramesToNotify)
+             .setMaxDroppedFramesToNotify(1)
              .experimentalSetParseAv1SampleDependencies(parseAv1SampleDependencies)
              .experimentalSetLateThresholdToDropDecoderInputUs(
                  lateThresholdToDropDecoderInputUs));
@@ -81,13 +81,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public void onStarted() {
-    executeOrDelay(VideoSink::onStarted);
+  public void startRendering() {
+    executeOrDelay(VideoSink::startRendering);
   }

   @Override
-  public void onStopped() {
-    executeOrDelay(VideoSink::onStopped);
+  public void stopRendering() {
+    executeOrDelay(VideoSink::stopRendering);
   }

   @Override
@@ -77,6 +77,7 @@ import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
 import androidx.media3.exoplayer.upstream.Allocator;
 import androidx.media3.exoplayer.util.EventLogger;
 import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
+import androidx.media3.exoplayer.video.VideoFrameMetadataListener;
 import androidx.media3.exoplayer.video.VideoFrameReleaseControl;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -320,6 +321,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
   private @MonotonicNonNull Composition composition;
   private @MonotonicNonNull Size videoOutputSize;
   private @MonotonicNonNull PlaybackVideoGraphWrapper playbackVideoGraphWrapper;
+  private @MonotonicNonNull VideoFrameMetadataListener pendingVideoFrameMetadatListener;

   private long compositionDurationUs;
   private boolean playWhenReady;
@@ -336,7 +338,6 @@ public final class CompositionPlayer extends SimpleBasePlayer
   private LivePositionSupplier positionSupplier;
   private LivePositionSupplier bufferedPositionSupplier;
   private LivePositionSupplier totalBufferedDurationSupplier;
-  private boolean isSeeking;

   // "this" reference for position suppliers.
   @SuppressWarnings("initialization:methodref.receiver.bound.invalid")
@@ -506,9 +507,9 @@ public final class CompositionPlayer extends SimpleBasePlayer
     playWhenReadyChangeReason = PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST;
     if (playbackState == STATE_READY) {
       if (playWhenReady) {
-        finalAudioSink.play();
+        checkStateNotNull(compositionPlayerInternal).startRendering();
       } else {
-        finalAudioSink.pause();
+        checkStateNotNull(compositionPlayerInternal).stopRendering();
       }
       for (int i = 0; i < players.size(); i++) {
         players.get(i).setPlayWhenReady(playWhenReady);
@@ -587,7 +588,9 @@ public final class CompositionPlayer extends SimpleBasePlayer
   @Override
   protected ListenableFuture<?> handleSetVolume(float volume) {
     this.volume = Util.constrainValue(volume, /* min= */ 0.0f, /* max= */ 1.0f);
-    finalAudioSink.setVolume(this.volume);
+    if (compositionPlayerInternal != null) {
+      compositionPlayerInternal.setVolume(this.volume);
+    }
     return Futures.immediateVoidFuture();
   }

@@ -597,7 +600,6 @@ public final class CompositionPlayer extends SimpleBasePlayer
     resetLivePositionSuppliers();
     CompositionPlayerInternal compositionPlayerInternal =
         checkStateNotNull(this.compositionPlayerInternal);
-    isSeeking = true;
     compositionPlayerInternal.startSeek(positionMs);
     for (int i = 0; i < players.size(); i++) {
       players.get(i).seekTo(positionMs);
@@ -606,6 +608,15 @@ public final class CompositionPlayer extends SimpleBasePlayer
     return Futures.immediateVoidFuture();
   }

+  /** Sets the {@link VideoFrameMetadataListener}. */
+  public void setVideoFrameMetadataListener(VideoFrameMetadataListener videoFrameMetadataListener) {
+    if (players.isEmpty()) {
+      pendingVideoFrameMetadatListener = videoFrameMetadataListener;
+      return;
+    }
+    players.get(0).setVideoFrameMetadataListener(videoFrameMetadataListener);
+  }
+
   // CompositionPlayerInternal.Listener methods

   @Override
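Usage sketch (not part of the diff): with the method added above, an app can attach a VideoFrameMetadataListener through CompositionPlayer. The builder call and the listener body below are illustrative assumptions; the lambda parameters follow the existing Media3 VideoFrameMetadataListener interface.

    import android.content.Context;
    import androidx.media3.transformer.CompositionPlayer;

    final class FrameMetadataSketch {
      static CompositionPlayer attachListener(Context context) {
        CompositionPlayer player = new CompositionPlayer.Builder(context).build();
        // The lambda receives (presentationTimeUs, releaseTimeNs, format, mediaFormat) for each
        // video frame shortly before it is rendered.
        player.setVideoFrameMetadataListener(
            (presentationTimeUs, releaseTimeNs, format, mediaFormat) -> {
              // e.g. record presentationTimeUs for debugging or synchronization purposes.
            });
        return player;
      }
    }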
@@ -683,22 +694,17 @@ public final class CompositionPlayer extends SimpleBasePlayer
         for (int i = 0; i < players.size(); i++) {
           players.get(i).setPlayWhenReady(false);
         }
-        if (!isSeeking) {
-          // The finalAudioSink cannot be paused more than once. The audio pipeline pauses it during
-          // a seek, so don't pause here when seeking.
-          finalAudioSink.pause();
-        }
+        checkStateNotNull(compositionPlayerInternal).stopRendering();
       }
     } else if (endedCount == players.size()) {
       playbackState = STATE_ENDED;
     } else {
       playbackState = STATE_READY;
-      isSeeking = false;
       if (oldPlaybackState != STATE_READY && playWhenReady) {
         for (int i = 0; i < players.size(); i++) {
           players.get(i).setPlayWhenReady(true);
         }
-        finalAudioSink.play();
+        checkStateNotNull(compositionPlayerInternal).startRendering();
       }
     }
   }
@@ -775,6 +781,9 @@ public final class CompositionPlayer extends SimpleBasePlayer

       if (i == 0) {
         setPrimaryPlayerSequence(player, editedMediaItemSequence);
+        if (pendingVideoFrameMetadatListener != null) {
+          player.setVideoFrameMetadataListener(pendingVideoFrameMetadatListener);
+        }
       } else {
         setSecondaryPlayerSequence(player, editedMediaItemSequence, primarySequenceDurationUs);
       }
@@ -799,6 +808,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
             playbackVideoGraphWrapper,
             /* listener= */ this,
             compositionInternalListenerHandler);
+    compositionPlayerInternal.setVolume(volume);
   }

   private void setPrimaryPlayerSequence(ExoPlayer player, EditedMediaItemSequence sequence) {
@@ -47,11 +47,14 @@ import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
   }

   private static final String TAG = "CompPlayerInternal";
-  private static final int MSG_SET_OUTPUT_SURFACE_INFO = 1;
-  private static final int MSG_CLEAR_OUTPUT_SURFACE = 2;
-  private static final int MSG_START_SEEK = 3;
-  private static final int MSG_END_SEEK = 4;
-  private static final int MSG_RELEASE = 5;
+  private static final int MSG_START_RENDERING = 1;
+  private static final int MSG_STOP_RENDERING = 2;
+  private static final int MSG_SET_VOLUME = 3;
+  private static final int MSG_SET_OUTPUT_SURFACE_INFO = 4;
+  private static final int MSG_CLEAR_OUTPUT_SURFACE = 5;
+  private static final int MSG_START_SEEK = 6;
+  private static final int MSG_END_SEEK = 7;
+  private static final int MSG_RELEASE = 8;

   private final Clock clock;
   private final HandlerWrapper handler;
@@ -94,6 +97,18 @@ import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;

   // Public methods

+  public void startRendering() {
+    handler.sendEmptyMessage(MSG_START_RENDERING);
+  }
+
+  public void stopRendering() {
+    handler.sendEmptyMessage(MSG_STOP_RENDERING);
+  }
+
+  public void setVolume(float volume) {
+    handler.obtainMessage(MSG_SET_VOLUME, volume).sendToTarget();
+  }
+
   /** Sets the output surface information on the video pipeline. */
   public void setOutputSurfaceInfo(Surface surface, Size size) {
     handler
@@ -103,7 +118,7 @@ import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;

   /** Clears the output surface from the video pipeline. */
   public void clearOutputSurface() {
-    handler.obtainMessage(MSG_CLEAR_OUTPUT_SURFACE).sendToTarget();
+    handler.sendEmptyMessage(MSG_CLEAR_OUTPUT_SURFACE);
   }

   public void startSeek(long positionMs) {
@@ -111,7 +126,7 @@ import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
   }

   public void endSeek() {
-    handler.obtainMessage(MSG_END_SEEK).sendToTarget();
+    handler.sendEmptyMessage(MSG_END_SEEK);
   }

   /**
@@ -139,6 +154,15 @@ import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
   public boolean handleMessage(Message message) {
     try {
       switch (message.what) {
+        case MSG_START_RENDERING:
+          playbackAudioGraphWrapper.startRendering();
+          break;
+        case MSG_STOP_RENDERING:
+          playbackAudioGraphWrapper.stopRendering();
+          break;
+        case MSG_SET_VOLUME:
+          playbackAudioGraphWrapper.setVolume(/* volume= */ (float) message.obj);
+          break;
         case MSG_SET_OUTPUT_SURFACE_INFO:
           setOutputSurfaceInfoOnInternalThread(
               /* outputSurfaceInfo= */ (OutputSurfaceInfo) message.obj);
@@ -47,6 +47,7 @@ import java.util.Objects;
   private AudioFormat outputAudioFormat;
   private long outputFramesWritten;
   private long seekPositionUs;
+  private boolean isRenderingStarted;

   /**
    * Creates an instance.
@@ -135,6 +136,24 @@ import java.util.Objects;
         + sampleCountToDurationUs(outputFramesWritten, outputAudioFormat.sampleRate);
   }

+  public void startRendering() {
+    finalAudioSink.play();
+    isRenderingStarted = true;
+  }
+
+  public void stopRendering() {
+    if (!isRenderingStarted) {
+      // The finalAudioSink cannot be paused more than once.
+      return;
+    }
+    finalAudioSink.pause();
+    isRenderingStarted = false;
+  }
+
+  public void setVolume(float volume) {
+    finalAudioSink.setVolume(volume);
+  }
+
   /**
    * Handles the steps that need to be executed for a seek before seeking the upstream players.
    *
@@ -144,7 +163,7 @@ import java.util.Objects;
     if (positionUs == C.TIME_UNSET) {
       positionUs = 0;
     }
-    finalAudioSink.pause();
+    stopRendering();
     audioGraph.blockInput();
     audioGraph.setPendingStartTimeUs(positionUs);
     audioGraph.flush();
@@ -576,7 +576,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     @Override
     protected void onStarted() throws ExoPlaybackException {
       super.onStarted();
-      videoSink.onStarted();
+      videoSink.startRendering();
     }

     @Override
@@ -595,7 +595,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     @Override
     protected void onStopped() {
       super.onStopped();
-      videoSink.onStopped();
+      videoSink.stopRendering();
     }

     @Override
@@ -756,8 +756,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private void createEncodedSampleExporter(@C.TrackType int trackType) {
     checkState(assetLoaderInputTracker.getSampleExporter(trackType) == null);
     checkArgument(
-        trackType != TRACK_TYPE_AUDIO || !composition.sequences.get(sequenceIndex).hasGaps(),
-        "Gaps can not be transmuxed.");
+        !composition.sequences.get(sequenceIndex).hasGaps(), "Gaps can not be transmuxed.");
     assetLoaderInputTracker.registerSampleExporter(
         trackType,
         new EncodedSampleExporter(
@@ -18,7 +18,6 @@ package androidx.media3.transformer;

 import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
 import static androidx.media3.common.ColorInfo.isTransferHdr;
-import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.exoplayer.mediacodec.MediaCodecUtil.getAlternativeCodecMimeType;
 import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
 import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
@@ -91,8 +90,6 @@ public final class TransformerUtil {
       MuxerWrapper muxerWrapper) {
     if (composition.sequences.size() > 1
         || composition.sequences.get(sequenceIndex).editedMediaItems.size() > 1) {
-      checkArgument(
-          !composition.hasGaps() || !composition.transmuxAudio, "Gaps can not be transmuxed.");
       return !composition.transmuxAudio;
     }
     if (composition.hasGaps()) {
@@ -40,11 +40,11 @@ public class BufferingVideoSinkTest {
     VideoSink videoSinkMock = mock(VideoSink.class);

     bufferingVideoSink.setVideoSink(videoSinkMock);
-    bufferingVideoSink.onStarted();
+    bufferingVideoSink.startRendering();
     bufferingVideoSink.flush(/* resetPosition= */ true);

     InOrder inOrder = Mockito.inOrder(videoSinkMock);
-    inOrder.verify(videoSinkMock).onStarted();
+    inOrder.verify(videoSinkMock).startRendering();
     inOrder.verify(videoSinkMock).flush(/* resetPosition= */ true);
   }

@@ -52,12 +52,12 @@ public class BufferingVideoSinkTest {
   public void setVideoSink_executesPendingOperations() {
     BufferingVideoSink bufferingVideoSink = new BufferingVideoSink(context);
     VideoSink videoSinkMock = mock(VideoSink.class);
-    bufferingVideoSink.onStarted();
+    bufferingVideoSink.startRendering();
     bufferingVideoSink.flush(/* resetPosition= */ true);
     bufferingVideoSink.setVideoSink(videoSinkMock);

     InOrder inOrder = Mockito.inOrder(videoSinkMock);
-    inOrder.verify(videoSinkMock).onStarted();
+    inOrder.verify(videoSinkMock).startRendering();
     inOrder.verify(videoSinkMock).flush(/* resetPosition= */ true);
   }

@@ -68,10 +68,10 @@ public class BufferingVideoSinkTest {
     bufferingVideoSink.setVideoSink(videoSinkMock);

     bufferingVideoSink.setVideoSink(null);
-    bufferingVideoSink.onStarted();
+    bufferingVideoSink.startRendering();
     bufferingVideoSink.flush(/* resetPosition= */ true);

-    verify(videoSinkMock, never()).onStarted();
+    verify(videoSinkMock, never()).startRendering();
     verify(videoSinkMock, never()).flush(/* resetPosition= */ true);
   }

@@ -80,12 +80,12 @@ public class BufferingVideoSinkTest {
     BufferingVideoSink bufferingVideoSink = new BufferingVideoSink(context);
     VideoSink videoSinkMock = mock(VideoSink.class);

-    bufferingVideoSink.onStarted();
+    bufferingVideoSink.startRendering();
     bufferingVideoSink.flush(/* resetPosition= */ true);
     bufferingVideoSink.clearPendingOperations();
     bufferingVideoSink.setVideoSink(videoSinkMock);

-    verify(videoSinkMock, never()).onStarted();
+    verify(videoSinkMock, never()).startRendering();
     verify(videoSinkMock, never()).flush(/* resetPosition= */ true);
   }
 }