Handle buffers in DefaultAudioSink with AudioProcessingPipeline.

PiperOrigin-RevId: 488412695

Commit: 59aedcf309 (parent: 84a378415f)
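The new pipeline is driven through a configure/flush/queueInput/getOutput protocol, documented in the Javadoc of the class added below. A minimal driver loop might look like the following sketch (illustrative only, not part of the diff; resampler, channelMapper, inputFormat, hasMoreInput, nextInputBuffer and consume are hypothetical placeholders):

  AudioProcessingPipeline pipeline =
      new AudioProcessingPipeline(ImmutableList.of(resampler, channelMapper));
  AudioFormat outputFormat = pipeline.configure(inputFormat); // May throw UnhandledAudioFormatException.
  pipeline.flush(); // Applies the pending configuration.
  if (pipeline.isOperational()) {
    while (hasMoreInput()) {
      pipeline.queueInput(nextInputBuffer()); // Position advances by the number of bytes consumed.
      consume(pipeline.getOutput()); // Output must be consumed for the pipeline to make progress.
    }
    pipeline.queueEndOfStream();
    while (!pipeline.isEnded()) {
      consume(pipeline.getOutput()); // Drain any remaining output.
    }
  }
  pipeline.reset(); // Release underlying resources when finished.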
@@ -311,7 +311,7 @@ public class CastTimelineTrackerTest {
}

private static MediaInfo getMediaInfo(long durationMs) {
return new MediaInfo.Builder(/*contentId= */ "")
return new MediaInfo.Builder(/* contentId= */ "")
.setStreamDuration(durationMs)
.setContentType(MimeTypes.APPLICATION_MP4)
.setStreamType(MediaInfo.STREAM_TYPE_NONE)
@@ -0,0 +1,333 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.common.audio;

import static androidx.media3.common.audio.AudioProcessor.EMPTY_BUFFER;
import static androidx.media3.common.util.Assertions.checkState;

import androidx.annotation.Nullable;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

/**
* Handles passing buffers through multiple {@link AudioProcessor} instances.
*
* <p>Two instances of {@link AudioProcessingPipeline} are considered {@linkplain #equals(Object)
* equal} if they have the same underlying {@link AudioProcessor} references, in the same order.
*
* <p>To make use of this class, the caller must:
*
* <ul>
* <li>Initialize an instance, passing in all audio processors that may be used for processing.
* <li>Call {@link #configure(AudioFormat)} with the {@link AudioFormat} of the input data. This
* method will give back the {@link AudioFormat} that will be output from the pipeline when
* this configuration is in use.
* <li>Call {@link #flush()} to apply the pending configuration.
* <li>Check if the pipeline {@link #isOperational()}. If not, then the pipeline can not be used
* to process buffers in the current configuration. This is because none of the underlying
* {@link AudioProcessor} instances are {@linkplain AudioProcessor#isActive active}.
* <li>If the pipeline {@link #isOperational()}, {@link #queueInput(ByteBuffer)} then {@link
* #getOutput()} to process buffers.
* <li>{@link #queueEndOfStream()} to inform the pipeline the current input stream is at an end.
* <li>Repeatedly call {@link #getOutput()} and handle those buffers until {@link #isEnded()}
* returns true.
* <li>When finished with the pipeline, call {@link #reset()} to release underlying resources.
* </ul>
*
* <p>If underlying {@link AudioProcessor} instances have pending configuration changes, or the
* {@link AudioFormat} of the input is changing:
*
* <ul>
* <li>Call {@link #configure(AudioFormat)} to configure the pipeline for the new input stream.
* You can still {@link #queueInput(ByteBuffer)} and {@link #getOutput()} in the old setup at
* this time.
* <li>{@link #queueEndOfStream()} to inform the pipeline the current input stream is at an end.
* <li>Repeatedly call {@link #getOutput()} until {@link #isEnded()} returns true.
* <li>Call {@link #flush()} to apply the new configuration and flush the pipeline.
* <li>Begin {@linkplain #queueInput(ByteBuffer) queuing input} and handling the {@linkplain
* #getOutput() output} in the new configuration.
* </ul>
*/
@UnstableApi
public final class AudioProcessingPipeline {
/** The {@link AudioProcessor} instances passed to {@link AudioProcessingPipeline}. */
private final ImmutableList<AudioProcessor> audioProcessors;
/**
* The processors that are {@linkplain AudioProcessor#isActive() active} based on the current
* configuration.
*/
private final List<AudioProcessor> activeAudioProcessors;

/**
* The buffers output by the {@link #activeAudioProcessors}. This has the same number of elements
* as {@link #activeAudioProcessors}.
*/
private ByteBuffer[] outputBuffers;
/** The {@link AudioFormat} currently being output by the pipeline. */
private AudioFormat outputAudioFormat;
/** The {@link AudioFormat} that will be output following a {@link #flush()}. */
private AudioFormat pendingOutputAudioFormat;
/** Whether input has ended, either due to configuration change or end of stream. */
private boolean inputEnded;

/**
* Creates an instance.
*
* @param audioProcessors The {@link AudioProcessor} instances to be used for processing buffers.
*/
public AudioProcessingPipeline(ImmutableList<AudioProcessor> audioProcessors) {
this.audioProcessors = audioProcessors;
activeAudioProcessors = new ArrayList<>();
outputBuffers = new ByteBuffer[0];
outputAudioFormat = AudioFormat.NOT_SET;
pendingOutputAudioFormat = AudioFormat.NOT_SET;
inputEnded = false;
}

/**
* Configures the pipeline to process input audio with the specified format. Returns the
* configured output audio format.
*
* <p>To apply the new configuration for use, the pipeline must be {@linkplain #flush() flushed}.
* Before applying the new configuration, it is safe to queue input and get output in the old
* input/output formats/configuration. Call {@link #queueEndOfStream()} when no more input will be
* supplied for processing in the old configuration.
*
* @param inputAudioFormat The format of audio that will be queued after the next call to {@link
* #flush()}.
* @return The configured output audio format.
* @throws AudioProcessor.UnhandledAudioFormatException If the specified format is not supported
* by the pipeline.
*/
@CanIgnoreReturnValue
public AudioFormat configure(AudioFormat inputAudioFormat)
throws AudioProcessor.UnhandledAudioFormatException {
if (inputAudioFormat.equals(AudioFormat.NOT_SET)) {
throw new AudioProcessor.UnhandledAudioFormatException(inputAudioFormat);
}

AudioFormat intermediateAudioFormat = inputAudioFormat;

for (int i = 0; i < audioProcessors.size(); i++) {
AudioProcessor audioProcessor = audioProcessors.get(i);
AudioFormat nextFormat = audioProcessor.configure(intermediateAudioFormat);
if (audioProcessor.isActive()) {
checkState(!nextFormat.equals(AudioFormat.NOT_SET));
intermediateAudioFormat = nextFormat;
}
}

return pendingOutputAudioFormat = intermediateAudioFormat;
}

/**
* Clears any buffered data and pending output. If any underlying audio processors are {@linkplain
* AudioProcessor#isActive() active}, this also prepares them to receive a new stream of input in
* the last {@linkplain #configure(AudioFormat) configured} (pending) format.
*
* <p>{@link #configure(AudioFormat)} must have been called at least once since the last call to
* {@link #reset()} before calling this.
*/
public void flush() {
activeAudioProcessors.clear();
outputAudioFormat = pendingOutputAudioFormat;
inputEnded = false;

for (int i = 0; i < audioProcessors.size(); i++) {
AudioProcessor audioProcessor = audioProcessors.get(i);
audioProcessor.flush();
if (audioProcessor.isActive()) {
activeAudioProcessors.add(audioProcessor);
}
}

outputBuffers = new ByteBuffer[activeAudioProcessors.size()];
for (int i = 0; i <= getFinalOutputBufferIndex(); i++) {
outputBuffers[i] = activeAudioProcessors.get(i).getOutput();
}
}

/** Returns the {@link AudioFormat} currently being output. */
public AudioFormat getOutputAudioFormat() {
return outputAudioFormat;
}

/**
* Whether the pipeline can be used for processing buffers.
*
* <p>For this to happen the pipeline must be {@linkplain #configure(AudioFormat) configured},
* {@linkplain #flush() flushed} and have {@linkplain AudioProcessor#isActive() active}
* {@linkplain AudioProcessor underlying audio processors} that are ready to process buffers with
* the current configuration.
*/
public boolean isOperational() {
return !activeAudioProcessors.isEmpty();
}

/**
* Queues audio data between the position and limit of the {@code inputBuffer} for processing.
* After calling this method, processed output may be available via {@link #getOutput()}.
*
* @param inputBuffer The input buffer to process. It must be a direct {@link ByteBuffer} with
* native byte order. Its contents are treated as read-only. Its position will be advanced by
* the number of bytes consumed (which may be zero). The caller retains ownership of the
* provided buffer.
*/
public void queueInput(ByteBuffer inputBuffer) {
if (!isOperational() || inputEnded) {
return;
}
processData(inputBuffer);
}

/**
* Returns a {@link ByteBuffer} containing processed output data between its position and limit.
* The buffer will be empty if no output is available.
*
* <p>Buffers returned from this method are retained by pipeline, and it is necessary to consume
* the data (or copy it into another buffer) to allow the pipeline to progress.
*
* @return A buffer containing processed output data between its position and limit.
*/
public ByteBuffer getOutput() {
if (!isOperational()) {
return EMPTY_BUFFER;
}
processData(EMPTY_BUFFER);
return outputBuffers[getFinalOutputBufferIndex()];
}

/**
* Queues an end of stream signal. After this method has been called, {@link
* #queueInput(ByteBuffer)} should not be called until after the next call to {@link #flush()}.
* Calling {@link #getOutput()} will return any remaining output data. Multiple calls may be
* required to read all of the remaining output data. {@link #isEnded()} will return {@code true}
* once all remaining output data has been read.
*/
public void queueEndOfStream() {
if (!isOperational() || inputEnded) {
return;
}
inputEnded = true;
activeAudioProcessors.get(0).queueEndOfStream();
}

/**
* Returns whether the pipeline has ended.
*
* <p>The pipeline is considered ended when:
*
* <ul>
* <li>End of stream has been {@linkplain #queueEndOfStream() queued}.
* <li>Every {@linkplain #queueInput(ByteBuffer) input buffer} has been processed.
* <li>Every {@linkplain #getOutput() output buffer} has been fully consumed.
* </ul>
*/
public boolean isEnded() {
return inputEnded
&& activeAudioProcessors.get(getFinalOutputBufferIndex()).isEnded()
&& !outputBuffers[getFinalOutputBufferIndex()].hasRemaining();
}

/**
* Resets the pipeline and its underlying {@link AudioProcessor} instances to their unconfigured
* state, releasing any resources.
*/
public void reset() {
for (int i = 0; i < audioProcessors.size(); i++) {
AudioProcessor audioProcessor = audioProcessors.get(i);
audioProcessor.flush();
audioProcessor.reset();
}
outputBuffers = new ByteBuffer[0];
outputAudioFormat = AudioFormat.NOT_SET;
pendingOutputAudioFormat = AudioFormat.NOT_SET;
inputEnded = false;
}

/**
* Indicates whether some other object is "equal to" this one.
*
* <p>Two instances of {@link AudioProcessingPipeline} are considered equal if they have the same
* underlying {@link AudioProcessor} references in the same order.
*/
@Override
public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (!(o instanceof AudioProcessingPipeline)) {
return false;
}
AudioProcessingPipeline that = (AudioProcessingPipeline) o;
if (this.audioProcessors.size() != that.audioProcessors.size()) {
return false;
}
for (int i = 0; i < this.audioProcessors.size(); i++) {
if (this.audioProcessors.get(i) != that.audioProcessors.get(i)) {
return false;
}
}

return true;
}

@Override
public int hashCode() {
return audioProcessors.hashCode();
}

private void processData(ByteBuffer inputBuffer) {
boolean progressMade = true;
while (progressMade) {
progressMade = false;
for (int index = 0; index <= getFinalOutputBufferIndex(); index++) {
if (outputBuffers[index].hasRemaining()) {
// Processor at this index has output that has not been consumed. Do not queue input.
continue;
}

AudioProcessor audioProcessor = activeAudioProcessors.get(index);

if (audioProcessor.isEnded()) {
if (!outputBuffers[index].hasRemaining() && index < getFinalOutputBufferIndex()) {
activeAudioProcessors.get(index + 1).queueEndOfStream();
}
continue;
}

ByteBuffer input =
index > 0
? outputBuffers[index - 1]
: inputBuffer.hasRemaining() ? inputBuffer : EMPTY_BUFFER;
long inputBytes = input.remaining();
audioProcessor.queueInput(input);
outputBuffers[index] = audioProcessor.getOutput();

progressMade |= (inputBytes - input.remaining()) > 0 || outputBuffers[index].hasRemaining();
}
}
}

private int getFinalOutputBufferIndex() {
return outputBuffers.length - 1;
}
}
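When the input format changes, or an underlying processor has a pending configuration change, the Javadoc above prescribes draining the old stream before flushing. Roughly (illustrative sketch only; pipeline, newInputFormat and consume are hypothetical placeholders):

  pipeline.configure(newInputFormat); // The old configuration keeps working until flush().
  pipeline.queueEndOfStream();        // No more input will be queued in the old configuration.
  while (!pipeline.isEnded()) {
    consume(pipeline.getOutput());    // Drain output produced under the old configuration.
  }
  pipeline.flush();                   // The new configuration now takes effect.
  // Continue queueing input and handling output in the new format.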
@@ -37,6 +37,13 @@ public interface AudioProcessor {

/** PCM audio format that may be handled by an audio processor. */
final class AudioFormat {
/**
* An {@link AudioFormat} instance to represent an unset {@link AudioFormat}. This should not be
* returned by {@link #configure(AudioFormat)} if the processor {@link #isActive()}.
*
* <p>Typically used to represent an inactive {@link AudioProcessor} {@linkplain
* #configure(AudioFormat) output format}.
*/
public static final AudioFormat NOT_SET =
new AudioFormat(
/* sampleRate= */ Format.NO_VALUE,
@@ -328,7 +328,7 @@ public final class GlProgram {
/* unusedLength */ new int[1],
/* lengthOffset= */ 0,
/* unusedSize */ new int[1],
/*sizeOffset= */ 0,
/* sizeOffset= */ 0,
type,
/* typeOffset= */ 0,
nameBytes,
@@ -80,7 +80,7 @@ public class AdPlaybackStateTest {
@Test
public void withAdGroupTimeUs_updatesAdGroupTimeUs() {
AdPlaybackState state =
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 5_000, 10_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0, 5_000, 10_000)
.withRemovedAdGroupCount(1);

state =
@@ -96,7 +96,7 @@ public class AdPlaybackStateTest {
@Test
public void withNewAdGroup_addsGroupAndKeepsExistingGroups() {
AdPlaybackState state =
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 3_000, 6_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0, 3_000, 6_000)
.withRemovedAdGroupCount(1)
.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
@@ -122,12 +122,12 @@ public class AdPlaybackStateTest {
@Test
public void withAdDurationsUs_updatesAdDurations() {
AdPlaybackState state =
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 10_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0, 10_000)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2)
.withAdDurationsUs(new long[][] {new long[] {5_000, 6_000}, new long[] {7_000, 8_000}});

state = state.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 1_000, 2_000);
state = state.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs= */ 1_000, 2_000);

assertThat(state.getAdGroup(0).durationsUs[0]).isEqualTo(5_000);
assertThat(state.getAdGroup(0).durationsUs[1]).isEqualTo(6_000);
@@ -314,7 +314,7 @@ public class AdPlaybackStateTest {
@Test
public void withOriginalAdCount() {
AdPlaybackState state =
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 5_000_000)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2);

state = state.withOriginalAdCount(/* adGroupIndex= */ 0, /* originalAdCount= */ 3);
@@ -326,7 +326,7 @@ public class AdPlaybackStateTest {
@Test
public void withOriginalAdCount_unsetValue_defaultsToIndexUnset() {
AdPlaybackState state =
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 5_000_000)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2);

assertThat(state.getAdGroup(0).count).isEqualTo(2);
@@ -335,7 +335,7 @@ public class AdPlaybackStateTest {

@Test
public void withLastAdGroupRemoved() {
AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000);
AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 5_000_000);
state =
state
.withAdCount(/* adGroupIndex= */ 0, 3)
@@ -362,7 +362,7 @@ public class AdPlaybackStateTest {
state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 5);
state =
state.withAdDurationsUs(
/* adGroupIndex= */ 1, /* adDurationsUs...= */ 1_000L, 2_000L, 3_000L, 4_000L, 5_000L);
/* adGroupIndex= */ 1, /* adDurationsUs= */ 1_000L, 2_000L, 3_000L, 4_000L, 5_000L);
state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 3, TEST_URI);
@@ -418,8 +418,8 @@ public class AdPlaybackStateTest {
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 3333)
.withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 12)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 34, 56)
.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs= */ 12)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 34, 56)
.withAdResumePositionUs(123)
.withContentDurationUs(456);

@@ -456,7 +456,7 @@ public class AdPlaybackStateTest {
AdPlaybackState state =
new AdPlaybackState(
/* adsId= */ new Object(),
/* adGroupTimesUs...= */ 0,
/* adGroupTimesUs= */ 0,
1000,
2000,
3000,
@@ -539,7 +539,7 @@ public class AdPlaybackStateTest {
@Test
public void getAdGroupIndexAfterPositionUs_withServerSideInsertedAds_returnsNextAdGroup() {
AdPlaybackState state =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0, 1000, 2000)
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0, 1000, 2000)
.withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
@@ -80,7 +80,7 @@ public final class FlagSetTest {
public void containsAny_withoutAdd_returnsFalseForAllValues() {
FlagSet flags = new FlagSet.Builder().build();

assertThat(flags.containsAny(/* flags...= */ -1234, 0, 2, Integer.MAX_VALUE)).isFalse();
assertThat(flags.containsAny(/* flags= */ -1234, 0, 2, Integer.MAX_VALUE)).isFalse();
}

@Test
@@ -95,9 +95,9 @@ public final class FlagSetTest {

assertThat(
flags.containsAny(
/* flags...= */ -1235, -1234, 0, 1, 2, Integer.MAX_VALUE - 1, Integer.MAX_VALUE))
/* flags= */ -1235, -1234, 0, 1, 2, Integer.MAX_VALUE - 1, Integer.MAX_VALUE))
.isTrue();
assertThat(flags.containsAny(/* flags...= */ -1235, 1, Integer.MAX_VALUE - 1)).isFalse();
assertThat(flags.containsAny(/* flags= */ -1235, 1, Integer.MAX_VALUE - 1)).isFalse();
}

@Test
@@ -181,9 +181,9 @@ public final class TrackSelectionParametersTest {
TrackSelectionParameters parameters =
new TrackSelectionParameters.Builder(getApplicationContext())
.setViewportSize(
/*viewportWidth=*/ 1,
/*viewportHeight=*/ 2,
/*viewportOrientationMayChange=*/ false)
/* viewportWidth= */ 1,
/* viewportHeight= */ 2,
/* viewportOrientationMayChange= */ false)
.clearViewportSizeConstraints()
.build();

@@ -0,0 +1,368 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.common.audio;

import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.min;

import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.Test;
import org.junit.runner.RunWith;

// TODO(b/198772621): Add tests for PlaybackParameter changes once Sonic or
// DefaultAudioProcessorChain is in common.
/** Unit tests for {@link AudioProcessingPipeline}. */
@RunWith(AndroidJUnit4.class)
public final class AudioProcessingPipelineTest {
private static final AudioFormat AUDIO_FORMAT =
new AudioFormat(/* sampleRate= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);

@Test
public void noAudioProcessors_isNotOperational() throws Exception {
AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of());

audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();

assertThat(audioProcessingPipeline.isOperational()).isFalse();
}

@Test
public void sameProcessors_pipelinesAreOnlyEqualIfSameOrderAndReference() throws Exception {
AudioProcessor audioProcessorOne = new FakeAudioProcessor(/* active= */ true);
AudioProcessor audioProcessorTwo = new FakeAudioProcessor(/* active= */ false);
AudioProcessor audioProcessorThree = new FakeAudioProcessor(/* active= */ true);

AudioProcessingPipeline pipelineOne =
new AudioProcessingPipeline(
ImmutableList.of(audioProcessorOne, audioProcessorTwo, audioProcessorThree));
// The internal state of the pipeline does not affect equality.
pipelineOne.configure(AUDIO_FORMAT);
pipelineOne.flush();

AudioProcessingPipeline pipelineTwo =
new AudioProcessingPipeline(
ImmutableList.of(audioProcessorOne, audioProcessorTwo, audioProcessorThree));

assertThat(pipelineOne).isEqualTo(pipelineTwo);

AudioProcessingPipeline pipelineThree =
new AudioProcessingPipeline(
ImmutableList.of(audioProcessorThree, audioProcessorTwo, audioProcessorOne));
assertThat(pipelineTwo).isNotEqualTo(pipelineThree);
}

@Test
public void configuringPipeline_givesFormat() throws Exception {
FakeAudioProcessor fakeSampleRateChangingAudioProcessor =
new FakeAudioProcessor(/* active= */ true) {
@Override
public AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
AudioFormat outputFormat =
new AudioFormat(
inputAudioFormat.sampleRate * 2,
inputAudioFormat.channelCount,
inputAudioFormat.encoding);
return super.configure(outputFormat);
}
};

AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(fakeSampleRateChangingAudioProcessor));
AudioFormat outputFormat = audioProcessingPipeline.configure(AUDIO_FORMAT);

assertThat(outputFormat.sampleRate).isEqualTo(AUDIO_FORMAT.sampleRate * 2);
}

@Test
public void configuringAndFlushingPipeline_isOperational() throws Exception {
FakeAudioProcessor fakeSampleRateChangingAudioProcessor =
new FakeAudioProcessor(/* active= */ true) {
@Override
public AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
AudioFormat outputFormat =
new AudioFormat(
inputAudioFormat.sampleRate * 2,
inputAudioFormat.channelCount,
inputAudioFormat.encoding);
return super.configure(outputFormat);
}
};

AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(fakeSampleRateChangingAudioProcessor));

assertThat(audioProcessingPipeline.isOperational()).isFalse();
audioProcessingPipeline.configure(AUDIO_FORMAT);
// Configuring the pipeline is not enough for it to be operational.
assertThat(audioProcessingPipeline.isOperational()).isFalse();
audioProcessingPipeline.flush();
assertThat(audioProcessingPipeline.isOperational()).isTrue();
}

@Test
public void reconfigure_doesNotChangeOperational_untilFlush() throws Exception {
FakeAudioProcessor audioProcessor = new FakeAudioProcessor(/* active= */ true);
AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(audioProcessor));
audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();
assertThat(audioProcessingPipeline.isOperational()).isTrue();

audioProcessor.setActive(false);
audioProcessingPipeline.configure(AUDIO_FORMAT);
assertThat(audioProcessingPipeline.isOperational()).isTrue();
audioProcessingPipeline.flush();
assertThat(audioProcessingPipeline.isOperational()).isFalse();
}

@Test
public void inactiveProcessor_isIgnoredInConfiguration() throws Exception {
FakeAudioProcessor fakeSampleRateChangingAudioProcessor =
new FakeAudioProcessor(/* active= */ false) {
@Override
public AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
AudioFormat outputFormat =
new AudioFormat(
inputAudioFormat.sampleRate * 2,
inputAudioFormat.channelCount,
inputAudioFormat.encoding);
return super.configure(outputFormat);
}
};

AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(fakeSampleRateChangingAudioProcessor));
AudioFormat outputFormat = audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();
assertThat(outputFormat).isEqualTo(AUDIO_FORMAT);
assertThat(audioProcessingPipeline.isOperational()).isFalse();
}

@Test
public void queueInput_producesOutputBuffer() throws Exception {
FakeAudioProcessor audioProcessor = new FakeAudioProcessor(/* active= */ true);
AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(audioProcessor));
audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();

ByteBuffer inputBuffer = createOneSecondDefaultSilenceBuffer(AUDIO_FORMAT);
long inputBytes = inputBuffer.remaining();
audioProcessingPipeline.queueInput(inputBuffer);
inputBytes -= inputBuffer.remaining();
ByteBuffer outputBuffer = audioProcessingPipeline.getOutput();
assertThat(inputBytes).isEqualTo(outputBuffer.remaining());
assertThat(inputBuffer).isNotSameInstanceAs(outputBuffer);
}

@Test
public void isEnded_needsBufferConsuming() throws Exception {
FakeAudioProcessor audioProcessor = new FakeAudioProcessor(/* active= */ true);
AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(ImmutableList.of(audioProcessor));
audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();

ByteBuffer inputBuffer = createOneSecondDefaultSilenceBuffer(AUDIO_FORMAT);
audioProcessingPipeline.queueInput(inputBuffer);
audioProcessingPipeline.queueEndOfStream();
assertThat(audioProcessingPipeline.isEnded()).isFalse();
ByteBuffer outputBuffer = audioProcessingPipeline.getOutput();
assertThat(audioProcessingPipeline.isEnded()).isFalse();

// "consume" the buffer
outputBuffer.position(outputBuffer.limit());
assertThat(audioProcessingPipeline.isEnded()).isTrue();
}

@Test
public void pipelineWithAdvancedAudioProcessors_drainsAndFeedsCorrectly_duplicatesBytes()
throws Exception {
AudioProcessingPipeline audioProcessingPipeline =
new AudioProcessingPipeline(
ImmutableList.of(
new FakeAudioProcessor(
/* active= */ true, /* maxInputBytesAtOnce= */ 8, /* duplicateBytes= */ true),
new FakeAudioProcessor(
/* active= */ true, /* maxInputBytesAtOnce= */ 0, /* duplicateBytes= */ false),
new FakeAudioProcessor(
/* active= */ true, /* maxInputBytesAtOnce= */ 12, /* duplicateBytes= */ true),
new FakeAudioProcessor(
/* active= */ true,
/* maxInputBytesAtOnce= */ 160,
/* duplicateBytes= */ false)));
audioProcessingPipeline.configure(AUDIO_FORMAT);
audioProcessingPipeline.flush();

ByteBuffer inputBuffer = createOneSecondDefaultSilenceBuffer(AUDIO_FORMAT);
inputBuffer.put(0, (byte) 24);
inputBuffer.put(1, (byte) 36);
inputBuffer.put(2, (byte) 6);
int bytesInput = inputBuffer.remaining();
List<Byte> bytesOutput = new ArrayList<>();
while (!audioProcessingPipeline.isEnded()) {
ByteBuffer bufferToConsume;
while ((bufferToConsume = audioProcessingPipeline.getOutput()).hasRemaining()) {
// "consume" the buffer. Equivalent to writing downstream.
bytesOutput.add(bufferToConsume.get());
}
if (!inputBuffer.hasRemaining()) {
audioProcessingPipeline.queueEndOfStream();
} else {
audioProcessingPipeline.queueInput(inputBuffer);
}
}
assertThat(audioProcessingPipeline.isEnded()).isTrue();
assertThat(4 * bytesInput).isEqualTo(bytesOutput.size());

assertThat(bytesOutput.get(0)).isEqualTo((byte) 24);
assertThat(bytesOutput.get(1)).isEqualTo((byte) 24);
assertThat(bytesOutput.get(2)).isEqualTo((byte) 24);
assertThat(bytesOutput.get(3)).isEqualTo((byte) 24);
assertThat(bytesOutput.get(4)).isEqualTo((byte) 36);
assertThat(bytesOutput.get(5)).isEqualTo((byte) 36);
assertThat(bytesOutput.get(6)).isEqualTo((byte) 36);
assertThat(bytesOutput.get(7)).isEqualTo((byte) 36);
assertThat(bytesOutput.get(8)).isEqualTo((byte) 6);
assertThat(bytesOutput.get(9)).isEqualTo((byte) 6);
assertThat(bytesOutput.get(10)).isEqualTo((byte) 6);
assertThat(bytesOutput.get(11)).isEqualTo((byte) 6);
assertThat(bytesOutput.get(12)).isEqualTo((byte) 0);
}

// TODO(b/198772621): Consider implementing BaseAudioProcessor once that is in common.
private static class FakeAudioProcessor implements AudioProcessor {
protected ByteBuffer internalBuffer;
private boolean inputEnded;
private boolean active;
private final int maxInputBytesAtOnce;
private final boolean duplicateBytes;

private @MonotonicNonNull AudioFormat pendingOutputFormat;
private @MonotonicNonNull AudioFormat outputFormat;

public FakeAudioProcessor(boolean active) {
this(active, /* maxInputBytesAtOnce= */ 0, /* duplicateBytes= */ false);
}

public FakeAudioProcessor(boolean active, int maxInputBytesAtOnce, boolean duplicateBytes) {
this.active = active;
this.maxInputBytesAtOnce = maxInputBytesAtOnce;
this.duplicateBytes = duplicateBytes;
internalBuffer = EMPTY_BUFFER;
}

public void setActive(boolean active) {
this.active = active;
}

@Override
public AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
pendingOutputFormat = inputAudioFormat;
return pendingOutputFormat;
}

@Override
public boolean isActive() {
return active && !pendingOutputFormat.equals(AudioFormat.NOT_SET);
}

@Override
public void queueInput(ByteBuffer inputBuffer) {
if (outputFormat.equals(AudioFormat.NOT_SET)) {
return;
}

int remaining = inputBuffer.remaining();
if (remaining == 0) {
return;
}

internalBuffer =
createOrReplaceBuffer(
maxInputBytesAtOnce > 0 ? min(remaining, maxInputBytesAtOnce) : remaining,
internalBuffer);

while (internalBuffer.hasRemaining()) {
byte b = inputBuffer.get();
internalBuffer.put(b);
if (duplicateBytes) {
internalBuffer.put(b);
}
}

internalBuffer.flip();
}

@Override
public void queueEndOfStream() {
inputEnded = true;
}

@Override
public ByteBuffer getOutput() {
return internalBuffer;
}

@Override
public boolean isEnded() {
return inputEnded && !internalBuffer.hasRemaining();
}

@Override
public void flush() {
internalBuffer.clear();
internalBuffer = EMPTY_BUFFER;
inputEnded = false;
outputFormat = pendingOutputFormat;
}

@Override
public void reset() {
flush();
}
}

private static ByteBuffer createOrReplaceBuffer(int size, @Nullable ByteBuffer buffer) {
if (buffer == null || buffer.capacity() < size) {
buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
}
buffer.clear();
return buffer;
}

/** Creates a one second silence buffer for the given {@link AudioFormat}. */
private static ByteBuffer createOneSecondDefaultSilenceBuffer(AudioFormat audioFormat) {
return createOrReplaceBuffer(
/* size= */ audioFormat.sampleRate * audioFormat.channelCount * audioFormat.bytesPerFrame,
/* buffer= */ null);
}
}
@@ -1157,7 +1157,7 @@ public final class CronetDataSourceTest {
@Test
public void redirectParseAndAttachCookie_dataSourceDoesNotHandleSetCookie_followsRedirect()
throws HttpDataSourceException {
mockSingleRedirectSuccess(/*responseCode=*/ 300);
mockSingleRedirectSuccess(/* responseCode= */ 300);
mockFollowRedirectSuccess();

testResponseHeader.put("Set-Cookie", "testcookie=testcookie; Path=/video");
@@ -1182,7 +1182,7 @@ public final class CronetDataSourceTest {
dataSourceUnderTest.addTransferListener(mockTransferListener);
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);

mockSingleRedirectSuccess(/*responseCode=*/ 300);
mockSingleRedirectSuccess(/* responseCode= */ 300);

testResponseHeader.put("Set-Cookie", "testcookie=testcookie; Path=/video");

@@ -1210,7 +1210,7 @@ public final class CronetDataSourceTest {
dataSourceUnderTest.addTransferListener(mockTransferListener);
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);

mockSingleRedirectSuccess(/*responseCode=*/ 300);
mockSingleRedirectSuccess(/* responseCode= */ 300);
mockReadSuccess(0, 1000);

testResponseHeader.put("Set-Cookie", "testcookie=testcookie; Path=/video");
@@ -1225,7 +1225,7 @@ public final class CronetDataSourceTest {

@Test
public void redirectNoSetCookieFollowsRedirect() throws HttpDataSourceException {
mockSingleRedirectSuccess(/*responseCode=*/ 300);
mockSingleRedirectSuccess(/* responseCode= */ 300);
mockFollowRedirectSuccess();

dataSourceUnderTest.open(testDataSpec);
@@ -1245,7 +1245,7 @@ public final class CronetDataSourceTest {
.setHandleSetCookieRequests(true)
.createDataSource();
dataSourceUnderTest.addTransferListener(mockTransferListener);
mockSingleRedirectSuccess(/*responseCode=*/ 300);
mockSingleRedirectSuccess(/* responseCode= */ 300);
mockFollowRedirectSuccess();

dataSourceUnderTest.open(testDataSpec);
@@ -1255,7 +1255,7 @@ public final class CronetDataSourceTest {

@Test
public void redirectPostFollowRedirect() throws HttpDataSourceException {
mockSingleRedirectSuccess(/*responseCode=*/ 302);
mockSingleRedirectSuccess(/* responseCode= */ 302);
mockFollowRedirectSuccess();
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);

@@ -1275,7 +1275,7 @@ public final class CronetDataSourceTest {
.setKeepPostFor302Redirects(false)
.setHandleSetCookieRequests(true)
.createDataSource();
mockSingleRedirectSuccess(/*responseCode=*/ 302);
mockSingleRedirectSuccess(/* responseCode= */ 302);
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);
testResponseHeader.put("Set-Cookie", "testcookie=testcookie; Path=/video");

@@ -1297,7 +1297,7 @@ public final class CronetDataSourceTest {
.setResetTimeoutOnRedirects(true)
.setKeepPostFor302Redirects(true)
.createDataSource();
mockSingleRedirectSuccess(/*responseCode=*/ 302);
mockSingleRedirectSuccess(/* responseCode= */ 302);
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);

dataSourceUnderTest.open(testPostDataSpec);
@@ -104,7 +104,7 @@ public class ContrastPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -128,7 +128,7 @@ public class ContrastPixelTest {
Color.rgb(
OPENGL_NEUTRAL_RGB_VALUE, OPENGL_NEUTRAL_RGB_VALUE, OPENGL_NEUTRAL_RGB_VALUE));

contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -187,7 +187,7 @@ public class ContrastPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(MAXIMUM_CONTRAST_PNG_ASSET_PATH);

contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
contrastProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);
@@ -105,7 +105,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -126,7 +126,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -147,7 +147,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -168,7 +168,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -189,7 +189,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -212,7 +212,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(LUT_MAP_WHITE_TO_GREEN_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -233,7 +233,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(INVERT_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);

@@ -254,7 +254,7 @@ public class SingleColorLutPixelTest {
setupOutputTexture(outputSize.first, outputSize.second);
Bitmap expectedBitmap = readBitmap(GRAYSCALE_PNG_ASSET_PATH);

colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs = */ 0);
colorLutProcessor.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromCurrentGlFramebuffer(outputSize.first, outputSize.second);
@@ -399,8 +399,11 @@ public abstract class DecoderAudioRenderer<
}

private boolean drainOutputBuffer()
throws ExoPlaybackException, DecoderException, AudioSink.ConfigurationException,
AudioSink.InitializationException, AudioSink.WriteException {
throws ExoPlaybackException,
DecoderException,
AudioSink.ConfigurationException,
AudioSink.InitializationException,
AudioSink.WriteException {
if (outputBuffer == null) {
outputBuffer = decoder.dequeueOutputBuffer();
if (outputBuffer == null) {
@ -44,6 +44,7 @@ import androidx.media3.common.C;
|
||||
import androidx.media3.common.Format;
|
||||
import androidx.media3.common.MimeTypes;
|
||||
import androidx.media3.common.PlaybackParameters;
|
||||
import androidx.media3.common.audio.AudioProcessingPipeline;
|
||||
import androidx.media3.common.audio.AudioProcessor;
|
||||
import androidx.media3.common.audio.AudioProcessor.UnhandledAudioFormatException;
|
||||
import androidx.media3.common.util.Assertions;
|
||||
@ -59,6 +60,7 @@ import androidx.media3.extractor.Ac3Util;
|
||||
import androidx.media3.extractor.Ac4Util;
|
||||
import androidx.media3.extractor.DtsUtil;
|
||||
import androidx.media3.extractor.MpegAudioUtil;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.errorprone.annotations.CanIgnoreReturnValue;
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.Retention;
|
||||
@ -67,8 +69,6 @@ import java.lang.annotation.Target;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
|
||||
@ -464,8 +464,8 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
private final boolean enableFloatOutput;
|
||||
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
|
||||
private final TrimmingAudioProcessor trimmingAudioProcessor;
|
||||
private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
|
||||
private final AudioProcessor[] toFloatPcmAvailableAudioProcessors;
|
||||
private final ImmutableList<AudioProcessor> toIntPcmAvailableAudioProcessors;
|
||||
private final ImmutableList<AudioProcessor> toFloatPcmAvailableAudioProcessors;
|
||||
private final ConditionVariable releasingConditionVariable;
|
||||
private final AudioTrackPositionTracker audioTrackPositionTracker;
|
||||
private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
|
||||
@ -482,6 +482,7 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
@Nullable private Listener listener;
|
||||
@Nullable private Configuration pendingConfiguration;
|
||||
private @MonotonicNonNull Configuration configuration;
|
||||
private @MonotonicNonNull AudioProcessingPipeline audioProcessingPipeline;
|
||||
@Nullable private AudioTrack audioTrack;
|
||||
|
||||
private AudioAttributes audioAttributes;
|
||||
@ -502,14 +503,11 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
private long startMediaTimeUs;
|
||||
private float volume;
|
||||
|
||||
private AudioProcessor[] activeAudioProcessors;
|
||||
private ByteBuffer[] outputBuffers;
|
||||
@Nullable private ByteBuffer inputBuffer;
|
||||
private int inputBufferAccessUnitCount;
|
||||
@Nullable private ByteBuffer outputBuffer;
|
||||
private @MonotonicNonNull byte[] preV21OutputBuffer;
|
||||
private int preV21OutputBufferOffset;
|
||||
private int drainingAudioProcessorIndex;
|
||||
private boolean handledEndOfStream;
|
||||
private boolean stoppedAudioTrack;
|
||||
|
||||
@ -536,15 +534,10 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
|
||||
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
|
||||
trimmingAudioProcessor = new TrimmingAudioProcessor();
|
||||
ArrayList<AudioProcessor> toIntPcmAudioProcessors = new ArrayList<>();
|
||||
Collections.addAll(
|
||||
toIntPcmAudioProcessors,
|
||||
new ResamplingAudioProcessor(),
|
||||
channelMappingAudioProcessor,
|
||||
trimmingAudioProcessor);
|
||||
Collections.addAll(toIntPcmAudioProcessors, audioProcessorChain.getAudioProcessors());
|
||||
toIntPcmAvailableAudioProcessors = toIntPcmAudioProcessors.toArray(new AudioProcessor[0]);
|
||||
toFloatPcmAvailableAudioProcessors = new AudioProcessor[] {new FloatResamplingAudioProcessor()};
|
||||
toIntPcmAvailableAudioProcessors =
|
||||
ImmutableList.of(
|
||||
new ResamplingAudioProcessor(), channelMappingAudioProcessor, trimmingAudioProcessor);
|
||||
toFloatPcmAvailableAudioProcessors = ImmutableList.of(new FloatResamplingAudioProcessor());
|
||||
volume = 1f;
|
||||
audioAttributes = AudioAttributes.DEFAULT;
|
||||
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
|
||||
@ -556,9 +549,6 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
/* mediaTimeUs= */ 0,
|
||||
/* audioTrackPositionUs= */ 0);
|
||||
audioTrackPlaybackParameters = PlaybackParameters.DEFAULT;
|
||||
drainingAudioProcessorIndex = C.INDEX_UNSET;
|
||||
activeAudioProcessors = new AudioProcessor[0];
|
||||
outputBuffers = new ByteBuffer[0];
|
||||
mediaPositionParametersCheckpoints = new ArrayDeque<>();
|
||||
initializationExceptionPendingExceptionHolder =
|
||||
new PendingExceptionHolder<>(AUDIO_TRACK_RETRY_DURATION_MS);
|
||||
@ -621,9 +611,8 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
@Override
|
||||
public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
|
||||
throws ConfigurationException {
|
||||
AudioProcessingPipeline audioProcessingPipeline;
|
||||
int inputPcmFrameSize;
|
||||
@Nullable AudioProcessor[] availableAudioProcessors;
|
||||
|
||||
@OutputMode int outputMode;
|
||||
@C.Encoding int outputEncoding;
|
||||
int outputSampleRate;
|
||||
@ -634,10 +623,21 @@ public final class DefaultAudioSink implements AudioSink {
|
||||
Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding));
|
||||
|
||||
inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount);
|
||||
availableAudioProcessors =
|
||||
shouldUseFloatOutput(inputFormat.pcmEncoding)
|
||||
? toFloatPcmAvailableAudioProcessors
|
||||
: toIntPcmAvailableAudioProcessors;
|
||||
|
||||
ImmutableList.Builder<AudioProcessor> pipelineProcessors = new ImmutableList.Builder<>();
|
||||
if (shouldUseFloatOutput(inputFormat.pcmEncoding)) {
|
||||
pipelineProcessors.addAll(toFloatPcmAvailableAudioProcessors);
|
||||
} else {
|
||||
pipelineProcessors.addAll(toIntPcmAvailableAudioProcessors);
|
||||
pipelineProcessors.add(audioProcessorChain.getAudioProcessors());
|
||||
}
|
||||
audioProcessingPipeline = new AudioProcessingPipeline(pipelineProcessors.build());
|
||||
|
||||
// If the underlying processors of the new pipeline are the same as the existing pipeline,
|
||||
// then use the existing one when the configuration is used.
|
||||
if (audioProcessingPipeline.equals(this.audioProcessingPipeline)) {
|
||||
audioProcessingPipeline = this.audioProcessingPipeline;
|
||||
}
|
||||
|
||||
trimmingAudioProcessor.setTrimFrameCount(
|
||||
inputFormat.encoderDelay, inputFormat.encoderPadding);
|
||||
@ -655,15 +655,10 @@ public final class DefaultAudioSink implements AudioSink {
AudioProcessor.AudioFormat outputFormat =
new AudioProcessor.AudioFormat(
inputFormat.sampleRate, inputFormat.channelCount, inputFormat.pcmEncoding);
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
if (audioProcessor.isActive()) {
outputFormat = nextFormat;
}
} catch (UnhandledAudioFormatException e) {
throw new ConfigurationException(e, inputFormat);
}
try {
outputFormat = audioProcessingPipeline.configure(outputFormat);
} catch (UnhandledAudioFormatException e) {
throw new ConfigurationException(e, inputFormat);
}

outputMode = OUTPUT_MODE_PCM;
@ -672,8 +667,9 @@ public final class DefaultAudioSink implements AudioSink {
outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount);
outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount);
} else {
// Audio processing is not supported in offload or passthrough mode.
audioProcessingPipeline = new AudioProcessingPipeline(ImmutableList.of());
inputPcmFrameSize = C.LENGTH_UNSET;
availableAudioProcessors = new AudioProcessor[0];
outputSampleRate = inputFormat.sampleRate;
outputPcmFrameSize = C.LENGTH_UNSET;
if (useOffloadedPlayback(inputFormat, audioAttributes)) {
@ -726,7 +722,7 @@ public final class DefaultAudioSink implements AudioSink {
outputChannelConfig,
outputEncoding,
bufferSize,
availableAudioProcessors);
audioProcessingPipeline);
if (isAudioTrackInitialized()) {
this.pendingConfiguration = pendingConfiguration;
} else {
@ -735,27 +731,8 @@ public final class DefaultAudioSink implements AudioSink {
}

private void setupAudioProcessors() {
AudioProcessor[] audioProcessors = configuration.availableAudioProcessors;
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : audioProcessors) {
if (audioProcessor.isActive()) {
newAudioProcessors.add(audioProcessor);
} else {
audioProcessor.flush();
}
}
int count = newAudioProcessors.size();
activeAudioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count];
flushAudioProcessors();
}

private void flushAudioProcessors() {
for (int i = 0; i < activeAudioProcessors.length; i++) {
AudioProcessor audioProcessor = activeAudioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
audioProcessingPipeline = configuration.audioProcessingPipeline;
audioProcessingPipeline.flush();
}

private boolean initializeAudioTrack() throws InitializationException {
@ -1019,40 +996,57 @@ public final class DefaultAudioSink implements AudioSink {
offloadStreamEventCallbackV29.register(audioTrack);
}

/**
* Repeatedly drains and feeds the {@link AudioProcessingPipeline} until {@link
* #writeBuffer(ByteBuffer, long)} is not accepting any more input or there is no more input to
* feed into the pipeline.
*
* <p>If the {@link AudioProcessingPipeline} is not {@linkplain
* AudioProcessingPipeline#isOperational() operational}, input buffers are passed straight to
* {@link #writeBuffer(ByteBuffer, long)}.
*/
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
int count = activeAudioProcessors.length;
int index = count;
while (index >= 0) {
ByteBuffer input =
index > 0
? outputBuffers[index - 1]
: (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
if (index == count) {
writeBuffer(input, avSyncPresentationTimeUs);
} else {
AudioProcessor audioProcessor = activeAudioProcessors[index];
if (index > drainingAudioProcessorIndex) {
audioProcessor.queueInput(input);
}
ByteBuffer output = audioProcessor.getOutput();
outputBuffers[index] = output;
if (output.hasRemaining()) {
// Handle the output as input to the next audio processor or the AudioTrack.
index++;
continue;
if (!audioProcessingPipeline.isOperational() && inputBuffer != null) {
writeBuffer(inputBuffer, avSyncPresentationTimeUs);
return;
}

while (!audioProcessingPipeline.isEnded()) {
ByteBuffer bufferToWrite;
while ((bufferToWrite = audioProcessingPipeline.getOutput()).hasRemaining()) {
writeBuffer(bufferToWrite, avSyncPresentationTimeUs);
if (bufferToWrite.hasRemaining()) {
// writeBuffer method is providing back pressure.
return;
}
}

if (input.hasRemaining()) {
// The input wasn't consumed and no output was produced, so give up for now.
if (inputBuffer == null || !inputBuffer.hasRemaining()) {
return;
}

// Get more input from upstream.
index--;
audioProcessingPipeline.queueInput(inputBuffer);
}
}

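For reference, a standalone sketch of the drain and feed loop described in the Javadoc above. The class and parameter names are illustrative assumptions, not part of this change; write.accept stands in for writeBuffer and may leave bytes unconsumed in the buffer to signal back pressure.

import androidx.media3.common.audio.AudioProcessingPipeline;
import java.nio.ByteBuffer;
import java.util.function.Consumer;

final class PipelineDrainSketch {
  /** Feeds {@code input} through {@code pipeline}, handing processed buffers to {@code write}. */
  static void drainAndFeed(
      AudioProcessingPipeline pipeline, ByteBuffer input, Consumer<ByteBuffer> write) {
    if (!pipeline.isOperational()) {
      write.accept(input); // No active processors: pass the input straight through.
      return;
    }
    while (!pipeline.isEnded()) {
      ByteBuffer output;
      while ((output = pipeline.getOutput()).hasRemaining()) {
        write.accept(output);
        if (output.hasRemaining()) {
          return; // The consumer is applying back pressure; retry later.
        }
      }
      if (!input.hasRemaining()) {
        return; // Nothing left to feed into the pipeline for now.
      }
      pipeline.queueInput(input);
    }
  }
}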
/**
* Queues end of stream and then fully drains all buffers.
*
* @return Whether the buffers have been fully drained.
*/
private boolean drainToEndOfStream() throws WriteException {
if (!audioProcessingPipeline.isOperational()) {
if (outputBuffer == null) {
return true;
}
writeBuffer(outputBuffer, C.TIME_UNSET);
return outputBuffer == null;
}

audioProcessingPipeline.queueEndOfStream();
processBuffers(C.TIME_UNSET);
return audioProcessingPipeline.isEnded()
&& (outputBuffer == null || !outputBuffer.hasRemaining());
}

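Continuing the same illustrative PipelineDrainSketch (again an assumption, not part of the commit), end-of-stream draining queues end of stream, keeps writing output, and only reports completion once the pipeline itself is ended:

  /** Returns whether all buffers have been drained after signalling end of stream. */
  static boolean drainToEnd(AudioProcessingPipeline pipeline, Consumer<ByteBuffer> write) {
    pipeline.queueEndOfStream();
    ByteBuffer output;
    while ((output = pipeline.getOutput()).hasRemaining()) {
      write.accept(output);
      if (output.hasRemaining()) {
        return false; // Back pressure: not yet drained.
      }
    }
    return pipeline.isEnded();
  }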
@SuppressWarnings("ReferenceEquality")
private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
if (!buffer.hasRemaining()) {
@ -1177,36 +1171,6 @@ public final class DefaultAudioSink implements AudioSink {
|| status == ERROR_NATIVE_DEAD_OBJECT;
}

private boolean drainToEndOfStream() throws WriteException {
boolean audioProcessorNeedsEndOfStream = false;
if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
drainingAudioProcessorIndex = 0;
audioProcessorNeedsEndOfStream = true;
}
while (drainingAudioProcessorIndex < activeAudioProcessors.length) {
AudioProcessor audioProcessor = activeAudioProcessors[drainingAudioProcessorIndex];
if (audioProcessorNeedsEndOfStream) {
audioProcessor.queueEndOfStream();
}
processBuffers(C.TIME_UNSET);
if (!audioProcessor.isEnded()) {
return false;
}
audioProcessorNeedsEndOfStream = true;
drainingAudioProcessorIndex++;
}

// Finish writing any remaining output to the track.
if (outputBuffer != null) {
writeBuffer(outputBuffer, C.TIME_UNSET);
if (outputBuffer != null) {
return false;
}
}
drainingAudioProcessorIndex = C.INDEX_UNSET;
return true;
}

@Override
public boolean isEnded() {
return !isAudioTrackInitialized() || (handledEndOfStream && !hasPendingData());
@ -1422,6 +1386,9 @@ public final class DefaultAudioSink implements AudioSink {
for (AudioProcessor audioProcessor : toFloatPcmAvailableAudioProcessors) {
audioProcessor.reset();
}
if (audioProcessingPipeline != null) {
audioProcessingPipeline.reset();
}
playing = false;
offloadDisabledUntilNextConfiguration = false;
}
@ -1449,11 +1416,10 @@ public final class DefaultAudioSink implements AudioSink {
outputBuffer = null;
stoppedAudioTrack = false;
handledEndOfStream = false;
drainingAudioProcessorIndex = C.INDEX_UNSET;
avSyncHeader = null;
bytesUntilNextAvSync = 0;
trimmingAudioProcessor.resetTrimmedFrameCount();
flushAudioProcessors();
setupAudioProcessors();
}

@RequiresApi(23)
@ -1991,7 +1957,7 @@ public final class DefaultAudioSink implements AudioSink {
public final int outputChannelConfig;
public final @C.Encoding int outputEncoding;
public final int bufferSize;
public final AudioProcessor[] availableAudioProcessors;
public final AudioProcessingPipeline audioProcessingPipeline;

public Configuration(
Format inputFormat,
@ -2002,7 +1968,7 @@ public final class DefaultAudioSink implements AudioSink {
int outputChannelConfig,
int outputEncoding,
int bufferSize,
AudioProcessor[] availableAudioProcessors) {
AudioProcessingPipeline audioProcessingPipeline) {
this.inputFormat = inputFormat;
this.inputPcmFrameSize = inputPcmFrameSize;
this.outputMode = outputMode;
@ -2011,7 +1977,7 @@ public final class DefaultAudioSink implements AudioSink {
this.outputChannelConfig = outputChannelConfig;
this.outputEncoding = outputEncoding;
this.bufferSize = bufferSize;
this.availableAudioProcessors = availableAudioProcessors;
this.audioProcessingPipeline = audioProcessingPipeline;
}

public Configuration copyWithBufferSize(int bufferSize) {
@ -2024,7 +1990,7 @@ public final class DefaultAudioSink implements AudioSink {
outputChannelConfig,
outputEncoding,
bufferSize,
availableAudioProcessors);
audioProcessingPipeline);
}

/** Returns if the configurations are sufficiently compatible to reuse the audio track. */

@ -376,7 +376,7 @@ public final class MaskingMediaSource extends WrappingMediaSource {
/* id= */ setIds ? 0 : null,
/* uid= */ setIds ? MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID : null,
/* windowIndex= */ 0,
/* durationUs = */ C.TIME_UNSET,
/* durationUs= */ C.TIME_UNSET,
/* positionInWindowUs= */ 0,
/* adPlaybackState= */ AdPlaybackState.NONE,
/* isPlaceholder= */ true);

@ -120,7 +120,7 @@ public final class ExoplayerCuesDecoder implements SubtitleDecoder {
SingleEventSubtitle subtitle =
new SingleEventSubtitle(
inputBuffer.timeUs, cueDecoder.decode(checkNotNull(inputBuffer.data).array()));
outputBuffer.setContent(inputBuffer.timeUs, subtitle, /* subsampleOffsetUs=*/ 0);
outputBuffer.setContent(inputBuffer.timeUs, subtitle, /* subsampleOffsetUs= */ 0);
}
inputBuffer.clear();
inputBufferState = INPUT_BUFFER_AVAILABLE;

@ -756,7 +756,7 @@ public final class ExoPlayerTest {
public void adGroupWithLoadError_noFurtherAdGroup_isSkipped() throws Exception {
AdPlaybackState initialAdPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */
TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ 5 * C.MICROS_PER_SECOND);
Timeline fakeTimeline =
@ -809,7 +809,7 @@ public final class ExoPlayerTest {
public void adGroupWithLoadError_withFurtherAdGroup_isSkipped() throws Exception {
AdPlaybackState initialAdPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */
TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ 5 * C.MICROS_PER_SECOND,
TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
@ -2637,7 +2637,7 @@ public final class ExoPlayerTest {
new ActionSchedule.Builder(TAG)
.pause()
.waitForTimelineChanged(timeline, Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE)
.sendMessage(target, /* mediaItemIndex = */ 2, /* positionMs= */ 50)
.sendMessage(target, /* mediaItemIndex= */ 2, /* positionMs= */ 50)
.play()
.build();
new ExoPlayerTestRunner.Builder(context)
@ -2659,7 +2659,7 @@ public final class ExoPlayerTest {
.pause()
.waitForTimelineChanged(
timeline, /* expectedReason= */ Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE)
.sendMessage(target, /* mediaItemIndex = */ 2, /* positionMs= */ 50)
.sendMessage(target, /* mediaItemIndex= */ 2, /* positionMs= */ 50)
.play()
.build();
new ExoPlayerTestRunner.Builder(context)
@ -2690,7 +2690,7 @@ public final class ExoPlayerTest {
.pause()
.waitForTimelineChanged(
timeline, /* expectedReason= */ Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE)
.sendMessage(target, /* mediaItemIndex = */ 1, /* positionMs= */ 50)
.sendMessage(target, /* mediaItemIndex= */ 1, /* positionMs= */ 50)
.executeRunnable(() -> mediaSource.setNewSourceInfo(secondTimeline))
.waitForTimelineChanged(
secondTimeline, /* expectedReason= */ Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE)
@ -2724,9 +2724,9 @@ public final class ExoPlayerTest {
new ActionSchedule.Builder(TAG)
.pause()
.waitForPlaybackState(Player.STATE_READY)
.sendMessage(target1, /* mediaItemIndex = */ 0, /* positionMs= */ 50)
.sendMessage(target2, /* mediaItemIndex = */ 1, /* positionMs= */ 50)
.sendMessage(target3, /* mediaItemIndex = */ 2, /* positionMs= */ 50)
.sendMessage(target1, /* mediaItemIndex= */ 0, /* positionMs= */ 50)
.sendMessage(target2, /* mediaItemIndex= */ 1, /* positionMs= */ 50)
.sendMessage(target3, /* mediaItemIndex= */ 2, /* positionMs= */ 50)
.setShuffleModeEnabled(true)
.seek(/* mediaItemIndex= */ 2, /* positionMs= */ 0)
.play()
@ -2938,8 +2938,7 @@ public final class ExoPlayerTest {
AdPlaybackState adPlaybackStateWithMidroll =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1,
/* adGroupTimesUs...= */ TimelineWindowDefinition
.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
/* adGroupTimesUs= */ TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ 5 * C.MICROS_PER_SECOND);
Timeline timeline2 =
new FakeTimeline(
@ -2990,8 +2989,7 @@ public final class ExoPlayerTest {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1,
/* adGroupTimesUs...= */ TimelineWindowDefinition
.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
/* adGroupTimesUs= */ TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ Util.msToUs(adGroupWindowTimeMs));
Timeline timeline =
new FakeTimeline(
@ -3624,7 +3622,7 @@ public final class ExoPlayerTest {
@Test
public void contentWithInitialSeekPositionAfterPrerollAdStartsAtSeekPosition() throws Exception {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 3, /* adGroupTimesUs...= */ 0);
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 3, /* adGroupTimesUs= */ 0);
Timeline fakeTimeline =
new FakeTimeline(
new TimelineWindowDefinition(
@ -3673,7 +3671,7 @@ public final class ExoPlayerTest {
@Test
public void contentWithoutInitialSeekStartsAtDefaultPositionAfterPrerollAd() throws Exception {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 3, /* adGroupTimesUs...= */ 0);
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 3, /* adGroupTimesUs= */ 0);
Timeline fakeTimeline =
new FakeTimeline(
new TimelineWindowDefinition(
@ -3727,7 +3725,7 @@ public final class ExoPlayerTest {
ExoPlayer player = new TestExoPlayerBuilder(context).build();
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */ 42_000_004_000_000L);
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */ 42_000_004_000_000L);
Timeline liveTimeline1 =
new FakeTimeline(
new TimelineWindowDefinition(
@ -4732,7 +4730,7 @@ public final class ExoPlayerTest {
long contentDurationMs = 10_000;
long adDurationMs = 100_000;
AdPlaybackState adPlaybackState =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0);
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0);
adPlaybackState = adPlaybackState.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1);
adPlaybackState =
adPlaybackState.withAvailableAdUri(
@ -4834,7 +4832,7 @@ public final class ExoPlayerTest {
long contentDurationMs = 10_000;
long adDurationMs = 4_000;
AdPlaybackState adPlaybackState =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0);
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0);
adPlaybackState = adPlaybackState.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1);
adPlaybackState =
adPlaybackState.withAvailableAdUri(
@ -4917,7 +4915,7 @@ public final class ExoPlayerTest {
long contentDurationMs = 10_000;
long adDurationMs = 4_000;
AdPlaybackState adPlaybackState =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0)
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(
/* adGroupIndex= */ 0,
@ -4962,7 +4960,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
false);
@ -5053,7 +5051,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
false,
false);
@ -5115,7 +5113,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
false);
@ -5182,7 +5180,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
false);
@ -5263,7 +5261,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ 2,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
false);
@ -5318,7 +5316,7 @@ public final class ExoPlayerTest {
FakeTimeline.createMultiPeriodAdTimeline(
"windowId",
/* numberOfPlayedAds= */ Integer.MAX_VALUE,
/* isAdPeriodFlags...= */ true,
/* isAdPeriodFlags= */ true,
false,
true,
true,
@ -5386,28 +5384,28 @@ public final class ExoPlayerTest {
new AdPlaybackState("adsId"),
/* fromPositionUs= */ DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ C.MICROS_PER_SECOND);
/* adDurationsUs= */ C.MICROS_PER_SECOND);
adPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ (3 * C.MICROS_PER_SECOND),
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ C.MICROS_PER_SECOND);
/* adDurationsUs= */ C.MICROS_PER_SECOND);
adPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ (5 * C.MICROS_PER_SECOND),
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ C.MICROS_PER_SECOND);
/* adDurationsUs= */ C.MICROS_PER_SECOND);
adPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US
+ (9 * C.MICROS_PER_SECOND),
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ C.MICROS_PER_SECOND);
/* adDurationsUs= */ C.MICROS_PER_SECOND);
adPlaybackState =
adPlaybackState.withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0);
adPlaybackState =
@ -5653,7 +5651,7 @@ public final class ExoPlayerTest {
@Test
public void shortAdFollowedByUnpreparedAd_playbackDoesNotGetStuck() throws Exception {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 2, /* adGroupTimesUs...= */ 0);
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 2, /* adGroupTimesUs= */ 0);
long shortAdDurationMs = 1_000;
adPlaybackState =
adPlaybackState.withAdDurationsUs(new long[][] {{shortAdDurationMs, shortAdDurationMs}});
@ -8081,7 +8079,7 @@ public final class ExoPlayerTest {
@Test
public void seekToCurrentPosition_inEndedState_switchesToBufferingStateAndContinuesPlayback()
throws Exception {
MediaSource mediaSource = new FakeMediaSource(new FakeTimeline(/* windowCount = */ 1));
MediaSource mediaSource = new FakeMediaSource(new FakeTimeline(/* windowCount= */ 1));
AtomicInteger mediaItemIndexAfterFinalEndedState = new AtomicInteger();
ActionSchedule actionSchedule =
new ActionSchedule.Builder(TAG)
@ -8552,8 +8550,8 @@ public final class ExoPlayerTest {
/* isDynamic= */ false,
/* isLive= */ false,
/* isPlaceholder= */ false,
/* durationUs = */ 100_000,
/* defaultPositionUs = */ 0,
/* durationUs= */ 100_000,
/* defaultPositionUs= */ 0,
/* windowOffsetInFirstPeriodUs= */ 0,
ImmutableList.of(AdPlaybackState.NONE),
MediaItem.fromUri("http://foo.bar/fake1"));
@ -8566,8 +8564,8 @@ public final class ExoPlayerTest {
/* isDynamic= */ false,
/* isLive= */ false,
/* isPlaceholder= */ false,
/* durationUs = */ 100_000,
/* defaultPositionUs = */ 0,
/* durationUs= */ 100_000,
/* defaultPositionUs= */ 0,
/* windowOffsetInFirstPeriodUs= */ 0,
ImmutableList.of(AdPlaybackState.NONE),
MediaItem.fromUri("http://foo.bar/fake2"));
@ -8580,8 +8578,8 @@ public final class ExoPlayerTest {
/* isDynamic= */ false,
/* isLive= */ false,
/* isPlaceholder= */ false,
/* durationUs = */ 100_000,
/* defaultPositionUs = */ 0,
/* durationUs= */ 100_000,
/* defaultPositionUs= */ 0,
/* windowOffsetInFirstPeriodUs= */ 0,
ImmutableList.of(AdPlaybackState.NONE),
MediaItem.fromUri("http://foo.bar/fake3"));
@ -9065,7 +9063,7 @@ public final class ExoPlayerTest {
@Test
public void isCommandAvailable_duringAd_isFalseForSeekCommands() throws Exception {
AdPlaybackState adPlaybackState =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0)
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(
/* adGroupIndex= */ 0,
@ -10036,7 +10034,7 @@ public final class ExoPlayerTest {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1,
/* adGroupTimesUs...= */ 0,
/* adGroupTimesUs= */ 0,
7 * C.MICROS_PER_SECOND,
C.TIME_END_OF_SOURCE);
TimelineWindowDefinition adTimelineDefinition =
@ -10625,7 +10623,7 @@ public final class ExoPlayerTest {
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 2,
/* adGroupTimesUs...= */ 0,
/* adGroupTimesUs= */ 0,
7 * C.MICROS_PER_SECOND,
C.TIME_END_OF_SOURCE);
TimelineWindowDefinition adTimeline =
@ -10762,7 +10760,7 @@ public final class ExoPlayerTest {
player.addListener(listener);
AdPlaybackState adPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */ 2 * C.MICROS_PER_SECOND);
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */ 2 * C.MICROS_PER_SECOND);
TimelineWindowDefinition adTimeline =
new TimelineWindowDefinition(
/* periodCount= */ 1,
@ -10851,7 +10849,7 @@ public final class ExoPlayerTest {
player.addListener(listener);
AdPlaybackState postRollAdPlaybackState =
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE);
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */ C.TIME_END_OF_SOURCE);
TimelineWindowDefinition postRollWindow =
new TimelineWindowDefinition(
/* periodCount= */ 1,
@ -10865,7 +10863,7 @@ public final class ExoPlayerTest {
/* windowOffsetInFirstPeriodUs= */ 0,
postRollAdPlaybackState);
AdPlaybackState preRollAdPlaybackState =
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */ 0);
FakeTimeline.createAdPlaybackState(/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */ 0);
TimelineWindowDefinition preRollWindow =
new TimelineWindowDefinition(
/* periodCount= */ 1,
@ -11765,7 +11763,7 @@ public final class ExoPlayerTest {

player.prepare();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
player.seekTo(/* positionMs = */ 0);
player.seekTo(/* positionMs= */ 0);
player.seekToNext();

assertThat(player.getCurrentMediaItemIndex()).isEqualTo(0);
@ -11903,8 +11901,7 @@ public final class ExoPlayerTest {
.build();
// Live stream timeline with unassigned next ad group.
AdPlaybackState initialAdPlaybackState =
new AdPlaybackState(
/* adsId= */ new Object(), /* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE)
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ C.TIME_END_OF_SOURCE)
.withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAdDurationsUs(new long[][] {new long[] {10 * C.MICROS_PER_SECOND}});

@ -127,7 +127,7 @@ public final class MediaPeriodQueueTest {

@Test
public void getNextMediaPeriodInfo_withPrerollAd_returnsCorrectMediaPeriodInfos() {
setupAdTimeline(/* adGroupTimesUs...= */ 0);
setupAdTimeline(/* adGroupTimesUs= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 0);
assertNextMediaPeriodInfoIsAd(
/* adGroupIndex= */ 0,
@ -149,7 +149,7 @@ public final class MediaPeriodQueueTest {

@Test
public void getNextMediaPeriodInfo_withMidrollAds_returnsCorrectMediaPeriodInfos() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
/* periodUid= */ firstPeriodUid,
/* startPositionUs= */ 0,
@ -205,7 +205,7 @@ public final class MediaPeriodQueueTest {

@Test
public void getNextMediaPeriodInfo_withMidrollAndPostroll_returnsCorrectMediaPeriodInfos() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
/* periodUid= */ firstPeriodUid,
/* startPositionUs= */ 0,
@ -259,7 +259,7 @@ public final class MediaPeriodQueueTest {
adPlaybackState =
new AdPlaybackState(
/* adsId= */ new Object(),
/* adGroupTimesUs...= */ 0,
/* adGroupTimesUs= */ 0,
FIRST_AD_START_TIME_US,
C.TIME_END_OF_SOURCE)
.withContentDurationUs(CONTENT_DURATION_US)
@ -330,7 +330,7 @@ public final class MediaPeriodQueueTest {
adPlaybackState =
new AdPlaybackState(
/* adsId= */ new Object(),
/* adGroupTimesUs...= */ 0,
/* adGroupTimesUs= */ 0,
FIRST_AD_START_TIME_US,
SECOND_AD_START_TIME_US)
.withContentDurationUs(CONTENT_DURATION_US)
@ -398,7 +398,7 @@ public final class MediaPeriodQueueTest {

@Test
public void getNextMediaPeriodInfo_withPostrollLoadError_returnsEmptyFinalMediaPeriodInfo() {
setupAdTimeline(/* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE);
setupAdTimeline(/* adGroupTimesUs= */ C.TIME_END_OF_SOURCE);
assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
/* periodUid= */ firstPeriodUid,
/* startPositionUs= */ 0,
@ -425,7 +425,7 @@ public final class MediaPeriodQueueTest {

@Test
public void getNextMediaPeriodInfo_withPlayedAdGroups_returnsCorrectMediaPeriodInfos() {
setupAdTimeline(/* adGroupTimesUs...= */ 0, FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
setupAdTimeline(/* adGroupTimesUs= */ 0, FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
setAdGroupLoaded(/* adGroupIndex= */ 2);
@ -509,14 +509,14 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeInPlayingContent_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
enqueueNext(); // Content before ad.
enqueueNext(); // Ad.
enqueueNext(); // Content after ad.

// Change position of first ad (= change duration of playing content before first ad).
updateAdPlaybackStateAndTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000);
updateAdPlaybackStateAndTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US - 2000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
long maxRendererReadPositionUs =
MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US - 3000;
@ -537,14 +537,14 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeInPlayingContentAfterReadingPosition_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
enqueueNext(); // Content before ad.
enqueueNext(); // Ad.
enqueueNext(); // Content after ad.

// Change position of first ad (= change duration of playing content before first ad).
updateAdPlaybackStateAndTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000);
updateAdPlaybackStateAndTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US - 2000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
long maxRendererReadPositionUs =
MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US - 1000;
@ -579,7 +579,7 @@ public final class MediaPeriodQueueTest {
// Change position of first ad (= change duration of playing content before first ad).
adPlaybackState =
new AdPlaybackState(
/* adsId= */ new Object(), /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000)
/* adsId= */ new Object(), /* adGroupTimesUs= */ FIRST_AD_START_TIME_US - 2000)
.withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true);
updateTimeline();
setAdGroupLoaded(/* adGroupIndex= */ 0);
@ -602,7 +602,7 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeAfterReadingPeriod_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
enqueueNext(); // Content before first ad.
@ -612,7 +612,7 @@ public final class MediaPeriodQueueTest {

// Change position of second ad (= change duration of content between ads).
updateAdPlaybackStateAndTimeline(
/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
boolean changeHandled =
@ -628,7 +628,7 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeBeforeReadingPeriod_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
enqueueNext(); // Content before first ad.
@ -641,7 +641,7 @@ public final class MediaPeriodQueueTest {

// Change position of second ad (= change duration of content between ads).
updateAdPlaybackStateAndTimeline(
/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
long maxRendererReadPositionUs =
@ -659,7 +659,7 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeInReadingPeriodAfterReadingPosition_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
enqueueNext(); // Content before first ad.
@ -671,7 +671,7 @@ public final class MediaPeriodQueueTest {

// Change position of second ad (= change duration of content between ads).
updateAdPlaybackStateAndTimeline(
/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
long readingPositionAtStartOfContentBetweenAds =
@ -691,7 +691,7 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeInReadingPeriodBeforeReadingPosition_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
enqueueNext(); // Content before first ad.
@ -703,7 +703,7 @@ public final class MediaPeriodQueueTest {

// Change position of second ad (= change duration of content between ads).
updateAdPlaybackStateAndTimeline(
/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
long readingPositionAtEndOfContentBetweenAds =
@ -723,7 +723,7 @@ public final class MediaPeriodQueueTest {
@Test
public void
updateQueuedPeriods_withDurationChangeInReadingPeriodReadToEnd_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setupAdTimeline(/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
enqueueNext(); // Content before first ad.
@ -735,7 +735,7 @@ public final class MediaPeriodQueueTest {

// Change position of second ad (= change duration of content between ads).
updateAdPlaybackStateAndTimeline(
/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
/* adGroupTimesUs= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
setAdGroupLoaded(/* adGroupIndex= */ 0);
setAdGroupLoaded(/* adGroupIndex= */ 1);
boolean changeHandled =
@ -838,7 +838,7 @@ public final class MediaPeriodQueueTest {
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ true,
/* isAdPeriodFlags= */ true,
false,
true,
true,
@ -873,7 +873,7 @@ public final class MediaPeriodQueueTest {
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 4,
/* isAdPeriodFlags...= */ true,
/* isAdPeriodFlags= */ true,
false,
true,
true,
@ -908,7 +908,7 @@ public final class MediaPeriodQueueTest {
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 2,
/* isAdPeriodFlags...= */ true,
/* isAdPeriodFlags= */ true,
false,
true,
true,
@ -932,7 +932,7 @@ public final class MediaPeriodQueueTest {
Object windowId = new Object();
Timeline timeline =
createMultiPeriodServerSideInsertedTimeline(
windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags...= */ false, true);
windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags= */ false, true);

MediaPeriodId mediaPeriodId =
mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
@ -951,7 +951,7 @@ public final class MediaPeriodQueueTest {
Object windowId = new Object();
Timeline timeline =
createMultiPeriodServerSideInsertedTimeline(
windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags...= */ false, true, false);
windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags= */ false, true, false);

MediaPeriodId mediaPeriodId =
mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
@ -970,7 +970,7 @@ public final class MediaPeriodQueueTest {
Object windowId = new Object();
Timeline timeline =
createMultiPeriodServerSideInsertedTimeline(
windowId, /* numberOfPlayedAds= */ 1, /* isAdPeriodFlags...= */ false, true, false);
windowId, /* numberOfPlayedAds= */ 1, /* isAdPeriodFlags= */ false, true, false);

MediaPeriodId mediaPeriodId =
mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
@ -989,7 +989,7 @@ public final class MediaPeriodQueueTest {
Object windowId = new Object();
Timeline timeline =
createMultiPeriodServerSideInsertedTimeline(
windowId, /* numberOfPlayedAds= */ 2, /* isAdPeriodFlags...= */ true, true, false);
windowId, /* numberOfPlayedAds= */ 2, /* isAdPeriodFlags= */ true, true, false);
MediaPeriodId mediaPeriodId =
mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
timeline, new Pair<>(windowId, 0), /* positionUs= */ 0);
@ -1009,7 +1009,7 @@ public final class MediaPeriodQueueTest {
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 4,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
true,
@ -1032,12 +1032,7 @@ public final class MediaPeriodQueueTest {
Object windowId = new Object();
Timeline timeline =
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ true,
false,
false,
false);
windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags= */ true, false, false, false);

MediaPeriodId mediaPeriodId =
mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
@ -1058,7 +1053,7 @@ public final class MediaPeriodQueueTest {
createMultiPeriodServerSideInsertedTimeline(
windowId,
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
false,
false,
false);

@ -451,7 +451,7 @@ public class MediaSourceListTest {

@Test
public void setMediaSources_expectTimelineUsesCustomShuffleOrder() {
Timeline timeline =
mediaSourceList.setMediaSources(createFakeHolders(), new FakeShuffleOrder(/* length=*/ 4));
mediaSourceList.setMediaSources(createFakeHolders(), new FakeShuffleOrder(/* length= */ 4));
assertTimelineUsesFakeShuffleOrder(timeline);
}

@ -1027,7 +1027,7 @@ public final class DefaultAnalyticsCollectorTest {
new AtomicReference<>(
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1,
/* adGroupTimesUs...= */ windowOffsetInFirstPeriodUs,
/* adGroupTimesUs= */ windowOffsetInFirstPeriodUs,
windowOffsetInFirstPeriodUs + 5 * C.MICROS_PER_SECOND,
C.TIME_END_OF_SOURCE));
AtomicInteger playedAdCount = new AtomicInteger(0);
@ -1300,7 +1300,7 @@ public final class DefaultAnalyticsCollectorTest {
/* isDynamic= */ false,
10 * C.MICROS_PER_SECOND,
FakeTimeline.createAdPlaybackState(
/* adsPerAdGroup= */ 1, /* adGroupTimesUs...= */
/* adsPerAdGroup= */ 1, /* adGroupTimesUs= */
windowOffsetInFirstPeriodUs + 5 * C.MICROS_PER_SECOND)));
FakeMediaSource fakeMediaSource =
new FakeMediaSource(

@ -1285,7 +1285,7 @@ public final class DefaultPlaybackSessionManagerTest {
private static EventTime createEventTime(
Timeline timeline, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
return new EventTime(
/* realtimeMs = */ 0,
/* realtimeMs= */ 0,
timeline,
windowIndex,
mediaPeriodId,
@ -1303,7 +1303,7 @@ public final class DefaultPlaybackSessionManagerTest {
@Nullable MediaPeriodId eventMediaPeriodId,
@Nullable MediaPeriodId currentMediaPeriodId) {
return new EventTime(
/* realtimeMs = */ 0,
/* realtimeMs= */ 0,
timeline,
windowIndex,
eventMediaPeriodId,

@ -289,11 +289,10 @@ public final class SilenceSkippingAudioProcessorTest {
Pcm16BitAudioBuilder audioBuilder = new Pcm16BitAudioBuilder(channelCount, totalFrameCount);
while (!audioBuilder.isFull()) {
int silenceDurationFrames = (silenceDurationMs * sampleRate) / 1000;
audioBuilder.appendFrames(
/* count= */ silenceDurationFrames, /* channelLevels...= */ (short) 0);
audioBuilder.appendFrames(/* count= */ silenceDurationFrames, /* channelLevels= */ (short) 0);
int noiseDurationFrames = (noiseDurationMs * sampleRate) / 1000;
audioBuilder.appendFrames(
/* count= */ noiseDurationFrames, /* channelLevels...= */ Short.MAX_VALUE);
/* count= */ noiseDurationFrames, /* channelLevels= */ Short.MAX_VALUE);
}
return new InputBufferProvider(audioBuilder.build());
}

@ -363,7 +363,7 @@ public final class ConcatenatingMediaSourceTest {
new FakeMediaSource(Timeline.EMPTY),
new FakeMediaSource(Timeline.EMPTY),
};
Timeline nonEmptyTimeline = new FakeTimeline(/* windowCount = */ 1);
Timeline nonEmptyTimeline = new FakeTimeline(/* windowCount= */ 1);

mediaSource.addMediaSources(Arrays.asList(childSources));
Timeline timeline = testRunner.prepareSource();

@ -79,7 +79,7 @@ public final class AdsMediaSourceTest {
CONTENT_TIMELINE.getUidOfPeriod(/* periodIndex= */ 0);

private static final AdPlaybackState AD_PLAYBACK_STATE =
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0)
new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs= */ 0)
.withContentDurationUs(CONTENT_DURATION_US)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(

@ -91,19 +91,16 @@ public final class ServerSideAdInsertionMediaSourceTest {
// Test with one ad group before the window, and the window starting within the second ad group.
AdPlaybackState adPlaybackState =
new AdPlaybackState(
/* adsId= */ new Object(), /* adGroupTimesUs...= */
15_000_000,
41_500_000,
42_200_000)
/* adsId= */ new Object(), /* adGroupTimesUs= */ 15_000_000, 41_500_000, 42_200_000)
.withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs...= */ 500_000)
.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 300_000, 100_000)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 400_000)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs= */ 500_000)
.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs= */ 300_000, 100_000)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 400_000)
.withContentResumeOffsetUs(/* adGroupIndex= */ 0, /* contentResumeOffsetUs= */ 100_000)
.withContentResumeOffsetUs(/* adGroupIndex= */ 1, /* contentResumeOffsetUs= */ 400_000)
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 200_000);
@ -184,19 +181,19 @@ public final class ServerSideAdInsertionMediaSourceTest {
adPlaybackState,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 200_000);
/* adDurationsUs= */ 200_000);
adPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ 400_000,
/* contentResumeOffsetUs= */ 1_000_000,
/* adDurationsUs...= */ 300_000);
/* adDurationsUs= */ 300_000);
AdPlaybackState firstAdPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ 900_000,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 100_000);
/* adDurationsUs= */ 100_000);

AtomicReference<ServerSideAdInsertionMediaSource> mediaSourceRef = new AtomicReference<>();
mediaSourceRef.set(
@ -254,7 +251,7 @@ public final class ServerSideAdInsertionMediaSourceTest {
new AdPlaybackState(/* adsId= */ new Object()),
/* fromPositionUs= */ 900_000,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 100_000);
/* adDurationsUs= */ 100_000);
AtomicReference<ServerSideAdInsertionMediaSource> mediaSourceRef = new AtomicReference<>();
mediaSourceRef.set(
new ServerSideAdInsertionMediaSource(
@ -282,7 +279,7 @@ public final class ServerSideAdInsertionMediaSourceTest {
firstAdPlaybackState,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 500_000);
/* adDurationsUs= */ 500_000);
mediaSourceRef
.get()
.setAdPlaybackStates(ImmutableMap.of(periodUid.get(), secondAdPlaybackState));
@ -325,7 +322,7 @@ public final class ServerSideAdInsertionMediaSourceTest {
new AdPlaybackState(/* adsId= */ new Object()),
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 500_000);
/* adDurationsUs= */ 500_000);
AtomicReference<ServerSideAdInsertionMediaSource> mediaSourceRef = new AtomicReference<>();
mediaSourceRef.set(
new ServerSideAdInsertionMediaSource(
@ -355,7 +352,7 @@ public final class ServerSideAdInsertionMediaSourceTest {
firstAdPlaybackState
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 3)
.withAdDurationsUs(
/* adGroupIndex= */ 0, /* adDurationsUs...= */ 50_000, 250_000, 200_000);
/* adGroupIndex= */ 0, /* adDurationsUs= */ 50_000, 250_000, 200_000);
mediaSourceRef
.get()
.setAdPlaybackStates(ImmutableMap.of(periodUid.get(), secondAdPlaybackState));
@ -393,19 +390,19 @@ public final class ServerSideAdInsertionMediaSourceTest {
adPlaybackState,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 100_000);
/* adDurationsUs= */ 100_000);
adPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ 600_000,
/* contentResumeOffsetUs= */ 1_000_000,
/* adDurationsUs...= */ 100_000);
/* adDurationsUs= */ 100_000);
AdPlaybackState firstAdPlaybackState =
addAdGroupToAdPlaybackState(
adPlaybackState,
/* fromPositionUs= */ 900_000,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 100_000);
/* adDurationsUs= */ 100_000);

AtomicReference<ServerSideAdInsertionMediaSource> mediaSourceRef = new AtomicReference<>();
mediaSourceRef.set(

@ -39,7 +39,7 @@ public final class ServerSideAdInsertionUtilTest {
@Test
public void addAdGroupToAdPlaybackState_insertsCorrectAdGroupData() {
AdPlaybackState state =
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 1, C.TIME_END_OF_SOURCE)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 1, C.TIME_END_OF_SOURCE)
.withRemovedAdGroupCount(2);

// stream: 0-- content --4300-- ad1 --4500-- content
@ -49,16 +49,16 @@ public final class ServerSideAdInsertionUtilTest {
state,
/* fromPositionUs= */ 4300,
/* contentResumeOffsetUs= */ 400,
/* adDurationsUs...= */ 200);
/* adDurationsUs= */ 200);

assertThat(state)
.isEqualTo(
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 0, 4300, C.TIME_END_OF_SOURCE)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 0, 4300, C.TIME_END_OF_SOURCE)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
.withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 200));
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 200));

// stream: 0-- content --2100-- ad1 --2400-- content --4300-- ad2 --4500-- content
// content timeline: 0-2100 - [ad1] - 2100-4000 - [ad2] - 4400-end
@ -67,20 +67,20 @@ public final class ServerSideAdInsertionUtilTest {
state,
/* fromPositionUs= */ 2100,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 300);
/* adDurationsUs= */ 300);

assertThat(state)
.isEqualTo(
new AdPlaybackState(
ADS_ID, /* adGroupTimesUs...= */ 0, 0, 2100, 4000, C.TIME_END_OF_SOURCE)
ADS_ID, /* adGroupTimesUs= */ 0, 0, 2100, 4000, C.TIME_END_OF_SOURCE)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
.withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
.withIsServerSideInserted(/* adGroupIndex= */ 3, /* isServerSideInserted= */ true)
.withContentResumeOffsetUs(/* adGroupIndex= */ 3, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 200));
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 200));

// stream: 0-- ad1 --100-- content --2100-- ad2 --2400-- content --4300-- ad3 --4500-- content
// content timeline: 0 - [ad1] - 50-2050 -[ad2] - 2050-3950 - [ad3] - 4350-end
@ -89,12 +89,12 @@ public final class ServerSideAdInsertionUtilTest {
state,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 50,
/* adDurationsUs...= */ 100);
/* adDurationsUs= */ 100);

assertThat(state)
.isEqualTo(
new AdPlaybackState(
ADS_ID, /* adGroupTimesUs...= */ 0, 0, 0, 2050, 3950, C.TIME_END_OF_SOURCE)
ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2050, 3950, C.TIME_END_OF_SOURCE)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
@ -104,9 +104,9 @@ public final class ServerSideAdInsertionUtilTest {
.withIsServerSideInserted(/* adGroupIndex= */ 4, /* isServerSideInserted= */ true)
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 50)
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 100)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 200));
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 100)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 200));

// stream: 0-- ad1 --100-- c --2100-- ad2 --2400-- c --4300-- ad3 --4500-- c --5000-- ad4 --6000
// content timeline: 0 - [ad1] - 50-2050 -[ad2] - 2050-3950 - [ad3] - 4350-4850 - [ad4] - 4850
@ -115,19 +115,12 @@ public final class ServerSideAdInsertionUtilTest {
state,
/* fromPositionUs= */ 5000,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 1000);
/* adDurationsUs= */ 1000);

assertThat(state)
.isEqualTo(
new AdPlaybackState(
ADS_ID, /* adGroupTimesUs...= */
0,
0,
0,
2050,
3950,
4850,
C.TIME_END_OF_SOURCE)
ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2050, 3950, 4850, C.TIME_END_OF_SOURCE)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
@ -139,10 +132,10 @@ public final class ServerSideAdInsertionUtilTest {
.withIsServerSideInserted(/* adGroupIndex= */ 5, /* isServerSideInserted= */ true)
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 50)
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 100)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 5, /* adDurationsUs...= */ 1000));
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 100)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 300)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 5, /* adDurationsUs= */ 1000));
}

@Test
@ -154,7 +147,7 @@ public final class ServerSideAdInsertionUtilTest {
state,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 50_000,
/* adDurationsUs...= */ 0,
/* adDurationsUs= */ 0,
0,
10_000,
40_000,
@ -177,7 +170,7 @@ public final class ServerSideAdInsertionUtilTest {
// stream: 0-- ad1 --200-- content --2100-- ad2 --2300-- content --4300-- ad3 --4500-- content
// content timeline: 0 - [ad1] - 100-2000 -[ad2] - 2000-4000 - [ad3] - 4400-end
AdPlaybackState state =
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 0, 0, 2000, 4000)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2000, 4000)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
@ -185,9 +178,9 @@ public final class ServerSideAdInsertionUtilTest {
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 100)
.withContentResumeOffsetUs(/* adGroupIndex= */ 3, /* contentResumeOffsetUs= */ 0)
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 50, 50, 100);
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 50, 50, 100);

assertThat(
getStreamPositionUsForAd(
@ -277,7 +270,7 @@ public final class ServerSideAdInsertionUtilTest {
// stream: 0-- ad1 --200-- content --2100-- ad2 --2300-- content --4300-- ad3 --4500-- content
// content timeline: 0 - [ad1] - 100-2000 -[ad2] - 2000-4000 - [ad3] - 4400-end
AdPlaybackState state =
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 0, 0, 2000, 4000)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2000, 4000)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
@ -285,9 +278,9 @@ public final class ServerSideAdInsertionUtilTest {
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 100)
.withContentResumeOffsetUs(/* adGroupIndex= */ 3, /* contentResumeOffsetUs= */ 0)
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 50, 50, 100);
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 50, 50, 100);

assertThat(
getMediaPeriodPositionUsForAd(
@ -382,7 +375,7 @@ public final class ServerSideAdInsertionUtilTest {
// stream: 0-- ad1 --200-- content --2100-- ad2 --2300-- content --4300-- ad3 --4500-- content
// content timeline: 0 - [ad1] - 100-2000 -[ad2] - 2000-4000 - [ad3] - 4400-end
AdPlaybackState state =
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 0, 0, 2000, 4000)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2000, 4000)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 2)
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
@ -390,9 +383,9 @@ public final class ServerSideAdInsertionUtilTest {
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 100)
.withContentResumeOffsetUs(/* adGroupIndex= */ 3, /* contentResumeOffsetUs= */ 0)
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 50, 50, 100);
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 150, 50)
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 200)
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 50, 50, 100);

assertThat(getStreamPositionUsForContent(/* positionUs= */ 0, /* nextAdGroupIndex= */ 2, state))
.isEqualTo(0);
@ -478,7 +471,7 @@ public final class ServerSideAdInsertionUtilTest {
// stream: 0-- ad1 --200-- content --2100-- ad2 --2300-- content --4300-- ad3 --4500-- content
// content timeline: 0 - [ad1] - 100-2000 -[ad2] - 2000-4000 - [ad3] - 4400-end
AdPlaybackState state =
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 0, 0, 2000, 4000)
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 0, 0, 2000, 4000)
.withRemovedAdGroupCount(2)
.withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 2)
|
||||
.withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
|
||||
@ -486,9 +479,9 @@ public final class ServerSideAdInsertionUtilTest {
|
||||
.withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 100)
|
||||
.withContentResumeOffsetUs(/* adGroupIndex= */ 3, /* contentResumeOffsetUs= */ 0)
|
||||
.withContentResumeOffsetUs(/* adGroupIndex= */ 4, /* contentResumeOffsetUs= */ 400)
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 150, 50)
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs...= */ 200)
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs...= */ 50, 50, 100);
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ 150, 50)
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 3, /* adDurationsUs= */ 200)
|
||||
.withAdDurationsUs(/* adGroupIndex= */ 4, /* adDurationsUs= */ 50, 50, 100);
|
||||
|
||||
assertThat(
|
||||
getMediaPeriodPositionUsForContent(
|
||||
@ -595,7 +588,7 @@ public final class ServerSideAdInsertionUtilTest {
|
||||
|
||||
@Test
|
||||
public void getAdCountInGroup_withUnsetCount_returnsZero() {
|
||||
AdPlaybackState state = new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 2000);
|
||||
AdPlaybackState state = new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 2000);
|
||||
|
||||
assertThat(getAdCountInGroup(state, /* adGroupIndex= */ 0)).isEqualTo(0);
|
||||
assertThat(getAdCountInGroup(state, /* adGroupIndex= */ 1)).isEqualTo(0);
|
||||
@ -604,7 +597,7 @@ public final class ServerSideAdInsertionUtilTest {
|
||||
@Test
|
||||
public void getAdCountInGroup_withSetCount_returnsCount() {
|
||||
AdPlaybackState state =
|
||||
new AdPlaybackState(ADS_ID, /* adGroupTimesUs...= */ 0, 2000)
|
||||
new AdPlaybackState(ADS_ID, /* adGroupTimesUs= */ 0, 2000)
|
||||
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 4)
|
||||
.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 6);
|
||||
|
||||
|
@ -53,14 +53,14 @@ public class ExoplayerCuesDecoderTest {
@Test
public void decoder_outputsSubtitle() throws Exception {
SubtitleInputBuffer inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);
SubtitleOutputBuffer outputBuffer = decoder.dequeueOutputBuffer();

assertThat(outputBuffer.getCues(/* timeUs=*/ 999)).isEmpty();
assertThat(outputBuffer.getCues(/* timeUs= */ 999)).isEmpty();
assertThat(outputBuffer.getCues(1001)).hasSize(1);
assertThat(outputBuffer.getCues(/* timeUs=*/ 1000)).hasSize(1);
assertThat(outputBuffer.getCues(/* timeUs=*/ 1000).get(0).text.toString()).isEqualTo("text");
assertThat(outputBuffer.getCues(/* timeUs= */ 1000)).hasSize(1);
assertThat(outputBuffer.getCues(/* timeUs= */ 1000).get(0).text.toString()).isEqualTo("text");

outputBuffer.release();
}
@ -75,7 +75,7 @@ public class ExoplayerCuesDecoderTest {
// Returns null before input has been queued
assertThat(decoder.dequeueOutputBuffer()).isNull();

writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);

// Returns buffer when the input buffer is queued and output buffer is available
@ -89,7 +89,7 @@ public class ExoplayerCuesDecoderTest {
public void dequeueOutputBuffer_releasedOutputAndQueuedNextInput_returnsOutputBuffer()
throws Exception {
SubtitleInputBuffer inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);
SubtitleOutputBuffer outputBuffer = decoder.dequeueOutputBuffer();
exhaustAllOutputBuffers(decoder);
@ -113,7 +113,7 @@ public class ExoplayerCuesDecoderTest {
@Test
public void dequeueInputBuffer_withQueuedInput_returnsNull() throws Exception {
SubtitleInputBuffer inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);

assertThat(decoder.dequeueInputBuffer()).isNull();
@ -136,7 +136,7 @@ public class ExoplayerCuesDecoderTest {
@Test
public void releaseOutputBuffer_calledTwice_fails() throws Exception {
SubtitleInputBuffer inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);
SubtitleOutputBuffer outputBuffer = decoder.dequeueOutputBuffer();
outputBuffer.release();
@ -147,14 +147,14 @@ public class ExoplayerCuesDecoderTest {
@Test
public void flush_doesNotInfluenceOutputBufferAvailability() throws Exception {
SubtitleInputBuffer inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);
SubtitleOutputBuffer outputBuffer = decoder.dequeueOutputBuffer();
assertThat(outputBuffer).isNotNull();
exhaustAllOutputBuffers(decoder);
decoder.flush();
inputBuffer = decoder.dequeueInputBuffer();
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);

assertThat(decoder.dequeueOutputBuffer()).isNull();
}
@ -169,7 +169,7 @@ public class ExoplayerCuesDecoderTest {
inputBuffer = decoder.dequeueInputBuffer();
}
for (int i = 0; i < inputBuffers.size(); i++) {
writeDataToInputBuffer(inputBuffers.get(i), /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffers.get(i), /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffers.get(i));
}
decoder.flush();
@ -185,7 +185,7 @@ public class ExoplayerCuesDecoderTest {
do {
inputBuffer = decoder.dequeueInputBuffer();
if (inputBuffer != null) {
writeDataToInputBuffer(inputBuffer, /* timeUs=*/ 1000, ENCODED_CUES);
writeDataToInputBuffer(inputBuffer, /* timeUs= */ 1000, ENCODED_CUES);
decoder.queueInputBuffer(inputBuffer);
}
} while (decoder.dequeueOutputBuffer() != null);

@ -570,9 +570,9 @@ public final class AdaptiveTrackSelectionTest {
TrackGroup trackGroupMultipleFixed = new TrackGroup(formatFixed1, formatFixed2);
TrackGroup trackGroupAdaptive =
new TrackGroup(formatAdaptive1, formatAdaptive2, formatAdaptive3, formatAdaptive4);
Definition definitionFixed1 = new Definition(trackGroupMultipleFixed, /* tracks...= */ 0);
Definition definitionFixed2 = new Definition(trackGroupMultipleFixed, /* tracks...= */ 1);
Definition definitionAdaptive = new Definition(trackGroupAdaptive, /* tracks...= */ 1, 2, 3);
Definition definitionFixed1 = new Definition(trackGroupMultipleFixed, /* tracks= */ 0);
Definition definitionFixed2 = new Definition(trackGroupMultipleFixed, /* tracks= */ 1);
Definition definitionAdaptive = new Definition(trackGroupAdaptive, /* tracks= */ 1, 2, 3);
List<List<AdaptationCheckpoint>> checkPoints = new ArrayList<>();
AdaptiveTrackSelection.Factory factory =
new AdaptiveTrackSelection.Factory() {
@ -621,9 +621,9 @@ public final class AdaptiveTrackSelectionTest {
TrackGroup trackGroup2 =
new TrackGroup(group2Format1, group2Format2, group2Format3, group2UnusedFormat);
TrackGroup fixedGroup = new TrackGroup(fixedFormat);
Definition definition1 = new Definition(trackGroup1, /* tracks...= */ 0, 1);
Definition definition2 = new Definition(trackGroup2, /* tracks...= */ 0, 1, 2);
Definition fixedDefinition = new Definition(fixedGroup, /* tracks...= */ 0);
Definition definition1 = new Definition(trackGroup1, /* tracks= */ 0, 1);
Definition definition2 = new Definition(trackGroup2, /* tracks= */ 0, 1, 2);
Definition fixedDefinition = new Definition(fixedGroup, /* tracks= */ 0);
List<List<AdaptationCheckpoint>> checkPoints = new ArrayList<>();
AdaptiveTrackSelection.Factory factory =
new AdaptiveTrackSelection.Factory() {

@ -2173,10 +2173,8 @@ public final class DefaultTrackSelectorTest {
TIMELINE);

assertThat(result.length).isEqualTo(2);
assertAdaptiveSelection(
result.selections[0], trackGroups.get(0), /* expectedTracks...= */ 1, 0);
assertAdaptiveSelection(
result.selections[1], trackGroups.get(1), /* expectedTracks...= */ 1, 0);
assertAdaptiveSelection(result.selections[0], trackGroups.get(0), /* expectedTracks= */ 1, 0);
assertAdaptiveSelection(result.selections[1], trackGroups.get(1), /* expectedTracks= */ 1, 0);

// Multiple adaptive selection disallowed.
trackSelector.setParameters(
@ -2189,8 +2187,7 @@ public final class DefaultTrackSelectorTest {
TIMELINE);

assertThat(result.length).isEqualTo(2);
assertAdaptiveSelection(
result.selections[0], trackGroups.get(0), /* expectedTracks...= */ 1, 0);
assertAdaptiveSelection(result.selections[0], trackGroups.get(0), /* expectedTracks= */ 1, 0);
assertFixedSelection(result.selections[1], trackGroups.get(1), /* expectedTrack= */ 1);
}

@ -2234,7 +2231,7 @@ public final class DefaultTrackSelectorTest {
trackSelector.selectTracks(
new RendererCapabilities[] {VIDEO_CAPABILITIES}, trackGroups, periodId, TIMELINE);
assertThat(result.length).isEqualTo(1);
assertAdaptiveSelection(result.selections[0], adaptiveGroup, /* expectedTracks...= */ 1, 0);
assertAdaptiveSelection(result.selections[0], adaptiveGroup, /* expectedTracks= */ 1, 0);

// Select default (=most tracks) if no preference is specified.
trackSelector.setParameters(defaultParameters.buildUpon().setPreferredVideoMimeType(null));
@ -2242,7 +2239,7 @@ public final class DefaultTrackSelectorTest {
trackSelector.selectTracks(
new RendererCapabilities[] {VIDEO_CAPABILITIES}, trackGroups, periodId, TIMELINE);
assertThat(result.length).isEqualTo(1);
assertAdaptiveSelection(result.selections[0], adaptiveGroup, /* expectedTracks...= */ 1, 0);
assertAdaptiveSelection(result.selections[0], adaptiveGroup, /* expectedTracks= */ 1, 0);
}

/**
@ -2421,7 +2418,7 @@ public final class DefaultTrackSelectorTest {
@Test
public void roundTripViaBundle_ofSelectionOverride_yieldsEqualInstance() {
SelectionOverride selectionOverrideToBundle =
new SelectionOverride(/* groupIndex= */ 1, /* tracks...= */ 2, 3);
new SelectionOverride(/* groupIndex= */ 1, /* tracks= */ 2, 3);

SelectionOverride selectionOverrideFromBundle =
DefaultTrackSelector.SelectionOverride.CREATOR.fromBundle(

@ -689,7 +689,7 @@ public final class HlsMediaPeriod implements MediaPeriod, HlsPlaylistTracker.Pla
sampleStreamWrapper.prepareWithMultivariantPlaylistInfo(
muxedTrackGroups.toArray(new TrackGroup[0]),
/* primaryTrackGroupIndex= */ 0,
/* optionalTrackGroupsIndices...= */ muxedTrackGroups.indexOf(id3TrackGroup));
/* optionalTrackGroupsIndices= */ muxedTrackGroups.indexOf(id3TrackGroup));
}
}

@ -682,7 +682,7 @@ public final class ImaServerSideAdInsertionMediaSource extends CompositeMediaSou
adPlaybackState,
/* fromPositionUs= */ fromPositionUs,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ getAdDuration(
/* adDurationsUs= */ getAdDuration(
/* startTimeSeconds= */ cuePoint.getStartTime(),
/* endTimeSeconds= */ cuePoint.getEndTime()));
}

@ -443,7 +443,7 @@ import java.util.Set;
private static AdPlaybackState splitAdGroupForPeriod(
Object adsId, AdGroup adGroup, long periodStartUs, long periodDurationUs) {
AdPlaybackState adPlaybackState =
new AdPlaybackState(checkNotNull(adsId), /* adGroupTimesUs...= */ 0)
new AdPlaybackState(checkNotNull(adsId), /* adGroupTimesUs= */ 0)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAdDurationsUs(/* adGroupIndex= */ 0, periodDurationUs)
.withIsServerSideInserted(/* adGroupIndex= */ 0, true)
@ -605,7 +605,7 @@ import java.util.Set;
adPlaybackState,
/* fromPositionUs= */ currentContentPeriodPositionUs,
/* contentResumeOffsetUs= */ sum(adDurationsUs),
/* adDurationsUs...= */ adDurationsUs);
/* adDurationsUs= */ adDurationsUs);
adGroupIndex =
adPlaybackState.getAdGroupIndexForPositionUs(
mediaPeriodPositionUs, /* periodDurationUs= */ C.TIME_UNSET);

@ -239,7 +239,7 @@ public final class ImaAdsLoaderTest {

assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US));
}

@ -312,7 +312,7 @@ public final class ImaAdsLoaderTest {
// Verify that the preroll ad has been marked as played.
assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0, TEST_URI)
@ -336,7 +336,7 @@ public final class ImaAdsLoaderTest {

assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 20_500_000)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 20_500_000)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US)
.withAdDurationsUs(new long[][] {{TEST_AD_DURATION_US}})
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
@ -384,7 +384,7 @@ public final class ImaAdsLoaderTest {

assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ C.TIME_END_OF_SOURCE)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US)
.withAdDurationsUs(new long[][] {{TEST_AD_DURATION_US}})
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
@ -1114,7 +1114,7 @@ public final class ImaAdsLoaderTest {
// Verify that the preroll ad has been marked as played.
assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0, TEST_URI)
@ -1123,7 +1123,7 @@ public final class ImaAdsLoaderTest {
.withAdResumePositionUs(/* adResumePositionUs= */ 0));
// Verify that the second source's ad cue points have preloaded.
assertThat(getAdPlaybackState(/* periodIndex= */ 1))
.isEqualTo(new AdPlaybackState(secondAdsId, /* adGroupTimesUs...= */ 0));
.isEqualTo(new AdPlaybackState(secondAdsId, /* adGroupTimesUs= */ 0));
}

@Test
@ -1181,7 +1181,7 @@ public final class ImaAdsLoaderTest {
// Verify that the preroll ad has been marked as played.
assertThat(getAdPlaybackState(/* periodIndex= */ 0))
.isEqualTo(
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0)
new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs= */ 0)
.withContentDurationUs(CONTENT_PERIOD_DURATION_US)
.withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
.withAvailableAdUri(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0, TEST_URI)
@ -1190,7 +1190,7 @@ public final class ImaAdsLoaderTest {
.withAdResumePositionUs(/* adResumePositionUs= */ 0));
// Verify that the second source's ad cue points have preloaded.
assertThat(getAdPlaybackState(/* periodIndex= */ 1))
.isEqualTo(new AdPlaybackState(secondAdsId, /* adGroupTimesUs...= */ 0));
.isEqualTo(new AdPlaybackState(secondAdsId, /* adGroupTimesUs= */ 0));
}

@Test

@ -37,7 +37,7 @@ public class ImaServerSideAdInsertionMediaSourceTest {
new AdPlaybackState("adsId1"),
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 10,
/* adDurationsUs...= */ 5_000_000,
/* adDurationsUs= */ 5_000_000,
10_000_000,
20_000_000);
AdPlaybackState secondAdPlaybackState =
@ -45,20 +45,20 @@ public class ImaServerSideAdInsertionMediaSourceTest {
new AdPlaybackState("adsId2"),
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 10,
/* adDurationsUs...= */ 10_000_000)
/* adDurationsUs= */ 10_000_000)
.withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0);
AdPlaybackState thirdAdPlaybackState =
ServerSideAdInsertionUtil.addAdGroupToAdPlaybackState(
new AdPlaybackState("adsId3"),
/* fromPositionUs= */ C.TIME_END_OF_SOURCE,
/* contentResumeOffsetUs= */ 10,
/* adDurationsUs...= */ 10_000_000);
/* adDurationsUs= */ 10_000_000);
thirdAdPlaybackState =
ServerSideAdInsertionUtil.addAdGroupToAdPlaybackState(
thirdAdPlaybackState,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 10,
/* adDurationsUs...= */ 10_000_000)
/* adDurationsUs= */ 10_000_000)
.withRemovedAdGroupCount(1);
State state =
new State(

@ -435,7 +435,7 @@ public class ImaUtilTest {
new AdPlaybackState(/* adsId= */ "adsId", 0)
.withAdCount(/* adGroupIndex= */ 0, periodCount)
.withAdDurationsUs(
/* adGroupIndex= */ 0, /* adDurationsUs...= */
/* adGroupIndex= */ 0, /* adDurationsUs= */
DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + periodDurationUs,
periodDurationUs,
periodDurationUs,
@ -475,12 +475,10 @@ public class ImaUtilTest {
.withAdCount(/* adGroupIndex= */ 1, 1)
.withAdCount(/* adGroupIndex= */ 2, 1)
.withAdDurationsUs(
/* adGroupIndex= */ 0, /* adDurationsUs...= */
/* adGroupIndex= */ 0, /* adDurationsUs= */
DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + (2 * periodDurationUs))
.withAdDurationsUs(
/* adGroupIndex= */ 1, /* adDurationsUs...= */ (2 * periodDurationUs))
.withAdDurationsUs(
/* adGroupIndex= */ 2, /* adDurationsUs...= */ (2 * periodDurationUs))
.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs= */ (2 * periodDurationUs))
.withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs= */ (2 * periodDurationUs))
.withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0)
.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0)
.withIsServerSideInserted(/* adGroupIndex= */ 0, true)
@ -522,7 +520,7 @@ public class ImaUtilTest {
// around removed.
DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + periodDurationUs + 2)
.withAdCount(/* adGroupIndex= */ 0, 1)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs...= */ periodDurationUs)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs= */ periodDurationUs)
.withIsServerSideInserted(/* adGroupIndex= */ 0, true);
FakeTimeline timeline =
new FakeTimeline(
@ -545,7 +543,7 @@ public class ImaUtilTest {
AdPlaybackState adPlaybackState =
new AdPlaybackState(/* adsId= */ "adsId", periodDurationUs - 1)
.withAdCount(/* adGroupIndex= */ 0, 1)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs...= */ periodDurationUs)
.withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs= */ periodDurationUs)
.withIsServerSideInserted(/* adGroupIndex= */ 0, true);
FakeTimeline timeline =
new FakeTimeline(
@ -569,7 +567,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 30_000_000);
/* adDurationsUs= */ 30_000_000);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -594,7 +592,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 30_000_000);
/* adDurationsUs= */ 30_000_000);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -619,7 +617,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 30_000_000);
/* adDurationsUs= */ 30_000_000);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -644,7 +642,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 30_000_000);
/* adDurationsUs= */ 30_000_000);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -667,7 +665,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 10_000_001);
/* adDurationsUs= */ 10_000_001);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -690,7 +688,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 30_000_000);
/* adDurationsUs= */ 30_000_000);

adPlaybackState =
ImaUtil.expandAdGroupPlaceholder(
@ -714,7 +712,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 10_000_000,
/* adDurationsUs= */ 10_000_000,
20_000_000,
0);

@ -739,7 +737,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 0,
/* adDurationsUs= */ 0,
10_000_000,
20_000_000);

@ -764,7 +762,7 @@ public class ImaUtilTest {
AdPlaybackState.NONE,
/* fromPositionUs= */ 0,
/* contentResumeOffsetUs= */ 0,
/* adDurationsUs...= */ 5_000_000,
/* adDurationsUs= */ 5_000_000,
10_000_000,
20_000_000);

@ -449,7 +449,9 @@ public final class DefaultExtractorsFactory implements ExtractorsFactory {

@Nullable
private static Constructor<? extends Extractor> getFlacExtractorConstructor()
throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
throws ClassNotFoundException,
NoSuchMethodException,
InvocationTargetException,
IllegalAccessException {
@SuppressWarnings("nullness:argument")
boolean isFlacNativeLibraryAvailable =
@ -470,7 +472,9 @@ public final class DefaultExtractorsFactory implements ExtractorsFactory {
public interface ConstructorSupplier {
@Nullable
Constructor<? extends Extractor> getConstructor()
throws InvocationTargetException, IllegalAccessException, NoSuchMethodException,
throws InvocationTargetException,
IllegalAccessException,
NoSuchMethodException,
ClassNotFoundException;
}

@ -656,7 +656,7 @@ public class FragmentedMp4Extractor implements Extractor {
}

byte[] messageData = new byte[atom.bytesLeft()];
atom.readBytes(messageData, /*offset=*/ 0, atom.bytesLeft());
atom.readBytes(messageData, /* offset= */ 0, atom.bytesLeft());
EventMessage eventMessage = new EventMessage(schemeIdUri, value, durationMs, id, messageData);
ParsableByteArray encodedEventMessage =
new ParsableByteArray(eventMessageEncoder.encode(eventMessage));

@ -376,7 +376,7 @@ import java.lang.annotation.Target;
DeviceInfo.UNKNOWN,
/* deviceVolume= */ 0,
/* deviceMuted= */ false,
/* playWhenReady = */ false,
/* playWhenReady= */ false,
PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST,
PLAYBACK_SUPPRESSION_REASON_NONE,
STATE_IDLE,

@ -109,7 +109,7 @@ public class FakeAdaptiveMediaPeriod
C.SELECTION_REASON_UNKNOWN,
/* trackSelectionData= */ null,
/* mediaStartTimeUs= */ 0,
/* mediaEndTimeUs = */ C.TIME_UNSET);
/* mediaEndTimeUs= */ C.TIME_UNSET);
this.callback = callback;
prepared = true;
Util.castNonNull(this.callback).onPrepared(this);
@ -128,7 +128,7 @@ public class FakeAdaptiveMediaPeriod
C.SELECTION_REASON_UNKNOWN,
/* trackSelectionData= */ null,
/* mediaStartTimeUs= */ 0,
/* mediaEndTimeUs = */ C.TIME_UNSET);
/* mediaEndTimeUs= */ C.TIME_UNSET);
}

@Override

@ -234,7 +234,7 @@ public class FakeMediaPeriod implements MediaPeriod {
C.SELECTION_REASON_UNKNOWN,
/* trackSelectionData= */ null,
/* mediaStartTimeUs= */ 0,
/* mediaEndTimeUs = */ C.TIME_UNSET);
/* mediaEndTimeUs= */ C.TIME_UNSET);
prepareCallback = callback;
if (deferOnPrepared) {
playerHandler = Util.createHandlerForCurrentLooper();
@ -418,7 +418,7 @@ public class FakeMediaPeriod implements MediaPeriod {
C.SELECTION_REASON_UNKNOWN,
/* trackSelectionData= */ null,
/* mediaStartTimeUs= */ 0,
/* mediaEndTimeUs = */ C.TIME_UNSET);
/* mediaEndTimeUs= */ C.TIME_UNSET);
}

private boolean isLoadingFinished() {

@ -389,7 +389,7 @@ public class FakeMediaSource extends BaseMediaSource {
C.SELECTION_REASON_UNKNOWN,
/* trackSelectionData= */ null,
/* mediaStartTimeMs= */ C.TIME_UNSET,
/* mediaEndTimeMs = */ C.TIME_UNSET);
/* mediaEndTimeMs= */ C.TIME_UNSET);
long elapsedRealTimeMs = SystemClock.elapsedRealtime();
MediaSourceEventListener.EventDispatcher eventDispatcher =
createEventDispatcher(/* mediaPeriodId= */ null);

@ -69,7 +69,7 @@ public class FakeTrackSelector extends DefaultTrackSelector {
boolean hasTracks = trackGroupArray.length > 0;
definitions[i] =
hasTracks
? new ExoTrackSelection.Definition(trackGroupArray.get(0), /* tracks...= */ 0)
? new ExoTrackSelection.Definition(trackGroupArray.get(0), /* tracks= */ 0)
: null;
}
return definitions;

@ -82,17 +82,17 @@ public final class FakeClockTest {
handler.obtainMessage(/* what= */ 2, /* obj= */ testObject).sendToTarget();
handler.obtainMessage(/* what= */ 3, /* arg1= */ 99, /* arg2= */ 44).sendToTarget();
handler
.obtainMessage(/* what= */ 4, /* arg1= */ 88, /* arg2= */ 33, /* obj=*/ testObject)
.obtainMessage(/* what= */ 4, /* arg1= */ 88, /* arg2= */ 33, /* obj= */ testObject)
.sendToTarget();
ShadowLooper.idleMainLooper();
shadowOf(handler.getLooper()).idle();

assertThat(callback.messages)
.containsExactly(
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null),
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ testObject),
new MessageData(/* what= */ 3, /* arg1= */ 99, /* arg2= */ 44, /* obj=*/ null),
new MessageData(/* what= */ 4, /* arg1= */ 88, /* arg2= */ 33, /* obj=*/ testObject))
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null),
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ testObject),
new MessageData(/* what= */ 3, /* arg1= */ 99, /* arg2= */ 44, /* obj= */ null),
new MessageData(/* what= */ 4, /* arg1= */ 88, /* arg2= */ 33, /* obj= */ testObject))
.inOrder();
}

@ -113,8 +113,8 @@ public final class FakeClockTest {

assertThat(callback.messages)
.containsExactly(
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null),
new MessageData(/* what= */ 4, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null))
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null),
new MessageData(/* what= */ 4, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null))
.inOrder();

fakeClock.advanceTime(50);
@ -122,14 +122,14 @@ public final class FakeClockTest {

assertThat(callback.messages).hasSize(3);
assertThat(Iterables.getLast(callback.messages))
.isEqualTo(new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null));
.isEqualTo(new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null));

fakeClock.advanceTime(50);
shadowOf(handler.getLooper()).idle();

assertThat(callback.messages).hasSize(4);
assertThat(Iterables.getLast(callback.messages))
.isEqualTo(new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null));
.isEqualTo(new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null));
}

@Test
@ -149,10 +149,10 @@ public final class FakeClockTest {

assertThat(callback.messages)
.containsExactly(
new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null),
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null),
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null),
new MessageData(/* what= */ 4, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null))
new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null),
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null),
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null),
new MessageData(/* what= */ 4, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null))
.inOrder();
}

@ -223,14 +223,14 @@ public final class FakeClockTest {

assertThat(callback.messages)
.containsExactly(
new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null));
new MessageData(/* what= */ 3, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null));
assertThat(testRunnable1.hasRun).isTrue();
assertThat(testRunnable2.hasRun).isTrue();

// Assert that message with same "what" on other handler wasn't removed.
assertThat(otherCallback.messages)
.containsExactly(
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null));
new MessageData(/* what= */ 2, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null));
}

@Test
@ -266,7 +266,7 @@ public final class FakeClockTest {
// Assert that message on other handler wasn't removed.
assertThat(otherCallback.messages)
.containsExactly(
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj=*/ null));
new MessageData(/* what= */ 1, /* arg1= */ 0, /* arg2= */ 0, /* obj= */ null));
}

@Test

@ -39,7 +39,7 @@ public class FakeTimelineTest {
FakeTimeline.createMultiPeriodAdTimeline(
windowId,
numberOfPlayedAds,
/* isAdPeriodFlags...= */ true,
/* isAdPeriodFlags= */ true,
false,
true,
true,
@ -94,7 +94,7 @@ public class FakeTimelineTest {
FakeTimeline.createMultiPeriodAdTimeline(
/* windowId= */ new Object(),
/* numberOfPlayedAds= */ 0,
/* isAdPeriodFlags...= */ false,
/* isAdPeriodFlags= */ false,
true,
true,
false);

@ -266,7 +266,7 @@ public final class LeanbackPlayerAdapter extends PlayerAdapter implements Runnab
// int arguments (int what, int extra). Since PlaybackException defines a single error
// code, we pass 0 as the extra.
context.getString(
R.string.lb_media_player_error, /* formatArgs...= */ error.errorCode, 0));
R.string.lb_media_player_error, /* formatArgs= */ error.errorCode, 0));
}
}