Mirror of https://github.com/androidx/media.git
Rollback of 6e9a2cc0cd
PiperOrigin-RevId: 730482824 (cherry picked from commit ee6eb98d4bd4f63270744e25b606208a4a950288)
parent 50f6bdd1fd
commit 77c8ddd884
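For context on the diff below: the rollback drops the segment-array bookkeeping and instead converts timestamps through the SpeedProvider in the frame domain. A minimal standalone sketch of that per-region conversion idea (this is not the media3 implementation; the `SpeedRegionSketch`/`Region` names are illustrative, and the region values mirror the 550-input/250-output expectation in the tests below):

```java
import java.util.List;

/** Illustrative sketch of mapping an input frame count through per-region speeds. */
public final class SpeedRegionSketch {

  /** A constant-speed region covering {@code frameCount} input frames. */
  record Region(long frameCount, float speed) {}

  /** Returns the approximate output frame count for {@code inputFrames} input frames. */
  static long outputFrameCount(List<Region> regions, long inputFrames) {
    long output = 0;
    long remaining = inputFrames;
    for (Region region : regions) {
      long framesInRegion = Math.min(remaining, region.frameCount());
      // Playing a region at `speed` divides the number of frames it occupies on the output.
      output += Math.round(framesInRegion / (double) region.speed());
      remaining -= framesInRegion;
      if (remaining == 0) {
        break;
      }
    }
    if (remaining > 0 && !regions.isEmpty()) {
      // Past the last declared region, the last speed keeps applying.
      output += Math.round(remaining / (double) regions.get(regions.size() - 1).speed());
    }
    return output;
  }

  public static void main(String[] args) {
    // 100 frames at 2x, 400 at 4x, 50 at 0.5x; 550 input frames -> 50 + 100 + 100 = 250.
    List<Region> regions =
        List.of(new Region(100, 2f), new Region(400, 4f), new Region(50, 0.5f));
    System.out.println(outputFrameCount(regions, 550)); // Prints 250.
  }
}
```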
@@ -21,25 +21,24 @@ import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.SpeedProviderUtil.getNextSpeedChangeSamplePosition;
import static androidx.media3.common.util.SpeedProviderUtil.getSampleAlignedSpeed;
import static androidx.media3.common.util.Util.sampleCountToDurationUs;
import static androidx.media3.common.util.Util.scaleLargeValue;
import static java.lang.Math.min;
import static java.lang.Math.round;

import androidx.annotation.GuardedBy;
import androidx.annotation.IntRange;
import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
import androidx.media3.common.util.LongArray;
import androidx.media3.common.Format;
import androidx.media3.common.util.LongArrayQueue;
import androidx.media3.common.util.SpeedProviderUtil;
import androidx.media3.common.util.TimestampConsumer;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.function.LongConsumer;
import org.checkerframework.checker.initialization.qual.UnknownInitialization;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;

/**
 * An {@link AudioProcessor} that changes the speed of audio samples depending on their timestamp.
@@ -67,34 +66,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
  @GuardedBy("lock")
  private final Queue<TimestampConsumer> pendingCallbacks;

  // Elements in the same positions in the arrays are associated.

  @GuardedBy("lock")
  private LongArray inputSegmentStartTimesUs;

  @GuardedBy("lock")
  private LongArray outputSegmentStartTimesUs;

  @GuardedBy("lock")
  private long lastProcessedInputTimeUs;

  @GuardedBy("lock")
  private long lastSpeedAdjustedInputTimeUs;

  @GuardedBy("lock")
  private long lastSpeedAdjustedOutputTimeUs;

  @GuardedBy("lock")
  private long speedAdjustedTimeAsyncInputTimeUs;

  @GuardedBy("lock")
  private float currentSpeed;

  private long framesRead;

  private boolean endOfStreamQueuedToSonic;

  /** The current input audio format. */
  @GuardedBy("lock")
  private AudioFormat inputAudioFormat;

  private AudioFormat pendingInputAudioFormat;
@@ -112,7 +89,6 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
        new SynchronizedSonicAudioProcessor(lock, /* keepActiveWithDefaultParameters= */ true);
    pendingCallbackInputTimesUs = new LongArrayQueue();
    pendingCallbacks = new ArrayDeque<>();
    speedAdjustedTimeAsyncInputTimeUs = C.TIME_UNSET;
    resetInternalState(/* shouldResetSpeed= */ true);
  }

@@ -120,10 +96,10 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
  public static long getSampleCountAfterProcessorApplied(
      SpeedProvider speedProvider,
      @IntRange(from = 1) int inputSampleRateHz,
      @IntRange(from = 1) long inputSamples) {
      @IntRange(from = 0) long inputSamples) {
    checkArgument(speedProvider != null);
    checkArgument(inputSampleRateHz > 0);
    checkArgument(inputSamples > 0);
    checkArgument(inputSamples >= 0);

    long outputSamples = 0;
    long positionSamples = 0;
@@ -171,18 +147,22 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    long currentTimeUs = sampleCountToDurationUs(framesRead, inputAudioFormat.sampleRate);
    float newSpeed = getSampleAlignedSpeed(speedProvider, framesRead, inputAudioFormat.sampleRate);
    long nextSpeedChangeSamplePosition =
        getNextSpeedChangeSamplePosition(speedProvider, framesRead, inputAudioFormat.sampleRate);
    AudioFormat format;
    synchronized (lock) {
      format = inputAudioFormat;
    }

    updateSpeed(newSpeed, currentTimeUs);
    float newSpeed = getSampleAlignedSpeed(speedProvider, framesRead, format.sampleRate);
    long nextSpeedChangeSamplePosition =
        getNextSpeedChangeSamplePosition(speedProvider, framesRead, format.sampleRate);

    updateSpeed(newSpeed);

    int inputBufferLimit = inputBuffer.limit();
    int bytesToNextSpeedChange;
    if (nextSpeedChangeSamplePosition != C.INDEX_UNSET) {
      bytesToNextSpeedChange =
          (int) ((nextSpeedChangeSamplePosition - framesRead) * inputAudioFormat.bytesPerFrame);
          (int) ((nextSpeedChangeSamplePosition - framesRead) * format.bytesPerFrame);
      // Update the input buffer limit to make sure that all samples processed have the same speed.
      inputBuffer.limit(min(inputBufferLimit, inputBuffer.position() + bytesToNextSpeedChange));
    } else {
@@ -197,10 +177,8 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
      endOfStreamQueuedToSonic = true;
    }
    long bytesRead = inputBuffer.position() - startPosition;
    checkState(
        bytesRead % inputAudioFormat.bytesPerFrame == 0, "A frame was not queued completely.");
    framesRead += bytesRead / inputAudioFormat.bytesPerFrame;
    updateLastProcessedInputTime();
    checkState(bytesRead % format.bytesPerFrame == 0, "A frame was not queued completely.");
    framesRead += bytesRead / format.bytesPerFrame;
    inputBuffer.limit(inputBufferLimit);
  }
@@ -215,9 +193,7 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {

  @Override
  public ByteBuffer getOutput() {
    ByteBuffer output = sonicAudioProcessor.getOutput();
    processPendingCallbacks();
    return output;
    return sonicAudioProcessor.getOutput();
  }

  @Override
@@ -228,9 +204,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
  @Override
  public void flush() {
    inputEnded = false;
    inputAudioFormat = pendingInputAudioFormat;
    resetInternalState(/* shouldResetSpeed= */ false);
    sonicAudioProcessor.flush();
    synchronized (lock) {
      inputAudioFormat = pendingInputAudioFormat;
      sonicAudioProcessor.flush();
      processPendingCallbacks();
    }
  }

  @Override
@@ -238,7 +217,11 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
    flush();
    pendingInputAudioFormat = AudioFormat.NOT_SET;
    pendingOutputAudioFormat = AudioFormat.NOT_SET;
    inputAudioFormat = AudioFormat.NOT_SET;
    synchronized (lock) {
      inputAudioFormat = AudioFormat.NOT_SET;
      pendingCallbackInputTimesUs.clear();
      pendingCallbacks.clear();
    }
    resetInternalState(/* shouldResetSpeed= */ true);
    sonicAudioProcessor.reset();
  }
@@ -261,154 +244,125 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
   * @param callback The callback called with the output time. May be called on a different thread
   *     from the caller of this method.
   */
  // TODO(b/381553948): Accept an executor on which to dispatch the callback.
  public void getSpeedAdjustedTimeAsync(long inputTimeUs, TimestampConsumer callback) {
    int sampleRate;
    synchronized (lock) {
      checkArgument(speedAdjustedTimeAsyncInputTimeUs < inputTimeUs);
      speedAdjustedTimeAsyncInputTimeUs = inputTimeUs;
      if ((inputTimeUs <= lastProcessedInputTimeUs && pendingCallbackInputTimesUs.isEmpty())
          || isEnded()) {
        callback.onTimestamp(calculateSpeedAdjustedTime(inputTimeUs));
      sampleRate = inputAudioFormat.sampleRate;

      if (sampleRate == Format.NO_VALUE) {
        pendingCallbackInputTimesUs.add(inputTimeUs);
        pendingCallbacks.add(callback);
        return;
      }
      pendingCallbackInputTimesUs.add(inputTimeUs);
      pendingCallbacks.add(callback);
    }
    // TODO(b/381553948): Use an executor to invoke callback.
    callback.onTimestamp(
        getDurationUsAfterProcessorApplied(speedProvider, sampleRate, inputTimeUs));
  }

  /**
   * Returns the input media duration for the given playout duration.
   * Returns the input media duration in microseconds for the given playout duration.
   *
   * <p>Both durations are counted from the last {@link #reset()} or {@link #flush()} of the audio
   * processor.
   *
   * <p>The {@code playoutDurationUs} must be less than last processed buffer output time.
   * <p>This method returns the inverse of {@link #getSpeedAdjustedTimeAsync} when the instance has
   * been configured and flushed. Otherwise, it returns {@code playoutDurationUs}.
   *
   * @param playoutDurationUs The playout duration in microseconds.
   * @return The corresponding input duration in microseconds.
   */
  public long getMediaDurationUs(long playoutDurationUs) {
    int sampleRate;
    synchronized (lock) {
      int floorIndex = outputSegmentStartTimesUs.size() - 1;
      while (floorIndex > 0 && outputSegmentStartTimesUs.get(floorIndex) > playoutDurationUs) {
        floorIndex--;
      }
      long lastSegmentOutputDurationUs =
          playoutDurationUs - outputSegmentStartTimesUs.get(floorIndex);
      long lastSegmentInputDurationUs;
      if (floorIndex == outputSegmentStartTimesUs.size() - 1) {
        lastSegmentInputDurationUs = getMediaDurationUsAtCurrentSpeed(lastSegmentOutputDurationUs);
      } else {
        lastSegmentInputDurationUs =
            round(
                lastSegmentOutputDurationUs
                    * divide(
                        inputSegmentStartTimesUs.get(floorIndex + 1)
                            - inputSegmentStartTimesUs.get(floorIndex),
                        outputSegmentStartTimesUs.get(floorIndex + 1)
                            - outputSegmentStartTimesUs.get(floorIndex)));
      }
      return inputSegmentStartTimesUs.get(floorIndex) + lastSegmentInputDurationUs;
      sampleRate = inputAudioFormat.sampleRate;
    }
    if (sampleRate == Format.NO_VALUE) {
      return playoutDurationUs;
    }
    long outputSamples =
        scaleLargeValue(playoutDurationUs, sampleRate, C.MICROS_PER_SECOND, RoundingMode.HALF_EVEN);
    long inputSamples = getInputFrameCountForOutput(speedProvider, sampleRate, outputSamples);
    return sampleCountToDurationUs(inputSamples, sampleRate);
  }
  /**
   * Assuming enough audio has been processed, calculates the time at which the {@code inputTimeUs}
   * is outputted at after the speed changes has been applied.
   * Returns the number of input frames needed to output a specific number of frames, given a speed
   * provider, input sample rate, and number of output frames.
   *
   * <p>This is the inverse operation of {@link #getSampleCountAfterProcessorApplied}.
   */
  @SuppressWarnings("GuardedBy") // All call sites are guarded.
  private long calculateSpeedAdjustedTime(long inputTimeUs) {
    int floorIndex = inputSegmentStartTimesUs.size() - 1;
    while (floorIndex > 0 && inputSegmentStartTimesUs.get(floorIndex) > inputTimeUs) {
      floorIndex--;
    }
    long lastSegmentOutputDurationUs;
    if (floorIndex == inputSegmentStartTimesUs.size() - 1) {
      if (lastSpeedAdjustedInputTimeUs < inputSegmentStartTimesUs.get(floorIndex)) {
        lastSpeedAdjustedInputTimeUs = inputSegmentStartTimesUs.get(floorIndex);
        lastSpeedAdjustedOutputTimeUs = outputSegmentStartTimesUs.get(floorIndex);
  @VisibleForTesting
  /* package */ static long getInputFrameCountForOutput(
      SpeedProvider speedProvider,
      @IntRange(from = 1) int inputSampleRate,
      @IntRange(from = 0) long outputFrameCount) {
    checkArgument(inputSampleRate > 0);
    checkArgument(outputFrameCount >= 0);

    long inputSampleCount = 0;
    while (outputFrameCount > 0) {
      long boundarySamples =
          getNextSpeedChangeSamplePosition(speedProvider, inputSampleCount, inputSampleRate);
      float speed = getSampleAlignedSpeed(speedProvider, inputSampleCount, inputSampleRate);

      long outputSamplesForSection =
          Sonic.getExpectedFrameCountAfterProcessorApplied(
              /* inputSampleRateHz= */ inputSampleRate,
              /* outputSampleRateHz= */ inputSampleRate,
              /* speed= */ speed,
              /* pitch= */ speed,
              /* inputFrameCount= */ boundarySamples - inputSampleCount);

      if (boundarySamples == C.INDEX_UNSET || outputSamplesForSection > outputFrameCount) {
        inputSampleCount +=
            Sonic.getExpectedInputFrameCountForOutputFrameCount(
                /* inputSampleRateHz= */ inputSampleRate,
                /* outputSampleRateHz= */ inputSampleRate,
                /* speed= */ speed,
                /* pitch= */ speed,
                outputFrameCount);
        outputFrameCount = 0;
      } else {
        outputFrameCount -= outputSamplesForSection;
        inputSampleCount = boundarySamples;
      }
      long lastSegmentInputDurationUs = inputTimeUs - lastSpeedAdjustedInputTimeUs;
      lastSegmentOutputDurationUs = getPlayoutDurationUsAtCurrentSpeed(lastSegmentInputDurationUs);
    } else {
      long lastSegmentInputDurationUs = inputTimeUs - lastSpeedAdjustedInputTimeUs;
      lastSegmentOutputDurationUs =
          round(
              lastSegmentInputDurationUs
                  * divide(
                      outputSegmentStartTimesUs.get(floorIndex + 1)
                          - outputSegmentStartTimesUs.get(floorIndex),
                      inputSegmentStartTimesUs.get(floorIndex + 1)
                          - inputSegmentStartTimesUs.get(floorIndex)));
    }
    lastSpeedAdjustedInputTimeUs = inputTimeUs;
    lastSpeedAdjustedOutputTimeUs += lastSegmentOutputDurationUs;
    return lastSpeedAdjustedOutputTimeUs;

    return inputSampleCount;
  }

  private static double divide(long dividend, long divisor) {
    return ((double) dividend) / divisor;
  private static long getDurationUsAfterProcessorApplied(
      SpeedProvider speedProvider, int sampleRate, long inputDurationUs) {
    long inputSamples =
        scaleLargeValue(inputDurationUs, sampleRate, C.MICROS_PER_SECOND, RoundingMode.HALF_EVEN);
    long outputSamples =
        getSampleCountAfterProcessorApplied(speedProvider, sampleRate, inputSamples);
    return sampleCountToDurationUs(outputSamples, sampleRate);
  }

  private void processPendingCallbacks() {
    synchronized (lock) {
      while (!pendingCallbacks.isEmpty()
          && (pendingCallbackInputTimesUs.element() <= lastProcessedInputTimeUs || isEnded())) {
        pendingCallbacks
            .remove()
            .onTimestamp(calculateSpeedAdjustedTime(pendingCallbackInputTimesUs.remove()));
      if (inputAudioFormat.sampleRate == Format.NO_VALUE) {
        return;
      }

      while (!pendingCallbacks.isEmpty()) {
        long inputTimeUs = pendingCallbackInputTimesUs.remove();
        TimestampConsumer consumer = pendingCallbacks.remove();
        // TODO(b/381553948): Use an executor to invoke callback.
        consumer.onTimestamp(
            getDurationUsAfterProcessorApplied(
                speedProvider, inputAudioFormat.sampleRate, inputTimeUs));
      }
    }
  }

  private void updateSpeed(float newSpeed, long timeUs) {
    synchronized (lock) {
      if (newSpeed != currentSpeed) {
        updateSpeedChangeArrays(timeUs);
        currentSpeed = newSpeed;
        sonicAudioProcessor.setSpeed(newSpeed);
        sonicAudioProcessor.setPitch(newSpeed);
        // Invalidate any previously created buffers in SonicAudioProcessor and the base class.
        sonicAudioProcessor.flush();
        endOfStreamQueuedToSonic = false;
      }
    }
  }

  @SuppressWarnings("GuardedBy") // All call sites are guarded.
  private void updateSpeedChangeArrays(long currentSpeedChangeInputTimeUs) {
    long lastSpeedChangeOutputTimeUs =
        outputSegmentStartTimesUs.get(outputSegmentStartTimesUs.size() - 1);
    long lastSpeedChangeInputTimeUs =
        inputSegmentStartTimesUs.get(inputSegmentStartTimesUs.size() - 1);
    long lastSpeedSegmentMediaDurationUs =
        currentSpeedChangeInputTimeUs - lastSpeedChangeInputTimeUs;
    inputSegmentStartTimesUs.add(currentSpeedChangeInputTimeUs);
    outputSegmentStartTimesUs.add(
        lastSpeedChangeOutputTimeUs
            + getPlayoutDurationUsAtCurrentSpeed(lastSpeedSegmentMediaDurationUs));
  }

  private long getPlayoutDurationUsAtCurrentSpeed(long mediaDurationUs) {
    return sonicAudioProcessor.getPlayoutDuration(mediaDurationUs);
  }

  private long getMediaDurationUsAtCurrentSpeed(long playoutDurationUs) {
    return sonicAudioProcessor.getMediaDuration(playoutDurationUs);
  }

  private void updateLastProcessedInputTime() {
    synchronized (lock) {
      // TODO - b/320242819: Investigate whether bytesRead can be used here rather than
      // sonicAudioProcessor.getProcessedInputBytes().
      long currentProcessedInputDurationUs =
          Util.scaleLargeTimestamp(
              /* timestamp= */ sonicAudioProcessor.getProcessedInputBytes(),
              /* multiplier= */ C.MICROS_PER_SECOND,
              /* divisor= */ (long) inputAudioFormat.sampleRate * inputAudioFormat.bytesPerFrame);
      lastProcessedInputTimeUs =
          inputSegmentStartTimesUs.get(inputSegmentStartTimesUs.size() - 1)
              + currentProcessedInputDurationUs;
  private void updateSpeed(float newSpeed) {
    if (newSpeed != currentSpeed) {
      currentSpeed = newSpeed;
      sonicAudioProcessor.setSpeed(newSpeed);
      sonicAudioProcessor.setPitch(newSpeed);
      // Invalidate any previously created buffers in SonicAudioProcessor and the base class.
      sonicAudioProcessor.flush();
      endOfStreamQueuedToSonic = false;
    }
  }
@@ -420,28 +374,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
   *
   * @param shouldResetSpeed Whether {@link #currentSpeed} should be reset to its default value.
   */
  @EnsuresNonNull({"inputSegmentStartTimesUs", "outputSegmentStartTimesUs"})
  @RequiresNonNull("lock")
  private void resetInternalState(
      @UnknownInitialization SpeedChangingAudioProcessor this, boolean shouldResetSpeed) {
    synchronized (lock) {
      inputSegmentStartTimesUs = new LongArray();
      outputSegmentStartTimesUs = new LongArray();
      inputSegmentStartTimesUs.add(0);
      outputSegmentStartTimesUs.add(0);
      lastProcessedInputTimeUs = 0;
      lastSpeedAdjustedInputTimeUs = 0;
      lastSpeedAdjustedOutputTimeUs = 0;
      if (shouldResetSpeed) {
        currentSpeed = 1f;
      }
    if (shouldResetSpeed) {
      currentSpeed = 1f;
    }

    framesRead = 0;
    endOfStreamQueuedToSonic = false;
    // TODO: b/339842724 - This should ideally also reset speedAdjustedTimeAsyncInputTimeUs and
    // clear pendingCallbacks and pendingCallbacksInputTimes. We can't do this at the moment
    // because some clients register callbacks with getSpeedAdjustedTimeAsync before this audio
    // processor is flushed.
    }
  }
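The getMediaDurationUs / getDurationUsAfterProcessorApplied pair above converts between the microsecond and frame domains on either side of the frame-domain speed mapping. A minimal arithmetic sketch of that round trip (plain Java with hypothetical values, not the media3 utility methods):

```java
/** Illustrative arithmetic only; mirrors scaleLargeValue/sampleCountToDurationUs usage above. */
public final class DurationMappingSketch {
  public static void main(String[] args) {
    long sampleRate = 50_000;      // Hz, matching the 50 kHz test format in the diff below.
    long playoutDurationUs = 100;  // 100 us of output audio.
    // Duration -> frames at the sample rate (the scaleLargeValue step in the diff).
    long outputFrames = Math.round(playoutDurationUs * sampleRate / 1_000_000.0); // 5 frames
    // Frames map through the speed regions; at a constant 2x speed the input side is doubled.
    long inputFrames = outputFrames * 2;
    // Frames -> duration again (the sampleCountToDurationUs step in the diff).
    long mediaDurationUs = Math.round(inputFrames * 1_000_000.0 / sampleRate); // 200 us
    System.out.println(mediaDurationUs);
  }
}
```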
@@ -16,7 +16,7 @@
package androidx.media3.common.audio;

import static androidx.media3.common.audio.AudioProcessor.EMPTY_BUFFER;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.audio.SpeedChangingAudioProcessor.getInputFrameCountForOutput;
import static androidx.media3.test.utils.TestUtil.getNonRandomByteBuffer;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
@@ -36,53 +36,59 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public class SpeedChangingAudioProcessorTest {

  private static final AudioFormat AUDIO_FORMAT =
  private static final AudioFormat AUDIO_FORMAT_44_100HZ =
      new AudioFormat(
          /* sampleRate= */ 44100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
          /* sampleRate= */ 44_100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);

  private static final AudioFormat AUDIO_FORMAT_50_000HZ =
      new AudioFormat(
          /* sampleRate= */ 50_000, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
@Test
|
||||
public void queueInput_noSpeedChange_doesNotOverwriteInput() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
|
||||
inputBuffer.rewind();
|
||||
assertThat(inputBuffer)
|
||||
.isEqualTo(getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame));
|
||||
.isEqualTo(
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queueInput_speedChange_doesNotOverwriteInput() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
|
||||
inputBuffer.rewind();
|
||||
assertThat(inputBuffer)
|
||||
.isEqualTo(getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame));
|
||||
.isEqualTo(
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queueInput_noSpeedChange_copiesSamples() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
@ -96,11 +102,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void queueInput_speedChange_modifiesSamples() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
@ -115,11 +121,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void queueInput_noSpeedChangeAfterSpeedChange_copiesSamples() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
inputBuffer.rewind();
|
||||
@ -136,11 +144,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {1, 2});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {1, 2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
inputBuffer.rewind();
|
||||
@ -150,7 +160,7 @@ public class SpeedChangingAudioProcessorTest {
|
||||
|
||||
speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
@ -165,11 +175,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {3, 2});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {3, 2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
inputBuffer.rewind();
|
||||
@ -179,7 +191,7 @@ public class SpeedChangingAudioProcessorTest {
|
||||
|
||||
speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
@ -194,18 +206,20 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 3});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {2, 3});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
ByteBuffer outputBuffer = getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
|
||||
speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
@ -218,7 +232,7 @@ public class SpeedChangingAudioProcessorTest {
|
||||
@Test
|
||||
public void queueInput_multipleSpeedsInBufferWithLimitAtFrameBoundary_readsDataUntilSpeedLimit()
|
||||
throws Exception {
|
||||
long speedChangeTimeUs = 4 * C.MICROS_PER_SECOND / AUDIO_FORMAT.sampleRate;
|
||||
long speedChangeTimeUs = 4 * C.MICROS_PER_SECOND / AUDIO_FORMAT_44_100HZ.sampleRate;
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithStartTimes(
|
||||
/* startTimesUs= */ new long[] {0L, speedChangeTimeUs},
|
||||
@ -226,19 +240,19 @@ public class SpeedChangingAudioProcessorTest {
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
int inputBufferLimit = inputBuffer.limit();
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
|
||||
assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT.bytesPerFrame);
|
||||
assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
assertThat(inputBuffer.limit()).isEqualTo(inputBufferLimit);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queueInput_multipleSpeedsInBufferWithLimitInsideFrame_readsDataUntilSpeedLimit()
|
||||
throws Exception {
|
||||
long speedChangeTimeUs = (long) (3.5 * C.MICROS_PER_SECOND / AUDIO_FORMAT.sampleRate);
|
||||
long speedChangeTimeUs = (long) (3.5 * C.MICROS_PER_SECOND / AUDIO_FORMAT_44_100HZ.sampleRate);
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithStartTimes(
|
||||
/* startTimesUs= */ new long[] {0L, speedChangeTimeUs},
|
||||
@ -246,12 +260,12 @@ public class SpeedChangingAudioProcessorTest {
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
int inputBufferLimit = inputBuffer.limit();
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
|
||||
assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT.bytesPerFrame);
|
||||
assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
assertThat(inputBuffer.limit()).isEqualTo(inputBufferLimit);
|
||||
}
|
||||
|
||||
@ -266,18 +280,18 @@ public class SpeedChangingAudioProcessorTest {
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
// SpeedChangingAudioProcessor only queues samples until the next speed change.
|
||||
while (inputBuffer.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
outputFrames +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
outputFrames +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
// We allow 1 sample of tolerance per speed change.
|
||||
assertThat(outputFrames).isWithin(1).of(3);
|
||||
}
|
||||
@ -287,11 +301,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
inputBuffer.rewind();
|
||||
@ -307,11 +323,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {1, 2});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {5, 5},
|
||||
/* speeds= */ new float[] {1, 2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
inputBuffer.rewind();
|
||||
@ -327,11 +345,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
@ -344,11 +362,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
@ -360,7 +378,7 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void queueEndOfStream_noInputQueued_endsProcessor() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
|
||||
@ -373,11 +391,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void isEnded_afterNoSpeedChangeAndOutputRetrieved_isFalse() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
@ -389,11 +407,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void isEnded_afterSpeedChangeAndOutputRetrieved_isFalse() throws Exception {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
@ -402,147 +420,89 @@ public class SpeedChangingAudioProcessorTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getSpeedAdjustedTimeAsync_callbacksCalledWithCorrectParameters() throws Exception {
|
||||
public void getSpeedAdjustedTimeAsync_beforeFlush_callbacksCalledWithCorrectParametersAfterFlush()
|
||||
throws Exception {
|
||||
ArrayList<Long> outputTimesUs = new ArrayList<>();
|
||||
// The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
|
||||
// Sample period = 20us.
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
|
||||
AUDIO_FORMAT_50_000HZ,
|
||||
/* frameCounts= */ new int[] {6, 6},
|
||||
/* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
new SpeedChangingAudioProcessor(speedProvider);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
|
||||
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 50L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
/* inputTimeUs= */ 40L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 100L, outputTimesUs::add);
|
||||
/* inputTimeUs= */ 80L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 150L, outputTimesUs::add);
|
||||
/* inputTimeUs= */ 160L, outputTimesUs::add);
|
||||
|
||||
// 150 is after the speed change so floor(113 / 2 + (150 - 113)*1) -> 93
|
||||
assertThat(outputTimesUs).containsExactly(25L, 50L, 93L);
|
||||
assertThat(outputTimesUs).isEmpty();
|
||||
speedChangingAudioProcessor.flush();
|
||||
assertThat(outputTimesUs).containsExactly(20L, 40L, 100L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getSpeedAdjustedTimeAsync_afterFlush_callbacksCalledWithCorrectParameters()
|
||||
public void getSpeedAdjustedTimeAsync_afterCallToFlush_callbacksCalledWithCorrectParameters()
|
||||
throws Exception {
|
||||
ArrayList<Long> outputTimesUs = new ArrayList<>();
|
||||
// The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate). Also add another speed change
|
||||
// to 3x at a later point that should not be used if the flush is handled correctly.
|
||||
// Sample period = 20us.
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
/* frameCounts= */ new int[] {5, 5, 5},
|
||||
/* speeds= */ new float[] {2, 1, 3});
|
||||
AUDIO_FORMAT_50_000HZ,
|
||||
/* frameCounts= */ new int[] {6, 6},
|
||||
/* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
// Use the audio processor before a flush
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
|
||||
// Flush and use it again.
|
||||
new SpeedChangingAudioProcessor(speedProvider);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
|
||||
speedChangingAudioProcessor.flush();
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 50L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 100L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 150L, outputTimesUs::add);
|
||||
|
||||
// 150 is after the speed change so floor(113 / 2 + (150 - 113)*1) -> 93
|
||||
assertThat(outputTimesUs).containsExactly(25L, 50L, 93L);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 40L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 80L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 160L, outputTimesUs::add);
|
||||
|
||||
assertThat(outputTimesUs).containsExactly(20L, 40L, 100L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getSpeedAdjustedTimeAsync_timeAfterEndTime_callbacksCalledWithCorrectParameters()
|
||||
throws Exception {
|
||||
ArrayList<Long> outputTimesUs = new ArrayList<>();
|
||||
// The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
|
||||
// The speed change is at 120Us (6*MICROS_PER_SECOND/sampleRate).
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
|
||||
AUDIO_FORMAT_50_000HZ,
|
||||
/* frameCounts= */ new int[] {6, 6},
|
||||
/* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 3, AUDIO_FORMAT.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 300L, outputTimesUs::add);
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
|
||||
// 150 is after the speed change so floor(113 / 2 + (300 - 113)*1) -> 243
|
||||
assertThat(outputTimesUs).containsExactly(243L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void
|
||||
getSpeedAdjustedTimeAsync_timeAfterEndTimeAfterProcessorEnded_callbacksCalledWithCorrectParameters()
|
||||
throws Exception {
|
||||
ArrayList<Long> outputTimesUs = new ArrayList<>();
|
||||
// The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
inputBuffer.rewind();
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
checkState(speedChangingAudioProcessor.isEnded());
|
||||
new SpeedChangingAudioProcessor(speedProvider);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
|
||||
speedChangingAudioProcessor.flush();
|
||||
|
||||
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
|
||||
/* inputTimeUs= */ 300L, outputTimesUs::add);
|
||||
|
||||
// 150 is after the speed change so floor(113 / 2 + (300 - 113)*1) -> 243
|
||||
assertThat(outputTimesUs).containsExactly(243L);
|
||||
assertThat(outputTimesUs).containsExactly(240L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getMediaDurationUs_returnsCorrectValues() throws Exception {
|
||||
// The speed changes happen every 10ms (441 samples @ 441.KHz)
|
||||
// The speed changes happen every 10ms (500 samples @ 50.KHz)
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
/* frameCounts= */ new int[] {441, 441, 441, 441},
|
||||
AUDIO_FORMAT_50_000HZ,
|
||||
/* frameCounts= */ new int[] {500, 500, 500, 500},
|
||||
/* speeds= */ new float[] {2, 1, 5, 2});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer inputBuffer =
|
||||
getNonRandomByteBuffer(/* frameCount= */ 441 * 4, AUDIO_FORMAT.bytesPerFrame);
|
||||
while (inputBuffer.position() < inputBuffer.limit()) {
|
||||
speedChangingAudioProcessor.queueInput(inputBuffer);
|
||||
}
|
||||
getAudioProcessorOutput(speedChangingAudioProcessor);
|
||||
new SpeedChangingAudioProcessor(speedProvider);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
|
||||
speedChangingAudioProcessor.flush();
|
||||
|
||||
// input (in ms) (0, 10, 20, 30, 40) ->
|
||||
// output (in ms) (0, 10/2, 10/2 + 10, 10/2 + 10 + 10/5, 10/2 + 10 + 10/5 + 10/2)
|
||||
@ -572,30 +532,30 @@ public class SpeedChangingAudioProcessorTest {
|
||||
int outputFrameCount = 0;
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 1000, 1000},
|
||||
/* speeds= */ new float[] {2, 4, 2}); // 500, 250, 500 = 1250
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer input = getNonRandomByteBuffer(1000, AUDIO_FORMAT.bytesPerFrame);
|
||||
ByteBuffer input = getNonRandomByteBuffer(1000, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
input.rewind();
|
||||
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
input.rewind();
|
||||
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
assertThat(outputFrameCount).isWithin(2).of(1250);
|
||||
}
|
||||
|
||||
@ -612,17 +572,17 @@ public class SpeedChangingAudioProcessorTest {
|
||||
/* speeds= */ new float[] {2, 3, 8, 4});
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
ByteBuffer input = getNonRandomByteBuffer(12, AUDIO_FORMAT.bytesPerFrame);
|
||||
ByteBuffer input = getNonRandomByteBuffer(12, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
|
||||
while (input.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
|
||||
// Allow one sample of tolerance per effectively applied speed change.
|
||||
assertThat(outputFrameCount).isWithin(1).of(4);
|
||||
@ -633,23 +593,23 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws AudioProcessor.UnhandledAudioFormatException {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 1000},
|
||||
/* speeds= */ new float[] {1, 2}); // 1000, 500.
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
// 1500 input frames falls in the middle of the 2x region.
|
||||
ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT.bytesPerFrame);
|
||||
ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
int outputFrameCount = 0;
|
||||
|
||||
while (input.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
speedChangingAudioProcessor.flush();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
assertThat(outputFrameCount).isEqualTo(1250);
|
||||
input.rewind();
|
||||
|
||||
@ -659,11 +619,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
while (input.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
assertThat(outputFrameCount).isWithin(1).of(2500); // 1250 * 2.
|
||||
}
|
||||
|
||||
@ -672,23 +632,23 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws AudioProcessor.UnhandledAudioFormatException {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 1000},
|
||||
/* speeds= */ new float[] {2, 4}); // 500, 250.
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
getConfiguredSpeedChangingAudioProcessor(speedProvider);
|
||||
// 1500 input frames falls in the middle of the 2x region.
|
||||
ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT.bytesPerFrame);
|
||||
ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
|
||||
int outputFrameCount = 0;
|
||||
|
||||
while (input.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
speedChangingAudioProcessor.flush();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
assertThat(outputFrameCount).isWithin(1).of(625);
|
||||
input.rewind();
|
||||
|
||||
@ -698,11 +658,11 @@ public class SpeedChangingAudioProcessorTest {
|
||||
while (input.hasRemaining()) {
|
||||
speedChangingAudioProcessor.queueInput(input);
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
}
|
||||
speedChangingAudioProcessor.queueEndOfStream();
|
||||
outputFrameCount +=
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
|
||||
speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
|
||||
assertThat(outputFrameCount).isWithin(2).of(1250); // 625 * 2.
|
||||
}
|
||||
|
||||
@ -716,7 +676,7 @@ public class SpeedChangingAudioProcessorTest {
|
||||
|
||||
long sampleCountAfterProcessorApplied =
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 100);
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 100);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(50);
|
||||
}
|
||||
|
||||
@ -724,13 +684,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
public void getSampleCountAfterProcessorApplied_withMultipleSpeeds_outputsExpectedSamples() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {100, 400, 50},
|
||||
/* speeds= */ new float[] {2.f, 4f, 0.5f});
|
||||
|
||||
long sampleCountAfterProcessorApplied =
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 550);
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 550);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(250);
|
||||
}
|
||||
|
||||
@ -739,13 +699,13 @@ public class SpeedChangingAudioProcessorTest {
|
||||
getSampleCountAfterProcessorApplied_beyondLastSpeedRegion_stillAppliesLastSpeedValue() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {100, 400, 50},
|
||||
/* speeds= */ new float[] {2.f, 4f, 0.5f});
|
||||
|
||||
long sampleCountAfterProcessorApplied =
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 3000);
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 3000);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(5150);
|
||||
}
|
||||
|
||||
@ -754,38 +714,38 @@ public class SpeedChangingAudioProcessorTest {
|
||||
getSampleCountAfterProcessorApplied_withInputCountBeyondIntRange_outputsExpectedSamples() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 10000, 8200},
|
||||
/* speeds= */ new float[] {0.2f, 8f, 0.5f});
|
||||
long sampleCountAfterProcessorApplied =
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 3_000_000_000L);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(5999984250L);
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 3_000_000_000L);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(5_999_984_250L);
|
||||
}
|
||||
|
||||
// Testing range validation.
|
||||
@SuppressLint("Range")
|
||||
@Test
|
||||
public void getSampleCountAfterProcessorApplied_withNegativeSampleCount_throws() {
|
||||
public void getSampleCountAfterProcessorApplied_withNegativeFrameCount_throws() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 10000, 8200},
|
||||
/* speeds= */ new float[] {0.2f, 8f, 0.5f});
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ -2L));
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ -2L));
|
||||
}
|
||||
|
||||
// Testing range validation.
|
||||
@SuppressLint("Range")
|
||||
@Test
|
||||
public void getSampleCountAfterProcessorApplied_withZeroSampleRate_throws() {
|
||||
public void getSampleCountAfterProcessorApplied_withZeroFrameRate_throws() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT,
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 10000, 8200},
|
||||
/* speeds= */ new float[] {0.2f, 8f, 0.5f});
|
||||
assertThrows(
|
||||
@ -801,14 +761,32 @@ public class SpeedChangingAudioProcessorTest {
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
/* speedProvider= */ null, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 1000L));
|
||||
/* speedProvider= */ null,
|
||||
AUDIO_FORMAT_44_100HZ.sampleRate,
|
||||
/* inputSamples= */ 1000L));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getSampleCountAfterProcessorApplied_withZeroInputFrames_returnsZero() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 10000, 8200},
|
||||
/* speeds= */ new float[] {0.2f, 8f, 0.5f});
|
||||
|
||||
long sampleCountAfterProcessorApplied =
|
||||
SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 0L);
|
||||
assertThat(sampleCountAfterProcessorApplied).isEqualTo(0L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void isActive_beforeConfigure_returnsFalse() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {1000}, /* speeds= */ new float[] {2f});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000},
|
||||
/* speeds= */ new float[] {2f});
|
||||
|
||||
SpeedChangingAudioProcessor processor = new SpeedChangingAudioProcessor(speedProvider);
|
||||
assertThat(processor.isActive()).isFalse();
|
||||
@ -819,18 +797,34 @@ public class SpeedChangingAudioProcessorTest {
|
||||
throws AudioProcessor.UnhandledAudioFormatException {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT, /* frameCounts= */ new int[] {1000}, /* speeds= */ new float[] {2f});
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000},
|
||||
/* speeds= */ new float[] {2f});
|
||||
|
||||
SpeedChangingAudioProcessor processor = new SpeedChangingAudioProcessor(speedProvider);
|
||||
processor.configure(AUDIO_FORMAT);
|
||||
processor.configure(AUDIO_FORMAT_44_100HZ);
|
||||
assertThat(processor.isActive()).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getInputFrameCountForOutput_withZeroOutputFrames_returnsZero() {
|
||||
SpeedProvider speedProvider =
|
||||
TestSpeedProvider.createWithFrameCounts(
|
||||
AUDIO_FORMAT_44_100HZ,
|
||||
/* frameCounts= */ new int[] {1000, 10000, 8200},
|
||||
/* speeds= */ new float[] {0.2f, 8f, 0.5f});
|
||||
|
||||
long inputFrames =
|
||||
getInputFrameCountForOutput(
|
||||
speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* outputFrameCount= */ 0L);
|
||||
assertThat(inputFrames).isEqualTo(0L);
|
||||
}
|
||||
|
||||
private static SpeedChangingAudioProcessor getConfiguredSpeedChangingAudioProcessor(
|
||||
SpeedProvider speedProvider) throws AudioProcessor.UnhandledAudioFormatException {
|
||||
SpeedChangingAudioProcessor speedChangingAudioProcessor =
|
||||
new SpeedChangingAudioProcessor(speedProvider);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT);
|
||||
speedChangingAudioProcessor.configure(AUDIO_FORMAT_44_100HZ);
|
||||
speedChangingAudioProcessor.flush();
|
||||
return speedChangingAudioProcessor;
|
||||
}
|
||||
|