androidx/media (mirror of https://github.com/androidx/media.git)
commit d6e4642bcf
parent afd601f670
RELEASENOTES.md

@@ -18,6 +18,8 @@
 * Extractors:
 * DataSource:
 * Audio:
+  * Do not bypass `SonicAudioProcessor` when `SpeedChangingAudioProcessor`
+    is configured with default parameters.
 * Video:
 * Text:
 * Metadata:
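Note: the release note above is the user-visible summary of this commit. `SpeedChangingAudioProcessor` now always routes audio through its internal `SonicAudioProcessor`, which is kept active even when speed, pitch and sample rate are left at their defaults. As an illustrative, non-authoritative sketch (assuming the media3-common artifact is on the classpath; the class name below is made up), the public `SonicAudioProcessor` API behaves as before, since the new keep-active flag is package-private and only used internally:

    import androidx.media3.common.C;
    import androidx.media3.common.audio.AudioProcessor.AudioFormat;
    import androidx.media3.common.audio.SonicAudioProcessor;

    public final class SonicDefaultsSketch {
      public static void main(String[] args) throws Exception {
        SonicAudioProcessor sonic = new SonicAudioProcessor(); // Public constructor: behaviour unchanged.
        sonic.configure(
            new AudioFormat(/* sampleRate= */ 44_100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT));
        System.out.println(sonic.isActive()); // false: speed == 1, pitch == 1, no resampling.

        sonic.setSpeed(2f);
        System.out.println(sonic.isActive()); // true once a non-default parameter is set.
      }
    }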
SonicAudioProcessor.java

@@ -15,8 +15,11 @@
  */
 package androidx.media3.common.audio;
 
+import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkNotNull;
+import static java.lang.Math.abs;
 
+import androidx.annotation.FloatRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.Format;
@@ -44,6 +47,8 @@ public class SonicAudioProcessor implements AudioProcessor {
    */
   private static final int MIN_BYTES_FOR_DURATION_SCALING_CALCULATION = 1024;
 
+  private final boolean shouldBeActiveWithDefaultParameters;
+
   private int pendingOutputSampleRate;
   private float speed;
   private float pitch;
@@ -64,6 +69,17 @@ public class SonicAudioProcessor implements AudioProcessor {
 
   /** Creates a new Sonic audio processor. */
   public SonicAudioProcessor() {
+    this(/* keepActiveWithDefaultParameters= */ false);
+  }
+
+  /**
+   * Creates a new instance of {@link SonicAudioProcessor}.
+   *
+   * <p>If {@code keepActiveWithDefaultParameters} is set to {@code true}, then {@link #isActive()}
+   * returns {@code true} when parameters have been configured to default values that result in
+   * no-op processing.
+   */
+  /* package */ SonicAudioProcessor(boolean keepActiveWithDefaultParameters) {
     speed = 1f;
     pitch = 1f;
     pendingInputAudioFormat = AudioFormat.NOT_SET;
@@ -74,6 +90,7 @@ public class SonicAudioProcessor implements AudioProcessor {
     shortBuffer = buffer.asShortBuffer();
     outputBuffer = EMPTY_BUFFER;
     pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
+    shouldBeActiveWithDefaultParameters = keepActiveWithDefaultParameters;
   }
 
   /**
@@ -83,7 +100,8 @@ public class SonicAudioProcessor implements AudioProcessor {
    *
    * @param speed The target factor by which playback should be sped up.
    */
-  public final void setSpeed(float speed) {
+  public final void setSpeed(@FloatRange(from = 0f, fromInclusive = false) float speed) {
+    checkArgument(speed > 0f);
     if (this.speed != speed) {
       this.speed = speed;
       pendingSonicRecreation = true;
@@ -97,7 +115,8 @@ public class SonicAudioProcessor implements AudioProcessor {
    *
    * @param pitch The target pitch.
    */
-  public final void setPitch(float pitch) {
+  public final void setPitch(@FloatRange(from = 0f, fromInclusive = false) float pitch) {
+    checkArgument(pitch > 0f);
     if (this.pitch != pitch) {
       this.pitch = pitch;
       pendingSonicRecreation = true;
@@ -113,6 +132,7 @@ public class SonicAudioProcessor implements AudioProcessor {
    * @see #configure(AudioFormat)
    */
   public final void setOutputSampleRateHz(int sampleRateHz) {
+    checkArgument(sampleRateHz == SAMPLE_RATE_NO_CHANGE || sampleRateHz > 0);
     pendingOutputSampleRate = sampleRateHz;
   }
 
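Note: with the `checkArgument` calls added above, non-positive speed and pitch values now fail fast instead of being stored. A minimal sketch of the resulting behaviour, assuming the media3-common artifact is on the classpath (the class name below is illustrative):

    import androidx.media3.common.audio.SonicAudioProcessor;

    public final class SetSpeedValidationSketch {
      public static void main(String[] args) {
        SonicAudioProcessor sonic = new SonicAudioProcessor();
        sonic.setSpeed(0.5f); // Accepted: any strictly positive speed is valid.
        try {
          sonic.setSpeed(0f); // Rejected by checkArgument(speed > 0f) after this change.
        } catch (IllegalArgumentException e) {
          System.out.println("Non-positive speed rejected: " + e);
        }
      }
    }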
@@ -196,9 +216,13 @@ public class SonicAudioProcessor implements AudioProcessor {
   @Override
   public final boolean isActive() {
     return pendingOutputAudioFormat.sampleRate != Format.NO_VALUE
-        && (Math.abs(speed - 1f) >= CLOSE_THRESHOLD
-            || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
-            || pendingOutputAudioFormat.sampleRate != pendingInputAudioFormat.sampleRate);
+        && (shouldBeActiveWithDefaultParameters || !areParametersSetToDefaultValues());
+  }
+
+  private boolean areParametersSetToDefaultValues() {
+    return abs(speed - 1f) < CLOSE_THRESHOLD
+        && abs(pitch - 1f) < CLOSE_THRESHOLD
+        && pendingOutputAudioFormat.sampleRate == pendingInputAudioFormat.sampleRate;
   }
 
   @Override
SpeedChangingAudioProcessor.java

@@ -24,6 +24,7 @@ import static androidx.media3.common.util.Util.sampleCountToDurationUs;
 import static java.lang.Math.min;
 import static java.lang.Math.round;
 
+import android.annotation.SuppressLint;
 import androidx.annotation.GuardedBy;
 import androidx.annotation.IntRange;
 import androidx.media3.common.C;
@@ -99,11 +100,12 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
   public SpeedChangingAudioProcessor(SpeedProvider speedProvider) {
     this.speedProvider = speedProvider;
     lock = new Object();
-    sonicAudioProcessor = new SynchronizedSonicAudioProcessor(lock);
+    sonicAudioProcessor =
+        new SynchronizedSonicAudioProcessor(lock, /* keepActiveWithDefaultParameters= */ true);
     pendingCallbackInputTimesUs = new LongArrayQueue();
     pendingCallbacks = new ArrayDeque<>();
     speedAdjustedTimeAsyncInputTimeUs = C.TIME_UNSET;
-    resetState();
+    resetState(/* shouldResetSpeed= */ true);
   }
 
   /** Returns the estimated number of samples output given the provided parameters. */
@@ -174,20 +176,12 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
     }
 
     long startPosition = inputBuffer.position();
-    if (isUsingSonic()) {
     sonicAudioProcessor.queueInput(inputBuffer);
     if (bytesToNextSpeedChange != C.LENGTH_UNSET
         && (inputBuffer.position() - startPosition) == bytesToNextSpeedChange) {
       sonicAudioProcessor.queueEndOfStream();
       endOfStreamQueuedToSonic = true;
     }
-    } else {
-      ByteBuffer buffer = replaceOutputBuffer(/* size= */ inputBuffer.remaining());
-      if (inputBuffer.hasRemaining()) {
-        buffer.put(inputBuffer);
-      }
-      buffer.flip();
-    }
     long bytesRead = inputBuffer.position() - startPosition;
     checkState(
         bytesRead % inputAudioFormat.bytesPerFrame == 0, "A frame was not queued completely.");
@@ -204,9 +198,11 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
     }
   }
 
+  // Not using BaseAudioProcessor's buffers.
+  @SuppressLint("MissingSuperCall")
   @Override
   public ByteBuffer getOutput() {
-    ByteBuffer output = isUsingSonic() ? sonicAudioProcessor.getOutput() : super.getOutput();
+    ByteBuffer output = sonicAudioProcessor.getOutput();
     processPendingCallbacks();
     return output;
   }
@@ -218,13 +214,13 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
 
   @Override
   protected void onFlush() {
-    resetState();
+    resetState(/* shouldResetSpeed= */ false);
     sonicAudioProcessor.flush();
   }
 
   @Override
   protected void onReset() {
-    resetState();
+    resetState(/* shouldResetSpeed= */ true);
     sonicAudioProcessor.reset();
   }
 
@@ -351,10 +347,8 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
     if (newSpeed != currentSpeed) {
       updateSpeedChangeArrays(timeUs);
       currentSpeed = newSpeed;
-      if (isUsingSonic()) {
       sonicAudioProcessor.setSpeed(newSpeed);
       sonicAudioProcessor.setPitch(newSpeed);
-      }
       // Invalidate any previously created buffers in SonicAudioProcessor and the base class.
       sonicAudioProcessor.flush();
       endOfStreamQueuedToSonic = false;
@@ -378,20 +372,15 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
   }
 
   private long getPlayoutDurationUsAtCurrentSpeed(long mediaDurationUs) {
-    return isUsingSonic()
-        ? sonicAudioProcessor.getPlayoutDuration(mediaDurationUs)
-        : mediaDurationUs;
+    return sonicAudioProcessor.getPlayoutDuration(mediaDurationUs);
   }
 
   private long getMediaDurationUsAtCurrentSpeed(long playoutDurationUs) {
-    return isUsingSonic()
-        ? sonicAudioProcessor.getMediaDuration(playoutDurationUs)
-        : playoutDurationUs;
+    return sonicAudioProcessor.getMediaDuration(playoutDurationUs);
   }
 
   private void updateLastProcessedInputTime() {
     synchronized (lock) {
-      if (isUsingSonic()) {
       // TODO - b/320242819: Investigate whether bytesRead can be used here rather than
       // sonicAudioProcessor.getProcessedInputBytes().
       long currentProcessedInputDurationUs =
@@ -402,21 +391,21 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
       lastProcessedInputTimeUs =
           inputSegmentStartTimesUs.get(inputSegmentStartTimesUs.size() - 1)
               + currentProcessedInputDurationUs;
-      } else {
-        lastProcessedInputTimeUs = sampleCountToDurationUs(framesRead, inputAudioFormat.sampleRate);
-      }
-    }
-  }
-
-  private boolean isUsingSonic() {
-    synchronized (lock) {
-      return currentSpeed != 1f;
     }
   }
 
+  /**
+   * Resets internal fields to their default value.
+   *
+   * <p>When setting {@code shouldResetSpeed} to {@code true}, {@link #sonicAudioProcessor}'s speed
+   * and pitch must also be updated.
+   *
+   * @param shouldResetSpeed Whether {@link #currentSpeed} should be reset to its default value.
+   */
   @EnsuresNonNull({"inputSegmentStartTimesUs", "outputSegmentStartTimesUs"})
   @RequiresNonNull("lock")
-  private void resetState(@UnknownInitialization SpeedChangingAudioProcessor this) {
+  private void resetState(
+      @UnknownInitialization SpeedChangingAudioProcessor this, boolean shouldResetSpeed) {
     synchronized (lock) {
       inputSegmentStartTimesUs = new LongArray();
       outputSegmentStartTimesUs = new LongArray();
@@ -425,8 +414,10 @@ public final class SpeedChangingAudioProcessor extends BaseAudioProcessor {
       lastProcessedInputTimeUs = 0;
       lastSpeedAdjustedInputTimeUs = 0;
       lastSpeedAdjustedOutputTimeUs = 0;
+      if (shouldResetSpeed) {
       currentSpeed = 1f;
+      }
     }
 
     framesRead = 0;
     endOfStreamQueuedToSonic = false;
SynchronizedSonicAudioProcessor.java

@@ -26,9 +26,9 @@ import java.nio.ByteBuffer;
   private final Object lock;
   private final SonicAudioProcessor sonicAudioProcessor;
 
-  public SynchronizedSonicAudioProcessor(Object lock) {
+  public SynchronizedSonicAudioProcessor(Object lock, boolean keepActiveWithDefaultParameters) {
     this.lock = lock;
-    sonicAudioProcessor = new SonicAudioProcessor();
+    sonicAudioProcessor = new SonicAudioProcessor(keepActiveWithDefaultParameters);
   }
 
   public final void setSpeed(float speed) {
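Note: `SynchronizedSonicAudioProcessor` guards every call with the caller-supplied lock and delegates to a wrapped `SonicAudioProcessor`; the only change here is that the keep-active flag is forwarded. A simplified, hypothetical sketch of that delegation shape (not the actual media3 source, and it must live in the androidx.media3.common.audio package because the boolean constructor is package-private):

    package androidx.media3.common.audio; // Required: the boolean constructor is package-private.

    // Trimmed-down illustration of the wrapper's pattern; the real class guards every
    // AudioProcessor method in the same way.
    final class LockedSonicSketch {
      private final Object lock;
      private final SonicAudioProcessor sonic;

      LockedSonicSketch(Object lock, boolean keepActiveWithDefaultParameters) {
        this.lock = lock;
        // Forward the flag so the wrapped processor stays active with default parameters.
        sonic = new SonicAudioProcessor(keepActiveWithDefaultParameters);
      }

      void setSpeed(float speed) {
        synchronized (lock) {
          sonic.setSpeed(speed);
        }
      }

      boolean isActive() {
        synchronized (lock) {
          return sonic.isActive();
        }
      }
    }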
SonicAudioProcessorTest.java

@@ -86,11 +86,19 @@ public final class SonicAudioProcessorTest {
   }
 
   @Test
-  public void isNotActiveWithNoChange() throws Exception {
+  public void isActive_withDefaultParameters_returnsFalse() throws Exception {
     sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
     assertThat(sonicAudioProcessor.isActive()).isFalse();
   }
 
+  @Test
+  public void isActive_keepActiveWithDefaultParameters_returnsTrue() throws Exception {
+    SonicAudioProcessor processor =
+        new SonicAudioProcessor(/* keepActiveWithDefaultParameters= */ true);
+    processor.configure(AUDIO_FORMAT_44100_HZ);
+    assertThat(processor.isActive()).isTrue();
+  }
+
   @Test
   public void doesNotSupportNon16BitInput() throws Exception {
     try {
SpeedChangingAudioProcessorTest.java

@@ -603,6 +603,84 @@ public class SpeedChangingAudioProcessorTest {
     assertThat(outputFrameCount).isWithin(1).of(4);
   }
 
+  @Test
+  public void flush_withInitialSpeedSetToDefault_returnsToInitialSpeedAfterFlush()
+      throws AudioProcessor.UnhandledAudioFormatException {
+    SpeedProvider speedProvider =
+        TestSpeedProvider.createWithFrameCounts(
+            AUDIO_FORMAT,
+            /* frameCounts= */ new int[] {1000, 1000},
+            /* speeds= */ new float[] {1, 2}); // 1000, 500.
+    SpeedChangingAudioProcessor speedChangingAudioProcessor =
+        getConfiguredSpeedChangingAudioProcessor(speedProvider);
+    // 1500 input frames falls in the middle of the 2x region.
+    ByteBuffer input = getInputBuffer(1500);
+    int outputFrameCount = 0;
+
+    while (input.hasRemaining()) {
+      speedChangingAudioProcessor.queueInput(input);
+      outputFrameCount +=
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    }
+    speedChangingAudioProcessor.flush();
+    outputFrameCount +=
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    assertThat(outputFrameCount).isEqualTo(1250);
+    input.rewind();
+
+    // After flush, SpeedChangingAudioProcessor's position should go back to the beginning and use
+    // the first speed region. This means that even if we flushed during 2x, the initial 1000
+    // samples fed to SpeedChangingAudioProcessor after the flush should be output at 1x.
+    while (input.hasRemaining()) {
+      speedChangingAudioProcessor.queueInput(input);
+      outputFrameCount +=
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    }
+    speedChangingAudioProcessor.queueEndOfStream();
+    outputFrameCount +=
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    assertThat(outputFrameCount).isWithin(1).of(2500); // 1250 * 2.
+  }
+
+  @Test
+  public void flush_withInitialSpeedSetToNonDefault_returnsToInitialSpeedAfterFlush()
+      throws AudioProcessor.UnhandledAudioFormatException {
+    SpeedProvider speedProvider =
+        TestSpeedProvider.createWithFrameCounts(
+            AUDIO_FORMAT,
+            /* frameCounts= */ new int[] {1000, 1000},
+            /* speeds= */ new float[] {2, 4}); // 500, 250.
+    SpeedChangingAudioProcessor speedChangingAudioProcessor =
+        getConfiguredSpeedChangingAudioProcessor(speedProvider);
+    // 1500 input frames falls in the middle of the 2x region.
+    ByteBuffer input = getInputBuffer(1500);
+    int outputFrameCount = 0;
+
+    while (input.hasRemaining()) {
+      speedChangingAudioProcessor.queueInput(input);
+      outputFrameCount +=
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    }
+    speedChangingAudioProcessor.flush();
+    outputFrameCount +=
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    assertThat(outputFrameCount).isWithin(1).of(625);
+    input.rewind();
+
+    // After flush, SpeedChangingAudioProcessor's position should go back to the beginning and use
+    // the first speed region. This means that even if we flushed during 4x, the initial 1000
+    // samples fed to SpeedChangingAudioProcessor after the flush should be output at 2x.
+    while (input.hasRemaining()) {
+      speedChangingAudioProcessor.queueInput(input);
+      outputFrameCount +=
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    }
+    speedChangingAudioProcessor.queueEndOfStream();
+    outputFrameCount +=
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+    assertThat(outputFrameCount).isWithin(2).of(1250); // 625 * 2.
+  }
+
   @Test
   public void getSampleCountAfterProcessorApplied_withConstantSpeed_outputsExpectedSamples() {
     SpeedProvider speedProvider =
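Note: the expected counts in the two new flush tests follow from straightforward arithmetic. The sketch below works through the first test (variable names are illustrative, not part of the test code):

    public final class FlushFrameCountArithmetic {
      public static void main(String[] args) {
        // First test: speed regions of 1000 frames at 1x and 1000 frames at 2x; 1500 frames queued.
        int outputFromFirstRegion = 1000;               // 1000 input frames at speed 1 -> 1000 output frames.
        int outputFromSecondRegion = (1500 - 1000) / 2; // 500 input frames at speed 2 -> ~250 output frames.
        int outputBeforeFlush = outputFromFirstRegion + outputFromSecondRegion; // 1250, asserted exactly.
        // flush() rewinds the processor to the start of the stream, so replaying the same 1500
        // frames begins in the 1x region again and contributes roughly another 1250 frames.
        int expectedTotal = 2 * outputBeforeFlush;      // ~2500, matching isWithin(1).of(2500).
        System.out.println(outputBeforeFlush + " frames before flush, ~" + expectedTotal + " in total");
      }
    }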