Don't set playback parameters when using tunneling
Issue: #4803
PiperOrigin-RevId: 356923345
This commit is contained in:
parent d0dd33e5d3
commit ed3d1c6283
@@ -5,6 +5,9 @@
 * Core library:
   * Fix playback issue for HLS live streams without program date time
     information ([#8560](https://github.com/google/ExoPlayer/issues/8560)).
+  * Fix a bug where setting playback parameters while using video tunneling
+    would cause an error to be thrown
+    ([#8570](https://github.com/google/ExoPlayer/issues/8570)).
 * IMA extension:
   * Fix handling of repeated ad loads, to avoid ads being discarded if the
     user seeks away and then back to a preloaded postroll (for example).
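
For context, here is a minimal sketch of the scenario the release note above describes: enabling tunneling and then requesting a non-default playback speed. This is not part of the commit; it assumes the com.google.android.exoplayer2 package names and the SimpleExoPlayer/DefaultTrackSelector APIs of this era, and the setup details are illustrative only. Before this fix, the combination could surface as a playback error; after it, the audio sink simply leaves speed/pitch adjustment unapplied while tunneling.

import android.content.Context;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;

/** Illustrative sketch only; not part of the ExoPlayer sources changed here. */
final class TunnelingSpeedExample {
  static SimpleExoPlayer buildTunnelingPlayer(Context context) {
    // Enable tunneling via the track selector, as apps of this era typically did.
    DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
    trackSelector.setParameters(
        trackSelector
            .buildUponParameters()
            .setTunnelingAudioSessionId(C.generateAudioSessionIdV21(context)));
    SimpleExoPlayer player =
        new SimpleExoPlayer.Builder(context).setTrackSelector(trackSelector).build();
    // Before this commit, requesting a speed here could cause an error to be
    // thrown while tunneling ([#8570]); with the fix, the request is ignored
    // by the audio sink instead.
    player.setPlaybackParameters(new PlaybackParameters(/* speed= */ 2f));
    return player;
  }
}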
@@ -488,7 +488,6 @@ public final class DefaultAudioSink implements AudioSink {
       throws ConfigurationException {
     int inputPcmFrameSize;
     @Nullable AudioProcessor[] availableAudioProcessors;
-    boolean canApplyPlaybackParameters;
 
     @OutputMode int outputMode;
     @C.Encoding int outputEncoding;
@@ -500,11 +499,10 @@ public final class DefaultAudioSink implements AudioSink {
       Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding));
 
       inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount);
-      boolean useFloatOutput =
-          enableFloatOutput && Util.isEncodingHighResolutionPcm(inputFormat.pcmEncoding);
       availableAudioProcessors =
-          useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors;
-      canApplyPlaybackParameters = !useFloatOutput;
+          shouldUseFloatOutput(inputFormat.pcmEncoding)
+              ? toFloatPcmAvailableAudioProcessors
+              : toIntPcmAvailableAudioProcessors;
 
       trimmingAudioProcessor.setTrimFrameCount(
           inputFormat.encoderDelay, inputFormat.encoderPadding);
@@ -541,7 +539,6 @@ public final class DefaultAudioSink implements AudioSink {
     } else {
       inputPcmFrameSize = C.LENGTH_UNSET;
       availableAudioProcessors = new AudioProcessor[0];
-      canApplyPlaybackParameters = false;
       outputSampleRate = inputFormat.sampleRate;
       outputPcmFrameSize = C.LENGTH_UNSET;
       if (enableOffload && isOffloadedPlaybackSupported(inputFormat, audioAttributes)) {
@@ -586,7 +583,6 @@ public final class DefaultAudioSink implements AudioSink {
             outputEncoding,
             specifiedBufferSize,
             enableAudioTrackPlaybackParams,
-            canApplyPlaybackParameters,
             availableAudioProcessors);
     if (isAudioTrackInitialized()) {
       this.pendingConfiguration = pendingConfiguration;
@@ -1336,11 +1332,11 @@ public final class DefaultAudioSink implements AudioSink {
 
   private void applyAudioProcessorPlaybackParametersAndSkipSilence(long presentationTimeUs) {
     PlaybackParameters playbackParameters =
-        configuration.canApplyPlaybackParameters
+        shouldApplyAudioProcessorPlaybackParameters()
             ? audioProcessorChain.applyPlaybackParameters(getAudioProcessorPlaybackParameters())
             : PlaybackParameters.DEFAULT;
     boolean skipSilenceEnabled =
-        configuration.canApplyPlaybackParameters
+        shouldApplyAudioProcessorPlaybackParameters()
             ? audioProcessorChain.applySkipSilenceEnabled(getSkipSilenceEnabled())
             : DEFAULT_SKIP_SILENCE;
     mediaPositionParametersCheckpoints.add(
@@ -1355,6 +1351,31 @@ public final class DefaultAudioSink implements AudioSink {
     }
   }
 
+  /**
+   * Returns whether audio processor playback parameters should be applied in the current
+   * configuration.
+   */
+  private boolean shouldApplyAudioProcessorPlaybackParameters() {
+    // We don't apply speed/pitch adjustment using an audio processor in the following cases:
+    // - in tunneling mode, because audio processing can change the duration of audio yet the video
+    //   frame presentation times are currently not modified (see also
+    //   https://github.com/google/ExoPlayer/issues/4803);
+    // - when playing encoded audio via passthrough/offload, because modifying the audio stream
+    //   would require decoding/re-encoding; and
+    // - when outputting float PCM audio, because SonicAudioProcessor outputs 16-bit integer PCM.
+    return !tunneling
+        && MimeTypes.AUDIO_RAW.equals(configuration.inputFormat.sampleMimeType)
+        && !shouldUseFloatOutput(configuration.inputFormat.pcmEncoding);
+  }
+
+  /**
+   * Returns whether audio in the specified PCM encoding should be written to the audio track as
+   * float PCM.
+   */
+  private boolean shouldUseFloatOutput(@C.PcmEncoding int pcmEncoding) {
+    return enableFloatOutput && Util.isEncodingHighResolutionPcm(pcmEncoding);
+  }
+
   /**
    * Applies and updates media position parameters.
    *
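
The comment added above reasons that audio-processor speed adjustment changes how long the audio takes to play out, while tunneled video frames keep their original presentation times. A tiny, self-contained sketch of that arithmetic follows; it is not ExoPlayer code, and the class and variable names are made up purely for illustration.

/** Illustrative arithmetic only; not part of the ExoPlayer sources. */
final class SpeedupDriftExample {
  public static void main(String[] args) {
    long mediaDurationUs = 10_000_000; // 10 seconds of audio, as authored.
    float speed = 2f;                  // Speed requested via PlaybackParameters.
    // A time-stretching audio processor plays the same media in half the time...
    long audioPlayoutDurationUs = (long) (mediaDurationUs / speed); // 5_000_000
    // ...but in tunneling mode the video frame timestamps are not rescaled, so
    // the last video frame would still be presented at the original 10-second
    // mark, long after the sped-up audio has finished.
    long driftUs = mediaDurationUs - audioPlayoutDurationUs; // 5_000_000
    System.out.println("Audio/video drift at 2x speed: " + driftUs + " us");
  }
}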
@@ -1897,7 +1918,6 @@ public final class DefaultAudioSink implements AudioSink {
     public final int outputChannelConfig;
     @C.Encoding public final int outputEncoding;
     public final int bufferSize;
-    public final boolean canApplyPlaybackParameters;
     public final AudioProcessor[] availableAudioProcessors;
 
     public Configuration(
@@ -1910,7 +1930,6 @@ public final class DefaultAudioSink implements AudioSink {
         int outputEncoding,
         int specifiedBufferSize,
         boolean enableAudioTrackPlaybackParams,
-        boolean canApplyPlaybackParameters,
         AudioProcessor[] availableAudioProcessors) {
       this.inputFormat = inputFormat;
       this.inputPcmFrameSize = inputPcmFrameSize;
@@ -1919,7 +1938,6 @@ public final class DefaultAudioSink implements AudioSink {
       this.outputSampleRate = outputSampleRate;
       this.outputChannelConfig = outputChannelConfig;
       this.outputEncoding = outputEncoding;
-      this.canApplyPlaybackParameters = canApplyPlaybackParameters;
       this.availableAudioProcessors = availableAudioProcessors;
 
       // Call computeBufferSize() last as it depends on the other configuration values.
@@ -320,6 +320,20 @@ public final class DefaultAudioSinkTest {
     assertThat(thrown.format).isEqualTo(format);
   }
 
+  @Test
+  public void setPlaybackParameters_doesNothingWhenTunnelingIsEnabled() throws Exception {
+    defaultAudioSink.setAudioSessionId(1);
+    defaultAudioSink.enableTunnelingV21();
+    defaultAudioSink.setPlaybackParameters(new PlaybackParameters(2));
+    configureDefaultAudioSink(/* channelCount= */ 2);
+    defaultAudioSink.handleBuffer(
+        createDefaultSilenceBuffer(),
+        /* presentationTimeUs= */ 5 * C.MICROS_PER_SECOND,
+        /* encodedAccessUnitCount= */ 1);
+
+    assertThat(defaultAudioSink.getPlaybackParameters().speed).isEqualTo(1);
+  }
+
   private void configureDefaultAudioSink(int channelCount) throws AudioSink.ConfigurationException {
     configureDefaultAudioSink(channelCount, /* trimStartFrames= */ 0, /* trimEndFrames= */ 0);
   }