Automated g4 rollback of changelist 173379623.

*** Reason for rollback ***

Breaks setting PlaybackParameters before start of playback

*** Original change description ***

Add support for float output in DefaultAudioSink

Also switch from using MIME types to C.ENCODING_* encodings in DefaultAudioSink.

***

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=174445506
olly 2017-11-03 04:03:46 -07:00 committed by Oliver Woodman
parent 321bc9c24e
commit b6b09ad40b
7 changed files with 106 additions and 116 deletions
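
For context on the rollback reason: in the change being rolled back, DefaultAudioSink only assigned its processingEnabled flag inside configure(), and setPlaybackParameters() returned PlaybackParameters.DEFAULT whenever that flag was false. A plausible reading of the diff below is therefore that a speed/pitch request made before the sink was configured was silently discarded. A minimal sketch of the affected call order against the AudioSink interface (the wrapper class and method are illustrative, not part of this commit):

import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioSink;

final class SpeedBeforeStartSketch {
  /** Requests 1.5x speed before the sink has been configured or started. */
  static PlaybackParameters requestSpeedBeforeStart(AudioSink sink) {
    // Under the rolled-back change this returned PlaybackParameters.DEFAULT, because
    // processingEnabled was still false at this point; the restored code keeps the request
    // for non-passthrough playback.
    return sink.setPlaybackParameters(new PlaybackParameters(/* speed= */ 1.5f, /* pitch= */ 1f));
  }
}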

C.java

@@ -127,8 +127,8 @@ public final class C {
   */
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({Format.NO_VALUE, ENCODING_INVALID, ENCODING_PCM_8BIT, ENCODING_PCM_16BIT,
-     ENCODING_PCM_24BIT, ENCODING_PCM_32BIT, ENCODING_PCM_FLOAT, ENCODING_AC3, ENCODING_E_AC3,
-     ENCODING_DTS, ENCODING_DTS_HD})
+     ENCODING_PCM_24BIT, ENCODING_PCM_32BIT, ENCODING_AC3, ENCODING_E_AC3, ENCODING_DTS,
+     ENCODING_DTS_HD})
  public @interface Encoding {}

  /**
@@ -136,7 +136,7 @@ public final class C {
   */
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({Format.NO_VALUE, ENCODING_INVALID, ENCODING_PCM_8BIT, ENCODING_PCM_16BIT,
-     ENCODING_PCM_24BIT, ENCODING_PCM_32BIT, ENCODING_PCM_FLOAT})
+     ENCODING_PCM_24BIT, ENCODING_PCM_32BIT})
  public @interface PcmEncoding {}

  /**
   * @see AudioFormat#ENCODING_INVALID
@@ -158,10 +158,6 @@ public final class C {
   * PCM encoding with 32 bits per sample.
   */
  public static final int ENCODING_PCM_32BIT = 0x40000000;
- /**
-  * @see AudioFormat#ENCODING_PCM_FLOAT
-  */
- public static final int ENCODING_PCM_FLOAT = AudioFormat.ENCODING_PCM_FLOAT;
  /**
   * @see AudioFormat#ENCODING_AC3
   */

AudioSink.java

@@ -25,13 +25,14 @@ import java.nio.ByteBuffer;
  * A sink that consumes audio data.
  * <p>
  * Before starting playback, specify the input audio format by calling
- * {@link #configure(int, int, int, int, int[], int, int)}.
+ * {@link #configure(String, int, int, int, int, int[], int, int)}.
  * <p>
  * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
  * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
  * <p>
- * Call {@link #configure(int, int, int, int, int[], int, int)} whenever the input format changes.
- * The sink will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
+ * Call {@link #configure(String, int, int, int, int, int[], int, int)} whenever the input format
+ * changes. The sink will be reinitialized on the next call to
+ * {@link #handleBuffer(ByteBuffer, long)}.
 * <p>
 * Call {@link #reset()} to prepare the sink to receive audio data from a new playback position.
 * <p>
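
Read together, the javadoc above prescribes a configure, then handleBuffer, then play lifecycle. The sketch below walks that order using the restored configure signature; the wrapper class, the parameter values and the choice of raw 16-bit stereo PCM input are illustrative assumptions rather than code from this commit:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer;

final class AudioSinkUsageSketch {
  /** Feeds one buffer of 44.1 kHz stereo 16-bit PCM, following the documented call order. */
  static void writeAndPlay(AudioSink sink, ByteBuffer pcmBuffer, long presentationTimeUs)
      throws Exception {
    sink.configure(MimeTypes.AUDIO_RAW, /* inputChannelCount= */ 2, /* inputSampleRate= */ 44100,
        C.ENCODING_PCM_16BIT, /* specifiedBufferSize= */ 0, /* outputChannels= */ null,
        /* trimStartSamples= */ 0, /* trimEndSamples= */ 0);
    // handleBuffer returns false until the sink has consumed the buffer in full, so the same
    // ByteBuffer instance must be offered again (a real renderer retries on its next pass).
    while (!sink.handleBuffer(pcmBuffer, presentationTimeUs)) {}
    sink.play();
  }
}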
@@ -165,12 +166,13 @@ public interface AudioSink {
  void setListener(Listener listener);

  /**
-  * Returns whether it's possible to play audio in the specified encoding using passthrough.
+  * Returns whether it's possible to play audio in the specified format using encoded audio
+  * passthrough.
   *
-  * @param encoding The audio encoding.
-  * @return Whether it's possible to play audio in the specified encoding using passthrough.
+  * @param mimeType The format mime type.
+  * @return Whether it's possible to play audio in the format using encoded audio passthrough.
   */
- boolean isPassthroughSupported(@C.Encoding int encoding);
+ boolean isPassthroughSupported(String mimeType);

  /**
   * Returns the playback position in the stream starting at zero, in microseconds, or
@@ -184,9 +186,12 @@ public interface AudioSink {
  /**
   * Configures (or reconfigures) the sink.
   *
-  * @param inputEncoding The encoding of audio data provided in the input buffers.
+  * @param inputMimeType The MIME type of audio data provided in the input buffers.
   * @param inputChannelCount The number of channels.
   * @param inputSampleRate The sample rate in Hz.
+  * @param inputPcmEncoding For PCM formats, the encoding used. One of
+  *     {@link C#ENCODING_PCM_8BIT}, {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT}
+  *     and {@link C#ENCODING_PCM_32BIT}.
   * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
   *     suitable buffer size.
   * @param outputChannels A mapping from input to output channels that is applied to this sink's
@@ -200,9 +205,9 @@ public interface AudioSink {
   * immediately preceding the next call to {@link #reset()} or this method.
   * @throws ConfigurationException If an error occurs configuring the sink.
   */
- void configure(@C.Encoding int inputEncoding, int inputChannelCount, int inputSampleRate,
-     int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples,
-     int trimEndSamples) throws ConfigurationException;
+ void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
+     @C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
+     int trimStartSamples, int trimEndSamples) throws ConfigurationException;

  /**
   * Starts or resumes consuming audio if initialized.
@@ -223,7 +228,8 @@ public interface AudioSink {
   * Returns whether the data was handled in full. If the data was not handled in full then the same
   * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
   * except in the case of an intervening call to {@link #reset()} (or to
-  * {@link #configure(int, int, int, int, int[], int, int)} that causes the sink to be reset).
+  * {@link #configure(String, int, int, int, int, int[], int, int)} that causes the sink to be
+  * reset).
   *
   * @param buffer The buffer containing audio data.
   * @param presentationTimeUs The presentation timestamp of the buffer in microseconds.

DefaultAudioSink.java

@@ -29,6 +29,7 @@ import android.util.Log;
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.PlaybackParameters;
 import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.MimeTypes;
 import com.google.android.exoplayer2.util.Util;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
@@ -181,13 +182,13 @@ public final class DefaultAudioSink implements AudioSink {
   */
  private AudioTrack keepSessionIdAudioTrack;
  private AudioTrack audioTrack;
- private boolean isInputPcm;
  private int inputSampleRate;
  private int sampleRate;
  private int channelConfig;
+ private @C.Encoding int encoding;
  private @C.Encoding int outputEncoding;
  private AudioAttributes audioAttributes;
- private boolean processingEnabled;
+ private boolean passthrough;
  private int bufferSize;
  private long bufferSizeUs;
@@ -285,8 +286,9 @@ public final class DefaultAudioSink implements AudioSink {
  }

  @Override
- public boolean isPassthroughSupported(@C.Encoding int encoding) {
-   return audioCapabilities != null && audioCapabilities.supportsEncoding(encoding);
+ public boolean isPassthroughSupported(String mimeType) {
+   return audioCapabilities != null
+       && audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType));
  }

  @Override
@@ -329,20 +331,18 @@ public final class DefaultAudioSink implements AudioSink {
  }

  @Override
- public void configure(@C.Encoding int inputEncoding, int inputChannelCount, int inputSampleRate,
-     int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples,
-     int trimEndSamples) throws ConfigurationException {
-   boolean flush = false;
+ public void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
+     @C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
+     int trimStartSamples, int trimEndSamples) throws ConfigurationException {
    this.inputSampleRate = inputSampleRate;
    int channelCount = inputChannelCount;
    int sampleRate = inputSampleRate;
-   isInputPcm = isEncodingPcm(inputEncoding);
-   if (isInputPcm) {
-     pcmFrameSize = Util.getPcmFrameSize(inputEncoding, channelCount);
-   }
-   @C.Encoding int encoding = inputEncoding;
-   boolean processingEnabled = isInputPcm && inputEncoding != C.ENCODING_PCM_FLOAT;
-   if (processingEnabled) {
+   @C.Encoding int encoding;
+   boolean passthrough = !MimeTypes.AUDIO_RAW.equals(inputMimeType);
+   boolean flush = false;
+   if (!passthrough) {
+     encoding = inputPcmEncoding;
+     pcmFrameSize = Util.getPcmFrameSize(inputPcmEncoding, channelCount);
      trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples);
      channelMappingAudioProcessor.setChannelMap(outputChannels);
      for (AudioProcessor audioProcessor : availableAudioProcessors) {
@@ -360,6 +360,8 @@ public final class DefaultAudioSink implements AudioSink {
      if (flush) {
        resetAudioProcessors();
      }
+   } else {
+     encoding = getEncodingForMimeType(inputMimeType);
    }

    int channelConfig;
@@ -409,11 +411,11 @@ public final class DefaultAudioSink implements AudioSink {
    // Workaround for Nexus Player not reporting support for mono passthrough.
    // (See [Internal: b/34268671].)
-   if (Util.SDK_INT <= 25 && "fugu".equals(Util.DEVICE) && !isInputPcm && channelCount == 1) {
+   if (Util.SDK_INT <= 25 && "fugu".equals(Util.DEVICE) && passthrough && channelCount == 1) {
      channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    }

-   if (!flush && isInitialized() && outputEncoding == encoding && this.sampleRate == sampleRate
+   if (!flush && isInitialized() && this.encoding == encoding && this.sampleRate == sampleRate
        && this.channelConfig == channelConfig) {
      // We already have an audio track with the correct sample rate, channel config and encoding.
      return;
@@ -421,24 +423,16 @@ public final class DefaultAudioSink implements AudioSink {
    reset();

-   this.processingEnabled = processingEnabled;
+   this.encoding = encoding;
+   this.passthrough = passthrough;
    this.sampleRate = sampleRate;
    this.channelConfig = channelConfig;
-   outputEncoding = encoding;
-   if (isInputPcm) {
-     outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, channelCount);
-   }
+   outputEncoding = passthrough ? encoding : C.ENCODING_PCM_16BIT;
+   outputPcmFrameSize = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, channelCount);

    if (specifiedBufferSize != 0) {
      bufferSize = specifiedBufferSize;
-   } else if (isInputPcm) {
-     int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
-     Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
-     int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
-     int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
-     int maxAppBufferSize = (int) Math.max(minBufferSize,
-         durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
-     bufferSize = Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
-   } else {
+   } else if (passthrough) {
      // TODO: Set the minimum buffer size using getMinBufferSize when it takes the encoding into
      // account. [Internal: b/25181305]
      if (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3) {
@@ -448,9 +442,18 @@ public final class DefaultAudioSink implements AudioSink {
        // DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s.
        bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 1024 / C.MICROS_PER_SECOND);
      }
+   } else {
+     int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
+     Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
+     int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
+     int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
+     int maxAppBufferSize = (int) Math.max(minBufferSize,
+         durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
+     bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize
+         : multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize
+         : multipliedBufferSize;
    }
-   bufferSizeUs =
-       isInputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
+   bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(bufferSize / outputPcmFrameSize);

    // The old playback parameters may no longer be applicable so try to reset them now.
    setPlaybackParameters(playbackParameters);
@@ -571,7 +574,7 @@ public final class DefaultAudioSink implements AudioSink {
        return true;
      }

-     if (!isInputPcm && framesPerEncodedSample == 0) {
+     if (passthrough && framesPerEncodedSample == 0) {
        // If this is the first encoded sample, calculate the sample size in frames.
        framesPerEncodedSample = getFramesPerEncodedSample(outputEncoding, buffer);
      }
@@ -615,19 +618,20 @@ public final class DefaultAudioSink implements AudioSink {
        }
      }

-     if (isInputPcm) {
-       submittedPcmBytes += buffer.remaining();
-     } else {
+     if (passthrough) {
        submittedEncodedFrames += framesPerEncodedSample;
+     } else {
+       submittedPcmBytes += buffer.remaining();
      }

      inputBuffer = buffer;
    }

-   if (processingEnabled) {
-     processBuffers(presentationTimeUs);
-   } else {
+   if (passthrough) {
+     // Passthrough buffers are not processed.
      writeBuffer(inputBuffer, presentationTimeUs);
+   } else {
+     processBuffers(presentationTimeUs);
    }

    if (!inputBuffer.hasRemaining()) {
@@ -675,9 +679,10 @@ public final class DefaultAudioSink implements AudioSink {
  }

  @SuppressWarnings("ReferenceEquality")
- private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
+ private boolean writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs)
+     throws WriteException {
    if (!buffer.hasRemaining()) {
-     return;
+     return true;
    }
    if (outputBuffer != null) {
      Assertions.checkArgument(outputBuffer == buffer);
@@ -696,7 +701,7 @@ public final class DefaultAudioSink implements AudioSink {
    }
    int bytesRemaining = buffer.remaining();
    int bytesWritten = 0;
-   if (Util.SDK_INT < 21) { // isInputPcm == true
+   if (Util.SDK_INT < 21) { // passthrough == false
      // Work out how many bytes we can write without the risk of blocking.
      int bytesPending =
          (int) (writtenPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * outputPcmFrameSize));
@@ -723,15 +728,17 @@ public final class DefaultAudioSink implements AudioSink {
      throw new WriteException(bytesWritten);
    }

-   if (isInputPcm) {
+   if (!passthrough) {
      writtenPcmBytes += bytesWritten;
    }
    if (bytesWritten == bytesRemaining) {
-     if (!isInputPcm) {
+     if (passthrough) {
        writtenEncodedFrames += framesPerEncodedSample;
      }
      outputBuffer = null;
+     return true;
    }
+   return false;
  }

  @Override
@@ -751,7 +758,7 @@ public final class DefaultAudioSink implements AudioSink {
  private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
    boolean audioProcessorNeedsEndOfStream = false;
    if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
-     drainingAudioProcessorIndex = processingEnabled ? 0 : audioProcessors.length;
+     drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
      audioProcessorNeedsEndOfStream = true;
    }
    while (drainingAudioProcessorIndex < audioProcessors.length) {
@@ -792,8 +799,8 @@ public final class DefaultAudioSink implements AudioSink {
  @Override
  public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
-   if (!processingEnabled) {
-     // The playback parameters are always the default if processing is disabled.
+   if (passthrough) {
+     // The playback parameters are always the default in passthrough mode.
      this.playbackParameters = PlaybackParameters.DEFAULT;
      return this.playbackParameters;
    }
@@ -1069,7 +1076,7 @@ public final class DefaultAudioSink implements AudioSink {
        audioTimestampSet = false;
      }
    }
-   if (getLatencyMethod != null && isInputPcm) {
+   if (getLatencyMethod != null && !passthrough) {
      try {
        // Compute the audio track latency, excluding the latency due to the buffer (leaving
        // latency due to the mixer and audio hardware driver).
@@ -1108,11 +1115,11 @@ public final class DefaultAudioSink implements AudioSink {
  }

  private long getSubmittedFrames() {
-   return isInputPcm ? (submittedPcmBytes / pcmFrameSize) : submittedEncodedFrames;
+   return passthrough ? submittedEncodedFrames : (submittedPcmBytes / pcmFrameSize);
  }

  private long getWrittenFrames() {
-   return isInputPcm ? (writtenPcmBytes / outputPcmFrameSize) : writtenEncodedFrames;
+   return passthrough ? writtenEncodedFrames : (writtenPcmBytes / outputPcmFrameSize);
  }

  private void resetSyncParams() {
@@ -1205,10 +1212,20 @@ public final class DefaultAudioSink implements AudioSink {
        MODE_STATIC, audioSessionId);
  }

- private static boolean isEncodingPcm(@C.Encoding int encoding) {
-   return encoding == C.ENCODING_PCM_8BIT || encoding == C.ENCODING_PCM_16BIT
-       || encoding == C.ENCODING_PCM_24BIT || encoding == C.ENCODING_PCM_32BIT
-       || encoding == C.ENCODING_PCM_FLOAT;
+ @C.Encoding
+ private static int getEncodingForMimeType(String mimeType) {
+   switch (mimeType) {
+     case MimeTypes.AUDIO_AC3:
+       return C.ENCODING_AC3;
+     case MimeTypes.AUDIO_E_AC3:
+       return C.ENCODING_E_AC3;
+     case MimeTypes.AUDIO_DTS:
+       return C.ENCODING_DTS;
+     case MimeTypes.AUDIO_DTS_HD:
+       return C.ENCODING_DTS_HD;
+     default:
+       return C.ENCODING_INVALID;
+   }
  }

  private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {

MediaCodecAudioRenderer.java

@@ -51,7 +51,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
  private boolean passthroughEnabled;
  private boolean codecNeedsDiscardChannelsWorkaround;
  private android.media.MediaFormat passthroughMediaFormat;
- @C.Encoding
  private int pcmEncoding;
  private int channelCount;
  private int encoderDelay;
@@ -227,7 +226,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   * @return Whether passthrough playback is supported.
   */
  protected boolean allowPassthrough(String mimeType) {
-   return audioSink.isPassthroughSupported(MimeTypes.getEncoding(mimeType));
+   return audioSink.isPassthroughSupported(mimeType);
  }

  @Override
@@ -273,15 +272,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
  @Override
  protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat)
      throws ExoPlaybackException {
-   @C.Encoding int encoding;
-   MediaFormat format;
-   if (passthroughMediaFormat != null) {
-     encoding = MimeTypes.getEncoding(passthroughMediaFormat.getString(MediaFormat.KEY_MIME));
-     format = passthroughMediaFormat;
-   } else {
-     encoding = pcmEncoding;
-     format = outputFormat;
-   }
+   boolean passthrough = passthroughMediaFormat != null;
+   String mimeType = passthrough ? passthroughMediaFormat.getString(MediaFormat.KEY_MIME)
+       : MimeTypes.AUDIO_RAW;
+   MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    int[] channelMap;
@@ -295,8 +289,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
    }
    try {
-     audioSink.configure(encoding, channelCount, sampleRate, 0, channelMap, encoderDelay,
-         encoderPadding);
+     audioSink.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap,
+         encoderDelay, encoderPadding);
    } catch (AudioSink.ConfigurationException e) {
      throw ExoPlaybackException.createForRenderer(e, getIndex());
    }

SimpleDecoderAudioRenderer.java

@@ -329,8 +329,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
      if (audioTrackNeedsConfigure) {
        Format outputFormat = getOutputFormat();
-       audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount,
-           outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding);
+       audioSink.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
+           outputFormat.sampleRate, outputFormat.pcmEncoding, 0, null, encoderDelay, encoderPadding);
        audioTrackNeedsConfigure = false;
      }

MimeTypes.java

@@ -208,12 +208,12 @@ public final class MimeTypes {
  }

  /**
-  * Returns the {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified MIME type.
-  * {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be
+  * Returns the {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified mime type.
+  * {@link C#TRACK_TYPE_UNKNOWN} if the mime type is not known or the mapping cannot be
   * established.
   *
-  * @param mimeType The MIME type.
-  * @return The {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified MIME type.
+  * @param mimeType The mimeType.
+  * @return The {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified mime type.
   */
  public static int getTrackType(String mimeType) {
    if (TextUtils.isEmpty(mimeType)) {
@@ -239,28 +239,6 @@ public final class MimeTypes {
    }
  }

- /**
-  * Returns the {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type, or
-  * {@link C#ENCODING_INVALID} if the mapping cannot be established.
-  *
-  * @param mimeType The MIME type.
-  * @return The {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type.
-  */
- public static @C.Encoding int getEncoding(String mimeType) {
-   switch (mimeType) {
-     case MimeTypes.AUDIO_AC3:
-       return C.ENCODING_AC3;
-     case MimeTypes.AUDIO_E_AC3:
-       return C.ENCODING_E_AC3;
-     case MimeTypes.AUDIO_DTS:
-       return C.ENCODING_DTS;
-     case MimeTypes.AUDIO_DTS_HD:
-       return C.ENCODING_DTS_HD;
-     default:
-       return C.ENCODING_INVALID;
-   }
- }
-
  /**
   * Equivalent to {@code getTrackType(getMediaMimeType(codec))}.
   *

Util.java

@@ -790,7 +790,6 @@ public final class Util {
      case C.ENCODING_PCM_24BIT:
        return channelCount * 3;
      case C.ENCODING_PCM_32BIT:
-     case C.ENCODING_PCM_FLOAT:
        return channelCount * 4;
      default:
        throw new IllegalArgumentException();
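
For reference, Util.getPcmFrameSize multiplies the per-sample byte width implied by the encoding by the channel count; with the float case removed, only 8-, 16-, 24- and 32-bit integer PCM are accepted again. A small illustration (the wrapper class name is hypothetical):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;

final class PcmFrameSizeExample {
  // 16-bit PCM is 2 bytes per sample, so a stereo frame occupies 4 bytes.
  static final int STEREO_16_BIT_FRAME_SIZE = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, 2);
  // 32-bit PCM is 4 bytes per sample, giving 8 bytes per stereo frame.
  static final int STEREO_32_BIT_FRAME_SIZE = Util.getPcmFrameSize(C.ENCODING_PCM_32BIT, 2);
}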