Add support for float output for FfmpegAudioRenderer
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=175940553
commit b8aedfbf4f (parent 15a2f47f31)
@@ -6,7 +6,8 @@
   position, for fast backward seeking. The back-buffer can be configured by
   custom `LoadControl` implementations.
 * New Cast extension: Simplifies toggling between local and Cast playbacks.
-* Support 32-bit PCM float output from `DefaultAudioSink`.
+* Support 32-bit PCM float output from `DefaultAudioSink`, and add an option to
+  use this with `FfmpegAudioRenderer`.

 ### 2.6.0 ###

@@ -21,6 +21,8 @@ import com.google.android.exoplayer2.ExoPlaybackException;
 import com.google.android.exoplayer2.Format;
 import com.google.android.exoplayer2.audio.AudioProcessor;
 import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.audio.AudioSink;
+import com.google.android.exoplayer2.audio.DefaultAudioSink;
 import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer;
 import com.google.android.exoplayer2.drm.DrmSessionManager;
 import com.google.android.exoplayer2.drm.ExoMediaCrypto;
@@ -41,6 +43,8 @@ public final class FfmpegAudioRenderer extends SimpleDecoderAudioRenderer {
    */
   private static final int INITIAL_INPUT_BUFFER_SIZE = 960 * 6;

+  private final boolean enableFloatOutput;
+
   private FfmpegDecoder decoder;

   public FfmpegAudioRenderer() {
@@ -55,7 +59,23 @@ public final class FfmpegAudioRenderer extends SimpleDecoderAudioRenderer {
    */
   public FfmpegAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
       AudioProcessor... audioProcessors) {
-    super(eventHandler, eventListener, audioProcessors);
+    this(eventHandler, eventListener, new DefaultAudioSink(null, audioProcessors), false);
+  }
+
+  /**
+   * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+   *     null if delivery of events is not required.
+   * @param eventListener A listener of events. May be null if delivery of events is not required.
+   * @param audioSink The sink to which audio will be output.
+   * @param enableFloatOutput Whether to enable 32-bit float audio format, if supported on the
+   *     device/build and if the input format may have bit depth higher than 16-bit. When using
+   *     32-bit float output, any audio processing will be disabled, including playback speed/pitch
+   *     adjustment.
+   */
+  public FfmpegAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+      AudioSink audioSink, boolean enableFloatOutput) {
+    super(eventHandler, eventListener, null, false, audioSink);
+    this.enableFloatOutput = enableFloatOutput;
   }

   @Override
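For reference, the new four-argument constructor above can be exercised as follows. This is a minimal sketch, assuming the 2.6-era ExoPlayer extension APIs shown in this diff; the factory class name, the empty processor array, and the way the handler/listener are passed through are illustrative choices, not part of the change.

import android.os.Handler;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.DefaultAudioSink;
import com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer;

/** Illustrative helper (not part of this commit) that builds a float-capable FFmpeg audio renderer. */
public final class FloatAudioRendererFactory {

  private FloatAudioRendererFactory() {}

  /** Creates an FfmpegAudioRenderer that requests 32-bit float PCM output where supported. */
  public static FfmpegAudioRenderer createFloatOutputRenderer(
      Handler eventHandler, AudioRendererEventListener eventListener) {
    // A sink without extra processors; float output bypasses audio processing anyway, so playback
    // speed/pitch adjustment is unavailable in this mode (see the enableFloatOutput javadoc above).
    DefaultAudioSink audioSink = new DefaultAudioSink(null, new AudioProcessor[0]);
    return new FfmpegAudioRenderer(
        eventHandler, eventListener, audioSink, /* enableFloatOutput= */ true);
  }
}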
@@ -64,7 +84,7 @@
     String sampleMimeType = format.sampleMimeType;
     if (!FfmpegLibrary.isAvailable() || !MimeTypes.isAudio(sampleMimeType)) {
       return FORMAT_UNSUPPORTED_TYPE;
-    } else if (!FfmpegLibrary.supportsFormat(sampleMimeType)) {
+    } else if (!FfmpegLibrary.supportsFormat(sampleMimeType) || !isOutputSupported(format)) {
       return FORMAT_UNSUPPORTED_SUBTYPE;
     } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) {
       return FORMAT_UNSUPPORTED_DRM;
@@ -82,7 +102,7 @@
   protected FfmpegDecoder createDecoder(Format format, ExoMediaCrypto mediaCrypto)
       throws FfmpegDecoderException {
     decoder = new FfmpegDecoder(NUM_BUFFERS, NUM_BUFFERS, INITIAL_INPUT_BUFFER_SIZE,
-        format.sampleMimeType, format.initializationData);
+        format.sampleMimeType, format.initializationData, shouldUseFloatOutput(format));
     return decoder;
   }

@@ -90,8 +110,32 @@
   public Format getOutputFormat() {
     int channelCount = decoder.getChannelCount();
     int sampleRate = decoder.getSampleRate();
+    @C.PcmEncoding int encoding = decoder.getEncoding();
     return Format.createAudioSampleFormat(null, MimeTypes.AUDIO_RAW, null, Format.NO_VALUE,
-        Format.NO_VALUE, channelCount, sampleRate, C.ENCODING_PCM_16BIT, null, null, 0, null);
+        Format.NO_VALUE, channelCount, sampleRate, encoding, null, null, 0, null);
   }

+  private boolean isOutputSupported(Format inputFormat) {
+    return shouldUseFloatOutput(inputFormat) || supportsOutputEncoding(C.ENCODING_PCM_16BIT);
+  }
+
+  private boolean shouldUseFloatOutput(Format inputFormat) {
+    if (!enableFloatOutput || !supportsOutputEncoding(C.ENCODING_PCM_FLOAT)) {
+      return false;
+    }
+    switch (inputFormat.sampleMimeType) {
+      case MimeTypes.AUDIO_RAW:
+        // For raw audio, output in 32-bit float encoding if the bit depth is > 16-bit.
+        return inputFormat.pcmEncoding == C.ENCODING_PCM_24BIT
+            || inputFormat.pcmEncoding == C.ENCODING_PCM_32BIT
+            || inputFormat.pcmEncoding == C.ENCODING_PCM_FLOAT;
+      case MimeTypes.AUDIO_AC3:
+        // AC-3 is always 16-bit, so there is no point outputting in 32-bit float encoding.
+        return false;
+      default:
+        // For all other formats, assume that it's worth using 32-bit float encoding.
+        return true;
+    }
+  }
+
 }
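To summarize the decision logic above in one place: float output is only used when it was requested and the sink supports C.ENCODING_PCM_FLOAT, and then only for inputs that can actually carry more than 16 bits of precision. A standalone restatement of that rule follows (illustrative only; not part of the commit).

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.MimeTypes;

/** Illustrative restatement of shouldUseFloatOutput's switch (not part of this commit). */
public final class FloatOutputRule {

  private FloatOutputRule() {}

  /** Assumes float output is enabled and the sink reports support for C.ENCODING_PCM_FLOAT. */
  public static boolean useFloat(String sampleMimeType, @C.PcmEncoding int pcmEncoding) {
    if (MimeTypes.AUDIO_RAW.equals(sampleMimeType)) {
      // Raw PCM: only worth it when the source bit depth exceeds 16 bits.
      return pcmEncoding == C.ENCODING_PCM_24BIT
          || pcmEncoding == C.ENCODING_PCM_32BIT
          || pcmEncoding == C.ENCODING_PCM_FLOAT;
    }
    // AC-3 is 16-bit, so float output gains nothing; other compressed formats are assumed to benefit.
    return !MimeTypes.AUDIO_AC3.equals(sampleMimeType);
  }
}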
@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer2.ext.ffmpeg;

+import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
 import com.google.android.exoplayer2.decoder.SimpleDecoder;
 import com.google.android.exoplayer2.decoder.SimpleOutputBuffer;
@@ -29,11 +30,15 @@ import java.util.List;
 /* package */ final class FfmpegDecoder extends
     SimpleDecoder<DecoderInputBuffer, SimpleOutputBuffer, FfmpegDecoderException> {

-  // Space for 64 ms of 6 channel 48 kHz 16-bit PCM audio.
-  private static final int OUTPUT_BUFFER_SIZE = 1536 * 6 * 2 * 2;
+  // Space for 64 ms of 48 kHz 8 channel 16-bit PCM audio.
+  private static final int OUTPUT_BUFFER_SIZE_16BIT = 64 * 48 * 8 * 2;
+  // Space for 64 ms of 48 kHz 8 channel 32-bit PCM audio.
+  private static final int OUTPUT_BUFFER_SIZE_32BIT = OUTPUT_BUFFER_SIZE_16BIT * 2;

   private final String codecName;
   private final byte[] extraData;
+  private final @C.Encoding int encoding;
+  private final int outputBufferSize;

   private long nativeContext; // May be reassigned on resetting the codec.
   private boolean hasOutputFormat;
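The new constants size the output buffer from first principles: 64 ms at 48 kHz is 64 * 48 samples per channel, times 8 channels, times 2 bytes per 16-bit sample, and the 32-bit variant simply doubles that. A throwaway check of the arithmetic (class name illustrative):

/** Throwaway arithmetic check of the buffer sizes above (not part of this commit). */
public final class OutputBufferSizeCheck {
  public static void main(String[] args) {
    int durationMs = 64;
    int samplesPerMs = 48; // 48 kHz
    int channels = 8;
    int bytesPerSample16Bit = 2;
    int size16Bit = durationMs * samplesPerMs * channels * bytesPerSample16Bit;
    int size32Bit = size16Bit * 2;
    System.out.println(size16Bit); // 49152 bytes, matching OUTPUT_BUFFER_SIZE_16BIT
    System.out.println(size32Bit); // 98304 bytes, matching OUTPUT_BUFFER_SIZE_32BIT
  }
}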
@@ -41,14 +46,17 @@ import java.util.List;
   private volatile int sampleRate;

   public FfmpegDecoder(int numInputBuffers, int numOutputBuffers, int initialInputBufferSize,
-      String mimeType, List<byte[]> initializationData) throws FfmpegDecoderException {
+      String mimeType, List<byte[]> initializationData, boolean outputFloat)
+      throws FfmpegDecoderException {
     super(new DecoderInputBuffer[numInputBuffers], new SimpleOutputBuffer[numOutputBuffers]);
     if (!FfmpegLibrary.isAvailable()) {
       throw new FfmpegDecoderException("Failed to load decoder native libraries.");
     }
     codecName = FfmpegLibrary.getCodecName(mimeType);
     extraData = getExtraData(mimeType, initializationData);
-    nativeContext = ffmpegInitialize(codecName, extraData);
+    encoding = outputFloat ? C.ENCODING_PCM_FLOAT : C.ENCODING_PCM_16BIT;
+    outputBufferSize = outputFloat ? OUTPUT_BUFFER_SIZE_32BIT : OUTPUT_BUFFER_SIZE_16BIT;
+    nativeContext = ffmpegInitialize(codecName, extraData, outputFloat);
     if (nativeContext == 0) {
       throw new FfmpegDecoderException("Initialization failed.");
     }
@@ -81,8 +89,8 @@ import java.util.List;
     }
     ByteBuffer inputData = inputBuffer.data;
     int inputSize = inputData.limit();
-    ByteBuffer outputData = outputBuffer.init(inputBuffer.timeUs, OUTPUT_BUFFER_SIZE);
-    int result = ffmpegDecode(nativeContext, inputData, inputSize, outputData, OUTPUT_BUFFER_SIZE);
+    ByteBuffer outputData = outputBuffer.init(inputBuffer.timeUs, outputBufferSize);
+    int result = ffmpegDecode(nativeContext, inputData, inputSize, outputData, outputBufferSize);
     if (result < 0) {
       return new FfmpegDecoderException("Error decoding (see logcat). Code: " + result);
     }
@@ -124,6 +132,13 @@ import java.util.List;
     return sampleRate;
   }

+  /**
+   * Returns the encoding of output audio.
+   */
+  public @C.Encoding int getEncoding() {
+    return encoding;
+  }
+
   /**
    * Returns FFmpeg-compatible codec-specific initialization data ("extra data"), or {@code null} if
    * not required.
@@ -153,7 +168,7 @@ import java.util.List;
     }
   }

-  private native long ffmpegInitialize(String codecName, byte[] extraData);
+  private native long ffmpegInitialize(String codecName, byte[] extraData, boolean outputFloat);
   private native int ffmpegDecode(long context, ByteBuffer inputData, int inputSize,
       ByteBuffer outputData, int outputSize);
   private native int ffmpegGetChannelCount(long context);
@@ -57,8 +57,10 @@ extern "C" {

 #define ERROR_STRING_BUFFER_LENGTH 256

-// Request a format corresponding to AudioFormat.ENCODING_PCM_16BIT.
-static const AVSampleFormat OUTPUT_FORMAT = AV_SAMPLE_FMT_S16;
+// Output format corresponding to AudioFormat.ENCODING_PCM_16BIT.
+static const AVSampleFormat OUTPUT_FORMAT_PCM_16BIT = AV_SAMPLE_FMT_S16;
+// Output format corresponding to AudioFormat.ENCODING_PCM_FLOAT.
+static const AVSampleFormat OUTPUT_FORMAT_PCM_FLOAT = AV_SAMPLE_FMT_FLT;

 /**
  * Returns the AVCodec with the specified name, or NULL if it is not available.
@@ -71,7 +73,7 @@ AVCodec *getCodecByName(JNIEnv* env, jstring codecName);
  * Returns the created context.
  */
 AVCodecContext *createContext(JNIEnv *env, AVCodec *codec,
-                              jbyteArray extraData);
+                              jbyteArray extraData, jboolean outputFloat);

 /**
  * Decodes the packet into the output buffer, returning the number of bytes
@@ -107,13 +109,14 @@ LIBRARY_FUNC(jboolean, ffmpegHasDecoder, jstring codecName) {
   return getCodecByName(env, codecName) != NULL;
 }

-DECODER_FUNC(jlong, ffmpegInitialize, jstring codecName, jbyteArray extraData) {
+DECODER_FUNC(jlong, ffmpegInitialize, jstring codecName, jbyteArray extraData,
+             jboolean outputFloat) {
   AVCodec *codec = getCodecByName(env, codecName);
   if (!codec) {
     LOGE("Codec not found.");
     return 0L;
   }
-  return (jlong) createContext(env, codec, extraData);
+  return (jlong) createContext(env, codec, extraData, outputFloat);
 }

 DECODER_FUNC(jint, ffmpegDecode, jlong context, jobject inputData,
@@ -177,7 +180,8 @@ DECODER_FUNC(jlong, ffmpegReset, jlong jContext, jbyteArray extraData) {
       LOGE("Unexpected error finding codec %d.", codecId);
       return 0L;
     }
-    return (jlong) createContext(env, codec, extraData);
+    return (jlong) createContext(env, codec, extraData,
+                                 context->request_sample_fmt == OUTPUT_FORMAT_PCM_FLOAT);
   }

   avcodec_flush_buffers(context);
@@ -201,13 +205,14 @@ AVCodec *getCodecByName(JNIEnv* env, jstring codecName) {
 }

 AVCodecContext *createContext(JNIEnv *env, AVCodec *codec,
-                              jbyteArray extraData) {
+                              jbyteArray extraData, jboolean outputFloat) {
   AVCodecContext *context = avcodec_alloc_context3(codec);
   if (!context) {
     LOGE("Failed to allocate context.");
     return NULL;
   }
-  context->request_sample_fmt = OUTPUT_FORMAT;
+  context->request_sample_fmt =
+      outputFloat ? OUTPUT_FORMAT_PCM_FLOAT : OUTPUT_FORMAT_PCM_16BIT;
   if (extraData) {
     jsize size = env->GetArrayLength(extraData);
     context->extradata_size = size;
@@ -275,7 +280,9 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
     av_opt_set_int(resampleContext, "in_sample_rate", sampleRate, 0);
     av_opt_set_int(resampleContext, "out_sample_rate", sampleRate, 0);
     av_opt_set_int(resampleContext, "in_sample_fmt", sampleFormat, 0);
-    av_opt_set_int(resampleContext, "out_sample_fmt", OUTPUT_FORMAT, 0);
+    // The output format is always the requested format.
+    av_opt_set_int(resampleContext, "out_sample_fmt",
+                   context->request_sample_fmt, 0);
     result = avresample_open(resampleContext);
     if (result < 0) {
       logError("avresample_open", result);
@@ -285,7 +292,7 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
     context->opaque = resampleContext;
   }
   int inSampleSize = av_get_bytes_per_sample(sampleFormat);
-  int outSampleSize = av_get_bytes_per_sample(OUTPUT_FORMAT);
+  int outSampleSize = av_get_bytes_per_sample(context->request_sample_fmt);
   int outSamples = avresample_get_out_samples(resampleContext, sampleCount);
   int bufferOutSize = outSampleSize * channelCount * outSamples;
   if (outSize + bufferOutSize > outputSize) {
@@ -16,6 +16,7 @@
 package com.google.android.exoplayer2.ext.flac;

 import android.os.Handler;
+import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.Format;
 import com.google.android.exoplayer2.audio.AudioProcessor;
 import com.google.android.exoplayer2.audio.AudioRendererEventListener;
@@ -52,6 +53,8 @@ public class LibflacAudioRenderer extends SimpleDecoderAudioRenderer {
     if (!FlacLibrary.isAvailable()
         || !MimeTypes.AUDIO_FLAC.equalsIgnoreCase(format.sampleMimeType)) {
       return FORMAT_UNSUPPORTED_TYPE;
+    } else if (!supportsOutputEncoding(C.ENCODING_PCM_16BIT)) {
+      return FORMAT_UNSUPPORTED_SUBTYPE;
     } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) {
       return FORMAT_UNSUPPORTED_DRM;
     } else {
@@ -76,6 +76,8 @@ public final class LibopusAudioRenderer extends SimpleDecoderAudioRenderer {
     if (!OpusLibrary.isAvailable()
         || !MimeTypes.AUDIO_OPUS.equalsIgnoreCase(format.sampleMimeType)) {
       return FORMAT_UNSUPPORTED_TYPE;
+    } else if (!supportsOutputEncoding(C.ENCODING_PCM_16BIT)) {
+      return FORMAT_UNSUPPORTED_SUBTYPE;
     } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) {
       return FORMAT_UNSUPPORTED_DRM;
     } else {
@@ -75,7 +75,7 @@ public interface AudioSink {
    *
    * @param bufferSize The size of the sink's buffer, in bytes.
    * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
-   *     PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, as the
+   *     PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
    *     buffered media can have a variable bitrate so the duration may be unknown.
    * @param elapsedSinceLastFeedMs The time since the sink was last fed data, in milliseconds.
    */
@@ -165,12 +165,12 @@ public interface AudioSink {
   void setListener(Listener listener);

   /**
-   * Returns whether it's possible to play audio in the specified encoding using passthrough.
+   * Returns whether it's possible to play audio in the specified encoding.
    *
    * @param encoding The audio encoding.
-   * @return Whether it's possible to play audio in the specified encoding using passthrough.
+   * @return Whether it's possible to play audio in the specified encoding.
    */
-  boolean isPassthroughSupported(@C.Encoding int encoding);
+  boolean isEncodingSupported(@C.Encoding int encoding);

   /**
    * Returns the playback position in the stream starting at zero, in microseconds, or
@@ -285,9 +285,16 @@ public final class DefaultAudioSink implements AudioSink {
   }

   @Override
-  public boolean isPassthroughSupported(@C.Encoding int encoding) {
-    return audioCapabilities != null && audioCapabilities.supportsEncoding(encoding);
+  public boolean isEncodingSupported(@C.Encoding int encoding) {
+    if (isEncodingPcm(encoding)) {
+      // AudioTrack supports 16-bit integer PCM output in all platform API versions, and float
+      // output from platform API version 21 only. Other integer PCM encodings are resampled by this
+      // sink to 16-bit PCM.
+      return encoding != C.ENCODING_PCM_FLOAT || Util.SDK_INT >= 21;
+    } else {
+      return audioCapabilities != null && audioCapabilities.supportsEncoding(encoding);
+    }
   }

   @Override
   public long getCurrentPositionUs(boolean sourceEnded) {
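Calling code can use the renamed method as a capability probe before opting in to float output. A minimal sketch, assuming the two-argument DefaultAudioSink constructor used elsewhere in this diff; the helper class itself is illustrative:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.audio.DefaultAudioSink;

/** Illustrative capability probe (not part of this commit). */
public final class FloatOutputProbe {

  private FloatOutputProbe() {}

  /** Returns whether the sink can accept 32-bit float PCM directly (true for PCM float on API 21+). */
  public static boolean supportsFloatPcm(AudioSink audioSink) {
    return audioSink.isEncodingSupported(C.ENCODING_PCM_FLOAT);
  }

  /** Probes a default sink with no capabilities hint and no extra processors. */
  public static boolean deviceSupportsFloatPcm() {
    return supportsFloatPcm(new DefaultAudioSink(null, new AudioProcessor[0]));
  }
}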
@@ -178,6 +178,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
         && mediaCodecSelector.getPassthroughDecoderInfo() != null) {
       return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | FORMAT_HANDLED;
     }
+    if ((MimeTypes.AUDIO_RAW.equals(mimeType) && !audioSink.isEncodingSupported(format.pcmEncoding))
+        || !audioSink.isEncodingSupported(C.ENCODING_PCM_16BIT)) {
+      // Assume the decoder outputs 16-bit PCM, unless the input is raw.
+      return FORMAT_UNSUPPORTED_SUBTYPE;
+    }
     boolean requiresSecureDecryption = false;
     DrmInitData drmInitData = format.drmInitData;
     if (drmInitData != null) {
@@ -220,14 +225,15 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media

   /**
    * Returns whether encoded audio passthrough should be used for playing back the input format.
-   * This implementation returns true if the {@link AudioSink} indicates that passthrough is
-   * supported.
+   * This implementation returns true if the {@link AudioSink} indicates that encoded audio output
+   * is supported.
    *
    * @param mimeType The type of input media.
    * @return Whether passthrough playback is supported.
    */
   protected boolean allowPassthrough(String mimeType) {
-    return audioSink.isPassthroughSupported(MimeTypes.getEncoding(mimeType));
+    @C.Encoding int encoding = MimeTypes.getEncoding(mimeType);
+    return encoding != C.ENCODING_INVALID && audioSink.isEncodingSupported(encoding);
   }

   @Override
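Because allowPassthrough remains protected, applications can still override the sink-driven default; a hypothetical subclass that always decodes to PCM might look like the sketch below (the subclass and the single-argument super constructor are assumptions, not part of this diff):

import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;

/** Hypothetical renderer that never uses encoded audio passthrough (illustrative only). */
public final class PcmOnlyAudioRenderer extends MediaCodecAudioRenderer {

  public PcmOnlyAudioRenderer(MediaCodecSelector mediaCodecSelector) {
    super(mediaCodecSelector); // Assumed constructor overload.
  }

  @Override
  protected boolean allowPassthrough(String mimeType) {
    // Decode to PCM even when the AudioSink reports the encoded format as supported.
    return false;
  }
}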
@@ -200,6 +200,16 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
   protected abstract int supportsFormatInternal(DrmSessionManager<ExoMediaCrypto> drmSessionManager,
       Format format);

+  /**
+   * Returns whether the audio sink can accept audio in the specified encoding.
+   *
+   * @param encoding The audio encoding.
+   * @return Whether the audio sink can accept audio in the specified encoding.
+   */
+  protected final boolean supportsOutputEncoding(@C.Encoding int encoding) {
+    return audioSink.isEncodingSupported(encoding);
+  }
+
   @Override
   public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
     if (outputStreamEnded) {
@@ -240,11 +240,12 @@ public final class MimeTypes {
   }

   /**
-   * Returns the {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type, or
-   * {@link C#ENCODING_INVALID} if the mapping cannot be established.
+   * Returns the {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type, if
+   * it is an encoded (non-PCM) audio format, or {@link C#ENCODING_INVALID} otherwise.
    *
    * @param mimeType The MIME type.
-   * @return The {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type.
+   * @return The {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type, or
+   *     {@link C#ENCODING_INVALID}.
    */
   public static @C.Encoding int getEncoding(String mimeType) {
     switch (mimeType) {
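The narrowed getEncoding contract is what makes the allowPassthrough change above safe: PCM and unrecognised MIME types now explicitly map to C.ENCODING_INVALID, so passthrough is only attempted for encoded formats. A quick illustration (the AC-3 mapping is assumed from the existing switch body, which this diff does not show):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.MimeTypes;

/** Illustrative check of the documented getEncoding contract (not part of this commit). */
public final class GetEncodingExample {
  public static void main(String[] args) {
    int ac3Encoding = MimeTypes.getEncoding(MimeTypes.AUDIO_AC3); // an encoded format (assumed to map to a passthrough encoding)
    int rawEncoding = MimeTypes.getEncoding(MimeTypes.AUDIO_RAW); // PCM, so C.ENCODING_INVALID per the new contract
    System.out.println(ac3Encoding != C.ENCODING_INVALID); // expected: true
    System.out.println(rawEncoding == C.ENCODING_INVALID); // expected: true, so allowPassthrough returns false
  }
}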