Use audio passthrough if possible, falling back to on-device codecs.

Remove MPEG TS stream filtering based on AudioCapabilities.

Pass AudioCapabilities to MediaCodecAudioTrackRenderer so it can choose between
passthrough/raw and decoding for AC-3 tracks.
This commit is contained in:
Oliver Woodman 2015-08-11 18:08:23 +01:00
parent 6085d185fa
commit b2206866f0
16 changed files with 225 additions and 234 deletions

View File

@ -119,7 +119,6 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
private String contentId;
private AudioCapabilitiesReceiver audioCapabilitiesReceiver;
private AudioCapabilities audioCapabilities;
// Activity lifecycle
@ -154,7 +153,6 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
return false;
}
});
audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(getApplicationContext(), this);
shutterView = findViewById(R.id.shutter);
debugRootView = findViewById(R.id.controls_root);
@ -179,15 +177,20 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
if (currentHandler != defaultCookieManager) {
CookieHandler.setDefault(defaultCookieManager);
}
audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(this, this);
audioCapabilitiesReceiver.register();
}
@Override
public void onResume() {
super.onResume();
configureSubtitleView();
// The player will be prepared on receiving audio capabilities.
audioCapabilitiesReceiver.register();
if (player == null) {
preparePlayer(true);
} else {
player.setBackgrounded(false);
}
}
@Override
@ -198,13 +201,13 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
} else {
player.setBackgrounded(true);
}
audioCapabilitiesReceiver.unregister();
shutterView.setVisibility(View.VISIBLE);
}
@Override
public void onDestroy() {
super.onDestroy();
audioCapabilitiesReceiver.unregister();
releasePlayer();
}
@ -213,7 +216,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
@Override
public void onClick(View view) {
if (view == retryButton) {
preparePlayer();
preparePlayer(true);
}
}
@ -221,14 +224,14 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
@Override
public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
boolean audioCapabilitiesChanged = !audioCapabilities.equals(this.audioCapabilities);
if (player == null || audioCapabilitiesChanged) {
this.audioCapabilities = audioCapabilities;
releasePlayer();
preparePlayer();
} else if (player != null) {
player.setBackgrounded(false);
if (player == null) {
return;
}
boolean backgrounded = player.getBackgrounded();
boolean playWhenReady = player.getPlayWhenReady();
releasePlayer();
preparePlayer(playWhenReady);
player.setBackgrounded(backgrounded);
}
// Internal methods
@ -241,9 +244,9 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
new SmoothStreamingTestMediaDrmCallback());
case TYPE_DASH:
return new DashRendererBuilder(this, userAgent, contentUri.toString(),
new WidevineTestMediaDrmCallback(contentId), audioCapabilities);
new WidevineTestMediaDrmCallback(contentId));
case TYPE_HLS:
return new HlsRendererBuilder(this, userAgent, contentUri.toString(), audioCapabilities);
return new HlsRendererBuilder(this, userAgent, contentUri.toString());
case TYPE_OTHER:
return new ExtractorRendererBuilder(this, userAgent, contentUri);
default:
@ -251,7 +254,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
}
}
private void preparePlayer() {
private void preparePlayer(boolean playWhenReady) {
if (player == null) {
player = new DemoPlayer(getRendererBuilder());
player.addListener(this);
@ -275,7 +278,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
updateButtonVisibilities();
}
player.setSurface(surfaceView.getHolder().getSurface());
player.setPlayWhenReady(true);
player.setPlayWhenReady(playWhenReady);
}
private void releasePlayer() {

View File

@ -15,7 +15,6 @@
*/
package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
@ -81,39 +80,24 @@ public class DashRendererBuilder implements RendererBuilder {
private static final int SECURITY_LEVEL_1 = 1;
private static final int SECURITY_LEVEL_3 = 3;
/**
* Passthrough audio formats (encodings) in order of decreasing priority.
*/
private static final int[] PASSTHROUGH_ENCODINGS_PRIORITY =
new int[] {C.ENCODING_E_AC3, C.ENCODING_AC3};
/**
* Passthrough audio codecs corresponding to the encodings in
* {@link #PASSTHROUGH_ENCODINGS_PRIORITY}.
*/
private static final String[] PASSTHROUGH_CODECS_PRIORITY =
new String[] {"ec-3", "ac-3"};
private final Context context;
private final String userAgent;
private final String url;
private final MediaDrmCallback drmCallback;
private final AudioCapabilities audioCapabilities;
private AsyncRendererBuilder currentAsyncBuilder;
public DashRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities) {
MediaDrmCallback drmCallback) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.drmCallback = drmCallback;
this.audioCapabilities = audioCapabilities;
}
@Override
public void buildRenderers(DemoPlayer player) {
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback,
audioCapabilities, player);
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback, player);
currentAsyncBuilder.init();
}
@ -131,7 +115,6 @@ public class DashRendererBuilder implements RendererBuilder {
private final Context context;
private final String userAgent;
private final MediaDrmCallback drmCallback;
private final AudioCapabilities audioCapabilities;
private final DemoPlayer player;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final UriDataSource manifestDataSource;
@ -141,11 +124,10 @@ public class DashRendererBuilder implements RendererBuilder {
private long elapsedRealtimeOffset;
public AsyncRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities, DemoPlayer player) {
MediaDrmCallback drmCallback, DemoPlayer player) {
this.context = context;
this.userAgent = userAgent;
this.drmCallback = drmCallback;
this.audioCapabilities = audioCapabilities;
this.player = player;
MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
manifestDataSource = new DefaultUriDataSource(context, userAgent);
@ -297,26 +279,6 @@ public class DashRendererBuilder implements RendererBuilder {
elapsedRealtimeOffset, mainHandler, player));
codecs.add(format.codecs);
}
if (audioCapabilities != null) {
// If there are any passthrough audio encodings available, select the highest priority
// supported format (e.g. E-AC-3) and remove other tracks.
for (int i = 0; i < PASSTHROUGH_CODECS_PRIORITY.length; i++) {
String codec = PASSTHROUGH_CODECS_PRIORITY[i];
int encoding = PASSTHROUGH_ENCODINGS_PRIORITY[i];
if (codecs.indexOf(codec) == -1 || !audioCapabilities.supportsEncoding(encoding)) {
continue;
}
for (int j = audioRepresentations.size() - 1; j >= 0; j--) {
if (!audioRepresentations.get(j).format.codecs.equals(codec)) {
audioTrackNameList.remove(j);
audioChunkSourceList.remove(j);
}
}
break;
}
}
}
// Build the audio renderer.
@ -335,7 +297,7 @@ public class DashRendererBuilder implements RendererBuilder {
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
mainHandler, player, AudioCapabilities.getCapabilities(context));
}
// Build the text chunk sources.

View File

@ -267,6 +267,10 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
}
public boolean getBackgrounded() {
return backgrounded;
}
public void setBackgrounded(boolean backgrounded) {
if (this.backgrounded == backgrounded) {
return;

View File

@ -18,6 +18,7 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorSampleSource;
@ -64,7 +65,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player);
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
player.getMainHandler().getLooper());

View File

@ -57,22 +57,18 @@ public class HlsRendererBuilder implements RendererBuilder {
private final Context context;
private final String userAgent;
private final String url;
private final AudioCapabilities audioCapabilities;
private AsyncRendererBuilder currentAsyncBuilder;
public HlsRendererBuilder(Context context, String userAgent, String url,
AudioCapabilities audioCapabilities) {
public HlsRendererBuilder(Context context, String userAgent, String url) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.audioCapabilities = audioCapabilities;
}
@Override
public void buildRenderers(DemoPlayer player) {
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, audioCapabilities,
player);
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, player);
currentAsyncBuilder.init();
}
@ -89,18 +85,15 @@ public class HlsRendererBuilder implements RendererBuilder {
private final Context context;
private final String userAgent;
private final String url;
private final AudioCapabilities audioCapabilities;
private final DemoPlayer player;
private final ManifestFetcher<HlsPlaylist> playlistFetcher;
private boolean canceled;
public AsyncRendererBuilder(Context context, String userAgent, String url,
AudioCapabilities audioCapabilities, DemoPlayer player) {
public AsyncRendererBuilder(Context context, String userAgent, String url, DemoPlayer player) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.audioCapabilities = audioCapabilities;
this.player = player;
HlsPlaylistParser parser = new HlsPlaylistParser();
playlistFetcher = new ManifestFetcher<>(url, new DefaultUriDataSource(context, userAgent),
@ -152,12 +145,13 @@ public class HlsRendererBuilder implements RendererBuilder {
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
sampleSource, new Id3Parser(), player, mainHandler.getLooper());
Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,

View File

@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator;
@ -230,7 +231,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
mainHandler, player, AudioCapabilities.getCapabilities(context));
}
// Build the text renderer.

View File

@ -370,7 +370,7 @@ public final class LibopusAudioTrackRenderer extends SampleSourceTrackRenderer
int result = readSource(positionUs, formatHolder, null, false);
if (result == SampleSource.FORMAT_READ) {
format = formatHolder.format;
audioTrack.reconfigure(format.getFrameworkMediaFormatV16());
audioTrack.reconfigure(format.getFrameworkMediaFormatV16(), false);
return true;
}
return false;

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.util.MimeTypes;
@ -70,6 +71,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
private final EventListener eventListener;
private final AudioTrack audioTrack;
private android.media.MediaFormat passthroughMediaFormat;
private int audioSessionId;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
@ -122,19 +124,51 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
*/
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener,
null);
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
*/
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
AudioCapabilities audioCapabilities) {
super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
this.eventListener = eventListener;
this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
this.audioTrack = new AudioTrack();
this.audioTrack = new AudioTrack(audioCapabilities);
}
@Override
protected DecoderInfo getDecoderInfo(String mimeType, boolean requiresSecureDecoder)
throws DecoderQueryException {
if (MimeTypes.isPassthroughAudio(mimeType)) {
return new DecoderInfo(RAW_DECODER_NAME, true);
}
return super.getDecoderInfo(mimeType, requiresSecureDecoder);
return allowPassthrough(mimeType) ? new DecoderInfo(RAW_DECODER_NAME, true)
: super.getDecoderInfo(mimeType, requiresSecureDecoder);
}
/**
* Returns whether encoded audio passthrough may be used for playing back the input format. This
* implementation returns true if the {@link AudioTrack}'s audio capabilities indicate that
* passthrough is supported.
*
* @param mimeType The type of input media.
* @return True if passthrough playback should be used. False otherwise.
*/
protected boolean allowPassthrough(String mimeType) {
return audioTrack.isPassthroughSupported(mimeType);
}
@Override
@ -146,8 +180,10 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
format.setString(android.media.MediaFormat.KEY_MIME, MimeTypes.AUDIO_RAW);
codec.configure(format, null, crypto, 0);
format.setString(android.media.MediaFormat.KEY_MIME, mimeType);
passthroughMediaFormat = format;
} else {
codec.configure(format, null, crypto, 0);
passthroughMediaFormat = null;
}
}
@ -169,13 +205,9 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
}
@Override
protected void onOutputFormatChanged(MediaFormat inputFormat,
android.media.MediaFormat outputFormat) {
if (MimeTypes.isPassthroughAudio(inputFormat.mimeType)) {
audioTrack.reconfigure(inputFormat.getFrameworkMediaFormatV16());
} else {
audioTrack.reconfigure(outputFormat);
}
protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
boolean passthrough = passthroughMediaFormat != null;
audioTrack.reconfigure(passthrough ? passthroughMediaFormat : outputFormat, passthrough);
}
/**

View File

@ -697,11 +697,9 @@ public abstract class MediaCodecTrackRenderer extends SampleSourceTrackRenderer
* <p>
* The default implementation is a no-op.
*
* @param inputFormat The format of media input to the codec.
* @param outputFormat The new output format.
*/
protected void onOutputFormatChanged(MediaFormat inputFormat,
android.media.MediaFormat outputFormat) {
protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
// Do nothing.
}
@ -775,7 +773,7 @@ public abstract class MediaCodecTrackRenderer extends SampleSourceTrackRenderer
}
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
onOutputFormatChanged(format, codec.getOutputFormat());
onOutputFormatChanged(codec.getOutputFormat());
codecCounters.outputFormatChangedCount++;
return true;
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {

View File

@ -383,8 +383,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
}
@Override
protected void onOutputFormatChanged(MediaFormat inputFormat,
android.media.MediaFormat outputFormat) {
protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
&& outputFormat.containsKey(KEY_CROP_LEFT) && outputFormat.containsKey(KEY_CROP_BOTTOM)
&& outputFormat.containsKey(KEY_CROP_TOP);

View File

@ -15,7 +15,13 @@
*/
package com.google.android.exoplayer.audio;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioFormat;
import android.media.AudioManager;
import java.util.Arrays;
@ -25,6 +31,34 @@ import java.util.Arrays;
@TargetApi(21)
public final class AudioCapabilities {
/**
* Default to stereo PCM on SDK < 21 and when HDMI is unplugged.
*/
private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
/**
* Gets the current audio capabilities. Note that to be notified when audio capabilities change,
* you can create an instance of {@link AudioCapabilitiesReceiver} and register a listener.
*
* @param context Context for receiving the initial broadcast.
* @return Current audio capabilities for the device.
*/
@SuppressWarnings("InlinedApi")
public static AudioCapabilities getCapabilities(Context context) {
return getCapabilities(
context.registerReceiver(null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG)));
}
@SuppressLint("InlinedApi")
/* package */ static AudioCapabilities getCapabilities(Intent intent) {
if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
return DEFAULT_AUDIO_CAPABILITIES;
}
return new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0));
}
private final int[] supportedEncodings;
private final int maxChannelCount;
@ -36,7 +70,7 @@ public final class AudioCapabilities {
* {@code ENCODING_*} constants.
* @param maxChannelCount The maximum number of audio channels that can be played simultaneously.
*/
public AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
/* package */ AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
if (supportedEncodings != null) {
this.supportedEncodings = Arrays.copyOf(supportedEncodings, supportedEncodings.length);
Arrays.sort(this.supportedEncodings);

View File

@ -18,41 +18,42 @@ package com.google.android.exoplayer.audio;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioFormat;
import android.media.AudioManager;
/**
* Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
* receiving notifications, and {@link #unregister} to stop.
* (or resume) receiving notifications, and {@link #unregister} to stop.
*/
public final class AudioCapabilitiesReceiver {
/** Listener notified when audio capabilities change. */
/**
* Listener notified when audio capabilities change.
*/
public interface Listener {
/** Called when the audio capabilities change. */
/**
* Called when the audio capabilities change.
*
* @param audioCapabilities Current audio capabilities for the device.
*/
void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);
}
/** Default to stereo PCM on SDK < 21 and when HDMI is unplugged. */
private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
private final Context context;
private final Listener listener;
private final BroadcastReceiver receiver;
/* package */ AudioCapabilities audioCapabilities;
/**
* Constructs a new audio capabilities receiver.
*
* @param context Application context for registering to receive broadcasts.
* @param context Context for registering to receive broadcasts.
* @param listener Listener to notify when audio capabilities change.
*/
public AudioCapabilitiesReceiver(Context context, Listener listener) {
@ -62,41 +63,40 @@ public final class AudioCapabilitiesReceiver {
}
/**
* Registers to notify the listener when audio capabilities change. The listener will immediately
* receive the current audio capabilities. It is important to call {@link #unregister} so that
* the listener can be garbage collected.
* Registers to notify the listener when audio capabilities change. The current capabilities will
* be returned. It is important to call {@link #unregister} so that the listener can be garbage
* collected.
*
* @return Current audio capabilities for the device.
*/
@TargetApi(21)
public void register() {
@SuppressWarnings("InlinedApi")
public AudioCapabilities register() {
Intent stickyIntent = receiver == null ? null
: context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
listener.onAudioCapabilitiesChanged(getCapabilities(stickyIntent));
audioCapabilities = AudioCapabilities.getCapabilities(stickyIntent);
return audioCapabilities;
}
/** Unregisters to stop notifying the listener when audio capabilities change. */
/**
* Unregisters to stop notifying the listener when audio capabilities change.
*/
public void unregister() {
if (receiver != null) {
context.unregisterReceiver(receiver);
}
}
@SuppressLint("InlinedApi")
/* package */ AudioCapabilities getCapabilities(Intent intent) {
if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
return DEFAULT_AUDIO_CAPABILITIES;
}
return new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0));
}
private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (isInitialStickyBroadcast()) {
return;
if (!isInitialStickyBroadcast()) {
AudioCapabilities newAudioCapabilities = AudioCapabilities.getCapabilities(intent);
if (!newAudioCapabilities.equals(audioCapabilities)) {
audioCapabilities = newAudioCapabilities;
listener.onAudioCapabilitiesChanged(newAudioCapabilities);
}
}
listener.onAudioCapabilitiesChanged(getCapabilities(intent));
}
}

View File

@ -134,8 +134,10 @@ public final class AudioTrack {
*/
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
/** Value for ac3Bitrate before the bitrate has been calculated. */
private static final int UNKNOWN_AC3_BITRATE = 0;
/**
* Value for {@link #passthroughBitrate} before the bitrate has been calculated.
*/
private static final int UNKNOWN_BITRATE = 0;
private static final int START_NOT_SET = 0;
private static final int START_IN_SYNC = 1;
@ -162,6 +164,7 @@ public final class AudioTrack {
*/
public static boolean failOnSpuriousAudioTimestamp = false;
private final AudioCapabilities audioCapabilities;
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
private final AudioTrackUtil audioTrackUtil;
@ -196,12 +199,25 @@ public final class AudioTrack {
private int temporaryBufferOffset;
private int temporaryBufferSize;
private boolean isAc3;
/** Bitrate measured in kilobits per second, if {@link #isAc3} is true. */
private int ac3Bitrate;
/**
* Bitrate measured in kilobits per second, if {@link #isPassthrough()} returns true.
*/
private int passthroughBitrate;
/**
* Creates an audio track with default audio capabilities (no encoded audio passthrough support).
*/
public AudioTrack() {
this(null);
}
/**
* Creates an audio track using the specified audio capabilities.
*
* @param audioCapabilities The current audio playback capabilities.
*/
public AudioTrack(AudioCapabilities audioCapabilities) {
this.audioCapabilities = audioCapabilities;
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) {
try {
@ -221,6 +237,15 @@ public final class AudioTrack {
startMediaTimeState = START_NOT_SET;
}
/**
* Returns whether it is possible to play back input audio in the specified format using encoded
* audio passthrough.
*/
public boolean isPassthroughSupported(String mimeType) {
return audioCapabilities != null
&& audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType));
}
/**
* Returns whether the audio track has been successfully initialized via {@link #initialize} and
* not yet {@link #reset}.
@ -331,7 +356,7 @@ public final class AudioTrack {
}
}
audioTrackUtil.reconfigure(audioTrack, isAc3);
audioTrackUtil.reconfigure(audioTrack, isPassthrough());
setVolume(volume);
return sessionId;
@ -340,19 +365,23 @@ public final class AudioTrack {
/**
* Reconfigures the audio track to play back media in {@code format}, inferring a buffer size from
* the format.
*
* @param format Specifies the channel count and sample rate to play back.
* @param passthrough Whether to play back using a passthrough encoding.
*/
public void reconfigure(MediaFormat format) {
reconfigure(format, 0);
public void reconfigure(MediaFormat format, boolean passthrough) {
reconfigure(format, passthrough, 0);
}
/**
* Reconfigures the audio track to play back media in {@code format}.
*
* @param format Specifies the channel count and sample rate to play back.
* @param passthrough Whether to play back using a passthrough encoding.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to use a
* size inferred from the format.
*/
public void reconfigure(MediaFormat format, int specifiedBufferSize) {
public void reconfigure(MediaFormat format, boolean passthrough, int specifiedBufferSize) {
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int channelConfig;
switch (channelCount) {
@ -371,16 +400,12 @@ public final class AudioTrack {
default:
throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
}
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
String mimeType = format.getString(MediaFormat.KEY_MIME);
// TODO: Does channelConfig determine channelCount?
int encoding = MimeTypes.getEncodingForMimeType(mimeType);
boolean isAc3 = encoding == C.ENCODING_AC3 || encoding == C.ENCODING_E_AC3;
int encoding = passthrough ? getEncodingForMimeType(mimeType) : AudioFormat.ENCODING_PCM_16BIT;
if (isInitialized() && this.sampleRate == sampleRate && this.channelConfig == channelConfig
&& !this.isAc3 && !isAc3) {
// We already have an existing audio track with the correct sample rate and channel config.
&& this.encoding == encoding) {
// We already have an audio track with the correct sample rate, encoding and channel config.
return;
}
@ -389,8 +414,7 @@ public final class AudioTrack {
this.encoding = encoding;
this.sampleRate = sampleRate;
this.channelConfig = channelConfig;
this.isAc3 = isAc3;
ac3Bitrate = UNKNOWN_AC3_BITRATE; // Calculated on receiving the first buffer if isAc3 is true.
passthroughBitrate = UNKNOWN_BITRATE;
frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
minBufferSize = android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);
Assertions.checkState(minBufferSize != android.media.AudioTrack.ERROR_BAD_VALUE);
@ -446,7 +470,7 @@ public final class AudioTrack {
}
// Workarounds for issues with AC-3 passthrough AudioTracks on API versions 21/22:
if (Util.SDK_INT <= 22 && isAc3) {
if (Util.SDK_INT <= 22 && isPassthrough()) {
// An AC-3 audio track continues to play data written while it is paused. Stop writing so its
// buffer empties. See [Internal: b/18899620].
if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_PAUSED) {
@ -464,8 +488,8 @@ public final class AudioTrack {
int result = 0;
if (temporaryBufferSize == 0) {
if (isAc3 && ac3Bitrate == UNKNOWN_AC3_BITRATE) {
ac3Bitrate = Ac3Util.getBitrate(size, sampleRate);
if (isPassthrough() && passthroughBitrate == UNKNOWN_BITRATE) {
passthroughBitrate = Ac3Util.getBitrate(size, sampleRate);
}
// This is the first time we've seen this {@code buffer}.
@ -673,9 +697,10 @@ public final class AudioTrack {
}
}
// Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack, as the
// returned values cause audio/video synchronization to be incorrect.
if (!isAc3 && systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
// Don't sample the timestamp and latency if this is a passthrough AudioTrack, as the returned
// values cause audio/video synchronization to be incorrect.
if (!isPassthrough()
&& systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
audioTimestampSet = audioTrackUtil.updateTimestamp();
if (audioTimestampSet) {
// Perform sanity checks on the timestamp.
@ -755,9 +780,9 @@ public final class AudioTrack {
}
private long bytesToFrames(long byteCount) {
if (isAc3) {
return
ac3Bitrate == UNKNOWN_AC3_BITRATE ? 0L : byteCount * 8 * sampleRate / (1000 * ac3Bitrate);
if (isPassthrough()) {
return passthroughBitrate == UNKNOWN_BITRATE
? 0L : byteCount * 8 * sampleRate / (1000 * passthroughBitrate);
} else {
return byteCount / frameSize;
}
@ -780,6 +805,20 @@ public final class AudioTrack {
lastTimestampSampleTimeUs = 0;
}
private boolean isPassthrough() {
return encoding == C.ENCODING_AC3 || encoding == C.ENCODING_E_AC3;
}
private static int getEncodingForMimeType(String mimeType) {
if (MimeTypes.AUDIO_AC3.equals(mimeType)) {
return C.ENCODING_AC3;
}
if (MimeTypes.AUDIO_EC3.equals(mimeType)) {
return C.ENCODING_E_AC3;
}
return AudioFormat.ENCODING_INVALID;
}
/**
* Wraps an {@link android.media.AudioTrack} to expose useful utility methods.
*/

View File

@ -16,7 +16,6 @@
package com.google.android.exoplayer.extractor.ts;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorInput;
import com.google.android.exoplayer.extractor.ExtractorOutput;
@ -59,7 +58,6 @@ public final class TsExtractor implements Extractor {
private final boolean idrKeyframesOnly;
private final long firstSampleTimestampUs;
/* package */ final SparseBooleanArray streamTypes;
/* package */ final SparseBooleanArray allowedPassthroughStreamTypes;
/* package */ final SparseArray<TsPayloadReader> tsPayloadReaders; // Indexed by pid
// Accessed only by the loading thread.
@ -73,21 +71,15 @@ public final class TsExtractor implements Extractor {
}
public TsExtractor(long firstSampleTimestampUs) {
this(firstSampleTimestampUs, null);
this(firstSampleTimestampUs, true);
}
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities) {
this(firstSampleTimestampUs, audioCapabilities, true);
}
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities,
boolean idrKeyframesOnly) {
public TsExtractor(long firstSampleTimestampUs, boolean idrKeyframesOnly) {
this.firstSampleTimestampUs = firstSampleTimestampUs;
this.idrKeyframesOnly = idrKeyframesOnly;
tsScratch = new ParsableBitArray(new byte[3]);
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
streamTypes = new SparseBooleanArray();
allowedPassthroughStreamTypes = getPassthroughStreamTypes(audioCapabilities);
tsPayloadReaders = new SparseArray<>();
tsPayloadReaders.put(TS_PAT_PID, new PatReader());
lastPts = Long.MIN_VALUE;
@ -195,24 +187,6 @@ public final class TsExtractor implements Extractor {
return timeUs + timestampOffsetUs;
}
/**
* Returns a sparse boolean array of stream types that can be played back based on
* {@code audioCapabilities}.
*/
private static SparseBooleanArray getPassthroughStreamTypes(AudioCapabilities audioCapabilities) {
SparseBooleanArray streamTypes = new SparseBooleanArray();
if (audioCapabilities != null) {
if (audioCapabilities.supportsEncoding(C.ENCODING_AC3)) {
streamTypes.put(TS_STREAM_TYPE_ATSC_AC3, true);
}
if (audioCapabilities.supportsEncoding(C.ENCODING_E_AC3)) {
// TODO: Uncomment when Ac3Reader supports enhanced AC-3.
// streamTypes.put(TS_STREAM_TYPE_ATSC_E_AC3, true);
}
}
return streamTypes;
}
/**
* Parses TS packet payload data.
*/
@ -365,9 +339,6 @@ public final class TsExtractor implements Extractor {
break;
case TS_STREAM_TYPE_ATSC_E_AC3:
case TS_STREAM_TYPE_ATSC_AC3:
if (!allowedPassthroughStreamTypes.get(streamType)) {
continue;
}
pesPayloadReader = new Ac3Reader(output.track(streamType));
break;
case TS_STREAM_TYPE_H264:

View File

@ -17,7 +17,6 @@ package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.DataChunk;
@ -124,7 +123,6 @@ public class HlsChunkSource {
private final int maxHeight;
private final long minBufferDurationToSwitchUpUs;
private final long maxBufferDurationToSwitchDownUs;
private final AudioCapabilities audioCapabilities;
// A list of variants considered during playback, ordered by decreasing bandwidth. The following
// three arrays are of the same length and are ordered in the same way (i.e. variantPlaylists[i],
@ -147,11 +145,9 @@ public class HlsChunkSource {
private byte[] encryptionIv;
public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode,
AudioCapabilities audioCapabilities) {
BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode) {
this(dataSource, playlistUrl, playlist, bandwidthMeter, variantIndices, adaptiveMode,
DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS,
audioCapabilities);
DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
}
/**
@ -169,17 +165,13 @@ public class HlsChunkSource {
* for a switch to a higher quality variant to be considered.
* @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered
* for a switch to a lower quality variant to be considered.
* @param audioCapabilities The audio capabilities for playback on this device, or {@code null} if
* the default capabilities should be assumed.
*/
public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode,
long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs,
AudioCapabilities audioCapabilities) {
long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) {
this.dataSource = dataSource;
this.bandwidthMeter = bandwidthMeter;
this.adaptiveMode = adaptiveMode;
this.audioCapabilities = audioCapabilities;
minBufferDurationToSwitchUpUs = minBufferDurationToSwitchUpMs * 1000;
maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000;
baseUri = playlist.baseUri;
@ -356,8 +348,7 @@ public class HlsChunkSource {
if (previousTsChunk == null || segment.discontinuity || !format.equals(previousTsChunk.format)
|| liveDiscontinuity) {
Extractor extractor = chunkUri.getLastPathSegment().endsWith(AAC_FILE_EXTENSION)
? new AdtsExtractor(startTimeUs)
: new TsExtractor(startTimeUs, audioCapabilities);
? new AdtsExtractor(startTimeUs) : new TsExtractor(startTimeUs);
extractorWrapper = new HlsExtractorWrapper(trigger, format, startTimeUs, extractor,
switchingVariantSpliced);
} else {

View File

@ -15,11 +15,6 @@
*/
package com.google.android.exoplayer.util;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.audio.AudioCapabilities;
import android.media.AudioFormat;
/**
* Defines common MIME types and helper methods.
*/
@ -128,37 +123,4 @@ public final class MimeTypes {
return mimeType.equals(APPLICATION_TTML);
}
/**
* Returns the output audio encoding that will result from processing input in {@code mimeType}.
* For non-passthrough audio formats, this is always {@link AudioFormat#ENCODING_PCM_16BIT}. For
* passthrough formats it will be one of {@link AudioFormat}'s other {@code ENCODING_*} constants.
* For non-audio formats, {@link AudioFormat#ENCODING_INVALID} will be returned.
*
* @param mimeType The MIME type of media that will be decoded (or passed through).
* @return The corresponding {@link AudioFormat} encoding.
*/
public static int getEncodingForMimeType(String mimeType) {
if (AUDIO_AC3.equals(mimeType)) {
return C.ENCODING_AC3;
}
if (AUDIO_EC3.equals(mimeType)) {
return C.ENCODING_E_AC3;
}
// All other audio formats will be decoded to 16-bit PCM.
return isAudio(mimeType) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_INVALID;
}
/**
* Returns whether the specified {@code mimeType} represents audio that can be played via
* passthrough if the device supports it.
*
* @param mimeType The MIME type of input media.
* @return Whether the audio can be played via passthrough. If this method returns {@code true},
* it is still necessary to check the {@link AudioCapabilities} for device support.
*/
public static boolean isPassthroughAudio(String mimeType) {
return AUDIO_AC3.equals(mimeType) || AUDIO_EC3.equals(mimeType);
}
}