Mirror of https://github.com/androidx/media.git (synced 2025-05-08 08:00:49 +08:00)
Use audio passthrough if possible, falling back to on-device codecs.
Remove MPEG TS stream filtering based on AudioCapabilities. Pass AudioCapabilities to MediaCodecAudioTrackRenderer so it can choose between passthrough/raw and decoding for AC-3 tracks.
parent 6085d185fa
commit b2206866f0
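In practical terms, the demo's renderer builders no longer take an AudioCapabilities constructor argument. Instead they query the device at build time and pass the result to the audio renderer, which then decides between passthrough and MediaCodec decoding per track. A minimal sketch of the new wiring (sampleSource, drmSessionManager, mainHandler and player are placeholders for objects a renderer builder already has in scope):

    // Snapshot of the current capabilities, read from the sticky ACTION_HDMI_AUDIO_PLUG broadcast.
    AudioCapabilities audioCapabilities = AudioCapabilities.getCapabilities(context);
    // The renderer selects encoded passthrough for AC-3/E-AC-3 when the capabilities allow it,
    // and otherwise falls back to decoding through a platform MediaCodec as before.
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
        sampleSource, drmSessionManager, true, mainHandler, player, audioCapabilities);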
PlayerActivity.java

@@ -119,7 +119,6 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
   private String contentId;
 
   private AudioCapabilitiesReceiver audioCapabilitiesReceiver;
-  private AudioCapabilities audioCapabilities;
 
   // Activity lifecycle
 
@@ -154,7 +153,6 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
         return false;
       }
     });
-    audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(getApplicationContext(), this);
 
     shutterView = findViewById(R.id.shutter);
     debugRootView = findViewById(R.id.controls_root);
@@ -179,15 +177,20 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
     if (currentHandler != defaultCookieManager) {
       CookieHandler.setDefault(defaultCookieManager);
     }
 
+    audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(this, this);
+    audioCapabilitiesReceiver.register();
   }
 
   @Override
   public void onResume() {
     super.onResume();
     configureSubtitleView();
-    // The player will be prepared on receiving audio capabilities.
-    audioCapabilitiesReceiver.register();
+    if (player == null) {
+      preparePlayer(true);
+    } else {
+      player.setBackgrounded(false);
+    }
   }
 
   @Override
@@ -198,13 +201,13 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
     } else {
       player.setBackgrounded(true);
     }
-    audioCapabilitiesReceiver.unregister();
     shutterView.setVisibility(View.VISIBLE);
   }
 
   @Override
   public void onDestroy() {
     super.onDestroy();
+    audioCapabilitiesReceiver.unregister();
     releasePlayer();
   }
 
@@ -213,7 +216,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
   @Override
   public void onClick(View view) {
     if (view == retryButton) {
-      preparePlayer();
+      preparePlayer(true);
     }
   }
 
@@ -221,14 +224,14 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 
   @Override
   public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
-    boolean audioCapabilitiesChanged = !audioCapabilities.equals(this.audioCapabilities);
-    if (player == null || audioCapabilitiesChanged) {
-      this.audioCapabilities = audioCapabilities;
-      releasePlayer();
-      preparePlayer();
-    } else if (player != null) {
-      player.setBackgrounded(false);
+    if (player == null) {
+      return;
     }
+    boolean backgrounded = player.getBackgrounded();
+    boolean playWhenReady = player.getPlayWhenReady();
+    releasePlayer();
+    preparePlayer(playWhenReady);
+    player.setBackgrounded(backgrounded);
   }
 
   // Internal methods
@@ -241,9 +244,9 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
             new SmoothStreamingTestMediaDrmCallback());
       case TYPE_DASH:
         return new DashRendererBuilder(this, userAgent, contentUri.toString(),
-            new WidevineTestMediaDrmCallback(contentId), audioCapabilities);
+            new WidevineTestMediaDrmCallback(contentId));
       case TYPE_HLS:
-        return new HlsRendererBuilder(this, userAgent, contentUri.toString(), audioCapabilities);
+        return new HlsRendererBuilder(this, userAgent, contentUri.toString());
       case TYPE_OTHER:
         return new ExtractorRendererBuilder(this, userAgent, contentUri);
       default:
@@ -251,7 +254,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
     }
   }
 
-  private void preparePlayer() {
+  private void preparePlayer(boolean playWhenReady) {
    if (player == null) {
      player = new DemoPlayer(getRendererBuilder());
      player.addListener(this);
@@ -275,7 +278,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
      updateButtonVisibilities();
    }
    player.setSurface(surfaceView.getHolder().getSurface());
-    player.setPlayWhenReady(true);
+    player.setPlayWhenReady(playWhenReady);
  }
 
  private void releasePlayer() {
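The PlayerActivity changes above boil down to the following lifecycle pattern (a condensed sketch, not a literal excerpt of the demo code):

    // onCreate: listen for HDMI plug/unplug events for the lifetime of the activity.
    audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(this, this);
    audioCapabilitiesReceiver.register();

    // onAudioCapabilitiesChanged: rebuild the player so the renderer builder picks up the new
    // capabilities, preserving playback state across the rebuild.
    boolean backgrounded = player.getBackgrounded();
    boolean playWhenReady = player.getPlayWhenReady();
    releasePlayer();
    preparePlayer(playWhenReady);
    player.setBackgrounded(backgrounded);

    // onDestroy: stop listening and release the player.
    audioCapabilitiesReceiver.unregister();
    releasePlayer();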
DashRendererBuilder.java

@@ -15,7 +15,6 @@
  */
 package com.google.android.exoplayer.demo.player;
 
-import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
@@ -81,39 +80,24 @@ public class DashRendererBuilder implements RendererBuilder {
   private static final int SECURITY_LEVEL_1 = 1;
   private static final int SECURITY_LEVEL_3 = 3;
 
-  /**
-   * Passthrough audio formats (encodings) in order of decreasing priority.
-   */
-  private static final int[] PASSTHROUGH_ENCODINGS_PRIORITY =
-      new int[] {C.ENCODING_E_AC3, C.ENCODING_AC3};
-  /**
-   * Passthrough audio codecs corresponding to the encodings in
-   * {@link #PASSTHROUGH_ENCODINGS_PRIORITY}.
-   */
-  private static final String[] PASSTHROUGH_CODECS_PRIORITY =
-      new String[] {"ec-3", "ac-3"};
-
   private final Context context;
   private final String userAgent;
   private final String url;
   private final MediaDrmCallback drmCallback;
-  private final AudioCapabilities audioCapabilities;
 
   private AsyncRendererBuilder currentAsyncBuilder;
 
   public DashRendererBuilder(Context context, String userAgent, String url,
-      MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities) {
+      MediaDrmCallback drmCallback) {
     this.context = context;
     this.userAgent = userAgent;
     this.url = url;
     this.drmCallback = drmCallback;
-    this.audioCapabilities = audioCapabilities;
   }
 
   @Override
   public void buildRenderers(DemoPlayer player) {
-    currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback,
-        audioCapabilities, player);
+    currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback, player);
     currentAsyncBuilder.init();
   }
 
@@ -131,7 +115,6 @@ public class DashRendererBuilder implements RendererBuilder {
     private final Context context;
     private final String userAgent;
     private final MediaDrmCallback drmCallback;
-    private final AudioCapabilities audioCapabilities;
     private final DemoPlayer player;
     private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
     private final UriDataSource manifestDataSource;
@@ -141,11 +124,10 @@ public class DashRendererBuilder implements RendererBuilder {
     private long elapsedRealtimeOffset;
 
     public AsyncRendererBuilder(Context context, String userAgent, String url,
-        MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities, DemoPlayer player) {
+        MediaDrmCallback drmCallback, DemoPlayer player) {
       this.context = context;
       this.userAgent = userAgent;
       this.drmCallback = drmCallback;
-      this.audioCapabilities = audioCapabilities;
       this.player = player;
       MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
       manifestDataSource = new DefaultUriDataSource(context, userAgent);
@@ -297,26 +279,6 @@ public class DashRendererBuilder implements RendererBuilder {
               elapsedRealtimeOffset, mainHandler, player));
           codecs.add(format.codecs);
         }
-
-        if (audioCapabilities != null) {
-          // If there are any passthrough audio encodings available, select the highest priority
-          // supported format (e.g. E-AC-3) and remove other tracks.
-          for (int i = 0; i < PASSTHROUGH_CODECS_PRIORITY.length; i++) {
-            String codec = PASSTHROUGH_CODECS_PRIORITY[i];
-            int encoding = PASSTHROUGH_ENCODINGS_PRIORITY[i];
-            if (codecs.indexOf(codec) == -1 || !audioCapabilities.supportsEncoding(encoding)) {
-              continue;
-            }
-
-            for (int j = audioRepresentations.size() - 1; j >= 0; j--) {
-              if (!audioRepresentations.get(j).format.codecs.equals(codec)) {
-                audioTrackNameList.remove(j);
-                audioChunkSourceList.remove(j);
-              }
-            }
-            break;
-          }
-        }
       }
 
       // Build the audio renderer.
@@ -335,7 +297,7 @@ public class DashRendererBuilder implements RendererBuilder {
             AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
             DemoPlayer.TYPE_AUDIO);
        audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
-            mainHandler, player);
+            mainHandler, player, AudioCapabilities.getCapabilities(context));
      }
 
      // Build the text chunk sources.
DemoPlayer.java

@@ -267,6 +267,10 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
     }
   }
 
+  public boolean getBackgrounded() {
+    return backgrounded;
+  }
+
   public void setBackgrounded(boolean backgrounded) {
     if (this.backgrounded == backgrounded) {
       return;
ExtractorRendererBuilder.java

@@ -18,6 +18,7 @@ package com.google.android.exoplayer.demo.player;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.TrackRenderer;
+import com.google.android.exoplayer.audio.AudioCapabilities;
 import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
 import com.google.android.exoplayer.extractor.Extractor;
 import com.google.android.exoplayer.extractor.ExtractorSampleSource;
@@ -64,7 +65,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
         null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
         player, 50);
     MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
-        null, true, player.getMainHandler(), player);
+        null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
     TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
         player.getMainHandler().getLooper());
 
HlsRendererBuilder.java

@@ -57,22 +57,18 @@ public class HlsRendererBuilder implements RendererBuilder {
   private final Context context;
   private final String userAgent;
   private final String url;
-  private final AudioCapabilities audioCapabilities;
 
   private AsyncRendererBuilder currentAsyncBuilder;
 
-  public HlsRendererBuilder(Context context, String userAgent, String url,
-      AudioCapabilities audioCapabilities) {
+  public HlsRendererBuilder(Context context, String userAgent, String url) {
     this.context = context;
     this.userAgent = userAgent;
     this.url = url;
-    this.audioCapabilities = audioCapabilities;
   }
 
   @Override
   public void buildRenderers(DemoPlayer player) {
-    currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, audioCapabilities,
-        player);
+    currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, player);
     currentAsyncBuilder.init();
   }
 
@@ -89,18 +85,15 @@ public class HlsRendererBuilder implements RendererBuilder {
     private final Context context;
     private final String userAgent;
     private final String url;
-    private final AudioCapabilities audioCapabilities;
     private final DemoPlayer player;
     private final ManifestFetcher<HlsPlaylist> playlistFetcher;
 
     private boolean canceled;
 
-    public AsyncRendererBuilder(Context context, String userAgent, String url,
-        AudioCapabilities audioCapabilities, DemoPlayer player) {
+    public AsyncRendererBuilder(Context context, String userAgent, String url, DemoPlayer player) {
       this.context = context;
       this.userAgent = userAgent;
       this.url = url;
-      this.audioCapabilities = audioCapabilities;
       this.player = player;
       HlsPlaylistParser parser = new HlsPlaylistParser();
       playlistFetcher = new ManifestFetcher<>(url, new DefaultUriDataSource(context, userAgent),
@@ -152,12 +145,13 @@ public class HlsRendererBuilder implements RendererBuilder {
 
       DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
       HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
-          variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
+          variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
       HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
           BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
       MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
           MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
-      MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
+      MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
+          null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
       MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
           sampleSource, new Id3Parser(), player, mainHandler.getLooper());
       Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
SmoothStreamingRendererBuilder.java

@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.TrackRenderer;
+import com.google.android.exoplayer.audio.AudioCapabilities;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
 import com.google.android.exoplayer.chunk.ChunkSource;
 import com.google.android.exoplayer.chunk.FormatEvaluator;
@@ -230,7 +231,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
            AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_AUDIO);
        audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
-            mainHandler, player);
+            mainHandler, player, AudioCapabilities.getCapabilities(context));
      }
 
      // Build the text renderer.
LibopusAudioTrackRenderer.java

@@ -370,7 +370,7 @@ public final class LibopusAudioTrackRenderer extends SampleSourceTrackRenderer
     int result = readSource(positionUs, formatHolder, null, false);
     if (result == SampleSource.FORMAT_READ) {
       format = formatHolder.format;
-      audioTrack.reconfigure(format.getFrameworkMediaFormatV16());
+      audioTrack.reconfigure(format.getFrameworkMediaFormatV16(), false);
       return true;
     }
     return false;
MediaCodecAudioTrackRenderer.java

@@ -16,6 +16,7 @@
 package com.google.android.exoplayer;
 
 import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
+import com.google.android.exoplayer.audio.AudioCapabilities;
 import com.google.android.exoplayer.audio.AudioTrack;
 import com.google.android.exoplayer.drm.DrmSessionManager;
 import com.google.android.exoplayer.util.MimeTypes;
@@ -70,6 +71,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
   private final EventListener eventListener;
   private final AudioTrack audioTrack;
 
+  private android.media.MediaFormat passthroughMediaFormat;
   private int audioSessionId;
   private long currentPositionUs;
   private boolean allowPositionDiscontinuity;
@@ -122,19 +124,51 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
    */
   public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
       boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener) {
+    this(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener,
+        null);
+  }
+
+  /**
+   * @param source The upstream source from which the renderer obtains samples.
+   * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
+   *     content is not required.
+   * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+   *     For example a media file may start with a short clear region so as to allow playback to
+   *     begin in parallel with key acquisision. This parameter specifies whether the renderer is
+   *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+   *     has obtained the keys necessary to decrypt encrypted regions of the media.
+   * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+   *     null if delivery of events is not required.
+   * @param eventListener A listener of events. May be null if delivery of events is not required.
+   * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
+   *     default capabilities (no encoded audio passthrough support) should be assumed.
+   */
+  public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
+      boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
+      AudioCapabilities audioCapabilities) {
     super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
     this.eventListener = eventListener;
     this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
-    this.audioTrack = new AudioTrack();
+    this.audioTrack = new AudioTrack(audioCapabilities);
   }
 
   @Override
   protected DecoderInfo getDecoderInfo(String mimeType, boolean requiresSecureDecoder)
       throws DecoderQueryException {
-    if (MimeTypes.isPassthroughAudio(mimeType)) {
-      return new DecoderInfo(RAW_DECODER_NAME, true);
-    }
-    return super.getDecoderInfo(mimeType, requiresSecureDecoder);
+    return allowPassthrough(mimeType) ? new DecoderInfo(RAW_DECODER_NAME, true)
+        : super.getDecoderInfo(mimeType, requiresSecureDecoder);
+  }
+
+  /**
+   * Returns whether encoded audio passthrough may be used for playing back the input format. This
+   * implementation returns true if the {@link AudioTrack}'s audio capabilities indicate that
+   * passthrough is supported.
+   *
+   * @param mimeType The type of input media.
+   * @return True if passthrough playback should be used. False otherwise.
+   */
+  protected boolean allowPassthrough(String mimeType) {
+    return audioTrack.isPassthroughSupported(mimeType);
   }
 
   @Override
@@ -146,8 +180,10 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
       format.setString(android.media.MediaFormat.KEY_MIME, MimeTypes.AUDIO_RAW);
       codec.configure(format, null, crypto, 0);
       format.setString(android.media.MediaFormat.KEY_MIME, mimeType);
+      passthroughMediaFormat = format;
     } else {
       codec.configure(format, null, crypto, 0);
+      passthroughMediaFormat = null;
     }
   }
 
@@ -169,13 +205,9 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
   }
 
   @Override
-  protected void onOutputFormatChanged(MediaFormat inputFormat,
-      android.media.MediaFormat outputFormat) {
-    if (MimeTypes.isPassthroughAudio(inputFormat.mimeType)) {
-      audioTrack.reconfigure(inputFormat.getFrameworkMediaFormatV16());
-    } else {
-      audioTrack.reconfigure(outputFormat);
-    }
+  protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
+    boolean passthrough = passthroughMediaFormat != null;
+    audioTrack.reconfigure(passthrough ? passthroughMediaFormat : outputFormat, passthrough);
   }
 
   /**
MediaCodecTrackRenderer.java

@@ -697,11 +697,9 @@ public abstract class MediaCodecTrackRenderer extends SampleSourceTrackRenderer
    * <p>
    * The default implementation is a no-op.
    *
-   * @param inputFormat The format of media input to the codec.
    * @param outputFormat The new output format.
    */
-  protected void onOutputFormatChanged(MediaFormat inputFormat,
-      android.media.MediaFormat outputFormat) {
+  protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
     // Do nothing.
   }
 
@@ -775,7 +773,7 @@ public abstract class MediaCodecTrackRenderer extends SampleSourceTrackRenderer
     }
 
     if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
-      onOutputFormatChanged(format, codec.getOutputFormat());
+      onOutputFormatChanged(codec.getOutputFormat());
       codecCounters.outputFormatChangedCount++;
       return true;
     } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
MediaCodecVideoTrackRenderer.java

@@ -383,8 +383,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
   }
 
   @Override
-  protected void onOutputFormatChanged(MediaFormat inputFormat,
-      android.media.MediaFormat outputFormat) {
+  protected void onOutputFormatChanged(android.media.MediaFormat outputFormat) {
     boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
         && outputFormat.containsKey(KEY_CROP_LEFT) && outputFormat.containsKey(KEY_CROP_BOTTOM)
         && outputFormat.containsKey(KEY_CROP_TOP);
AudioCapabilities.java

@@ -15,7 +15,13 @@
  */
 package com.google.android.exoplayer.audio;
 
+import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.media.AudioFormat;
+import android.media.AudioManager;
 
 import java.util.Arrays;
 
@@ -25,6 +31,34 @@ import java.util.Arrays;
 @TargetApi(21)
 public final class AudioCapabilities {
 
+  /**
+   * Default to stereo PCM on SDK < 21 and when HDMI is unplugged.
+   */
+  private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
+      new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
+
+  /**
+   * Gets the current audio capabilities. Note that to be notified when audio capabilities change,
+   * you can create an instance of {@link AudioCapabilitiesReceiver} and register a listener.
+   *
+   * @param context Context for receiving the initial broadcast.
+   * @return Current audio capabilities for the device.
+   */
+  @SuppressWarnings("InlinedApi")
+  public static AudioCapabilities getCapabilities(Context context) {
+    return getCapabilities(
+        context.registerReceiver(null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG)));
+  }
+
+  @SuppressLint("InlinedApi")
+  /* package */ static AudioCapabilities getCapabilities(Intent intent) {
+    if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
+      return DEFAULT_AUDIO_CAPABILITIES;
+    }
+    return new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
+        intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0));
+  }
+
   private final int[] supportedEncodings;
   private final int maxChannelCount;
 
@@ -36,7 +70,7 @@ public final class AudioCapabilities {
    * {@code ENCODING_*} constants.
    * @param maxChannelCount The maximum number of audio channels that can be played simultaneously.
    */
-  public AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
+  /* package */ AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
     if (supportedEncodings != null) {
       this.supportedEncodings = Arrays.copyOf(supportedEncodings, supportedEncodings.length);
       Arrays.sort(this.supportedEncodings);
AudioCapabilitiesReceiver.java

@@ -18,41 +18,42 @@ package com.google.android.exoplayer.audio;
 import com.google.android.exoplayer.util.Assertions;
 import com.google.android.exoplayer.util.Util;
 
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
 import android.content.BroadcastReceiver;
 import android.content.Context;
 import android.content.Intent;
 import android.content.IntentFilter;
-import android.media.AudioFormat;
 import android.media.AudioManager;
 
 /**
  * Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
- * receiving notifications, and {@link #unregister} to stop.
+ * (or resume) receiving notifications, and {@link #unregister} to stop.
  */
 public final class AudioCapabilitiesReceiver {
 
-  /** Listener notified when audio capabilities change. */
+  /**
+   * Listener notified when audio capabilities change.
+   */
   public interface Listener {
 
-    /** Called when the audio capabilities change. */
+    /**
+     * Called when the audio capabilities change.
+     *
+     * @param audioCapabilities Current audio capabilities for the device.
+     */
     void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);
 
   }
 
-  /** Default to stereo PCM on SDK < 21 and when HDMI is unplugged. */
-  private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
-      new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
-
   private final Context context;
   private final Listener listener;
   private final BroadcastReceiver receiver;
 
+  /* package */ AudioCapabilities audioCapabilities;
+
   /**
    * Constructs a new audio capabilities receiver.
    *
-   * @param context Application context for registering to receive broadcasts.
+   * @param context Context for registering to receive broadcasts.
    * @param listener Listener to notify when audio capabilities change.
    */
   public AudioCapabilitiesReceiver(Context context, Listener listener) {
@@ -62,41 +63,40 @@ public final class AudioCapabilitiesReceiver {
   }
 
   /**
-   * Registers to notify the listener when audio capabilities change. The listener will immediately
-   * receive the current audio capabilities. It is important to call {@link #unregister} so that
-   * the listener can be garbage collected.
+   * Registers to notify the listener when audio capabilities change. The current capabilities will
+   * be returned. It is important to call {@link #unregister} so that the listener can be garbage
+   * collected.
+   *
+   * @return Current audio capabilities for the device.
    */
-  @TargetApi(21)
-  public void register() {
+  @SuppressWarnings("InlinedApi")
+  public AudioCapabilities register() {
     Intent stickyIntent = receiver == null ? null
        : context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
-    listener.onAudioCapabilitiesChanged(getCapabilities(stickyIntent));
+    audioCapabilities = AudioCapabilities.getCapabilities(stickyIntent);
+    return audioCapabilities;
   }
 
-  /** Unregisters to stop notifying the listener when audio capabilities change. */
+  /**
+   * Unregisters to stop notifying the listener when audio capabilities change.
+   */
   public void unregister() {
     if (receiver != null) {
       context.unregisterReceiver(receiver);
     }
   }
 
-  @SuppressLint("InlinedApi")
-  /* package */ AudioCapabilities getCapabilities(Intent intent) {
-    if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) {
-      return DEFAULT_AUDIO_CAPABILITIES;
-    }
-    return new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
-        intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0));
-  }
-
   private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {
 
     @Override
     public void onReceive(Context context, Intent intent) {
-      if (isInitialStickyBroadcast()) {
-        return;
+      if (!isInitialStickyBroadcast()) {
+        AudioCapabilities newAudioCapabilities = AudioCapabilities.getCapabilities(intent);
+        if (!newAudioCapabilities.equals(audioCapabilities)) {
+          audioCapabilities = newAudioCapabilities;
+          listener.onAudioCapabilitiesChanged(newAudioCapabilities);
+        }
       }
-      listener.onAudioCapabilitiesChanged(getCapabilities(intent));
     }
 
   }
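After this change, AudioCapabilities.getCapabilities(Context) gives a one-shot snapshot, and AudioCapabilitiesReceiver.register() returns the current capabilities while invoking the listener only for subsequent changes. A small usage sketch outside the demo (the listener body here is illustrative, not taken from the commit):

    AudioCapabilitiesReceiver receiver = new AudioCapabilitiesReceiver(context,
        new AudioCapabilitiesReceiver.Listener() {
          @Override
          public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
            // Rebuild whatever part of the audio pipeline depends on passthrough support.
          }
        });
    AudioCapabilities current = receiver.register();
    boolean ac3Passthrough = current.supportsEncoding(C.ENCODING_AC3);
    // ... later, when done:
    receiver.unregister();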
AudioTrack.java

@@ -134,8 +134,10 @@ public final class AudioTrack {
    */
   private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
 
-  /** Value for ac3Bitrate before the bitrate has been calculated. */
-  private static final int UNKNOWN_AC3_BITRATE = 0;
+  /**
+   * Value for {@link #passthroughBitrate} before the bitrate has been calculated.
+   */
+  private static final int UNKNOWN_BITRATE = 0;
 
   private static final int START_NOT_SET = 0;
   private static final int START_IN_SYNC = 1;
@@ -162,6 +164,7 @@ public final class AudioTrack {
    */
   public static boolean failOnSpuriousAudioTimestamp = false;
 
+  private final AudioCapabilities audioCapabilities;
   private final ConditionVariable releasingConditionVariable;
   private final long[] playheadOffsets;
   private final AudioTrackUtil audioTrackUtil;
@@ -196,12 +199,25 @@ public final class AudioTrack {
   private int temporaryBufferOffset;
   private int temporaryBufferSize;
 
-  private boolean isAc3;
-
-  /** Bitrate measured in kilobits per second, if {@link #isAc3} is true. */
-  private int ac3Bitrate;
+  /**
+   * Bitrate measured in kilobits per second, if {@link #isPassthrough()} returns true.
+   */
+  private int passthroughBitrate;
 
+  /**
+   * Creates an audio track with default audio capabilities (no encoded audio passthrough support).
+   */
   public AudioTrack() {
+    this(null);
+  }
+
+  /**
+   * Creates an audio track using the specified audio capabilities.
+   *
+   * @param audioCapabilities The current audio playback capabilities.
+   */
+  public AudioTrack(AudioCapabilities audioCapabilities) {
+    this.audioCapabilities = audioCapabilities;
     releasingConditionVariable = new ConditionVariable(true);
     if (Util.SDK_INT >= 18) {
       try {
@@ -221,6 +237,15 @@ public final class AudioTrack {
     startMediaTimeState = START_NOT_SET;
   }
 
+  /**
+   * Returns whether it is possible to play back input audio in the specified format using encoded
+   * audio passthrough.
+   */
+  public boolean isPassthroughSupported(String mimeType) {
+    return audioCapabilities != null
+        && audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType));
+  }
+
   /**
    * Returns whether the audio track has been successfully initialized via {@link #initialize} and
    * not yet {@link #reset}.
@@ -331,7 +356,7 @@ public final class AudioTrack {
       }
     }
 
-    audioTrackUtil.reconfigure(audioTrack, isAc3);
+    audioTrackUtil.reconfigure(audioTrack, isPassthrough());
     setVolume(volume);
 
     return sessionId;
@@ -340,19 +365,23 @@ public final class AudioTrack {
   /**
   * Reconfigures the audio track to play back media in {@code format}, inferring a buffer size from
   * the format.
+   *
+   * @param format Specifies the channel count and sample rate to play back.
+   * @param passthrough Whether to play back using a passthrough encoding.
   */
-  public void reconfigure(MediaFormat format) {
-    reconfigure(format, 0);
+  public void reconfigure(MediaFormat format, boolean passthrough) {
+    reconfigure(format, passthrough, 0);
  }
 
  /**
   * Reconfigures the audio track to play back media in {@code format}.
   *
   * @param format Specifies the channel count and sample rate to play back.
+   * @param passthrough Whether to playback using a passthrough encoding.
   * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to use a
   *     size inferred from the format.
   */
-  public void reconfigure(MediaFormat format, int specifiedBufferSize) {
+  public void reconfigure(MediaFormat format, boolean passthrough, int specifiedBufferSize) {
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int channelConfig;
    switch (channelCount) {
@@ -371,16 +400,12 @@ public final class AudioTrack {
      default:
        throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
    }
 
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mimeType = format.getString(MediaFormat.KEY_MIME);
-
-    // TODO: Does channelConfig determine channelCount?
-    int encoding = MimeTypes.getEncodingForMimeType(mimeType);
-    boolean isAc3 = encoding == C.ENCODING_AC3 || encoding == C.ENCODING_E_AC3;
-
+    int encoding = passthrough ? getEncodingForMimeType(mimeType) : AudioFormat.ENCODING_PCM_16BIT;
    if (isInitialized() && this.sampleRate == sampleRate && this.channelConfig == channelConfig
-        && !this.isAc3 && !isAc3) {
-      // We already have an existing audio track with the correct sample rate and channel config.
+        && this.encoding == encoding) {
+      // We already have an audio track with the correct sample rate, encoding and channel config.
      return;
    }
@@ -389,8 +414,7 @@ public final class AudioTrack {
    this.encoding = encoding;
    this.sampleRate = sampleRate;
    this.channelConfig = channelConfig;
-    this.isAc3 = isAc3;
-    ac3Bitrate = UNKNOWN_AC3_BITRATE; // Calculated on receiving the first buffer if isAc3 is true.
+    passthroughBitrate = UNKNOWN_BITRATE;
    frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
    minBufferSize = android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);
    Assertions.checkState(minBufferSize != android.media.AudioTrack.ERROR_BAD_VALUE);
@@ -446,7 +470,7 @@ public final class AudioTrack {
    }
 
    // Workarounds for issues with AC-3 passthrough AudioTracks on API versions 21/22:
-    if (Util.SDK_INT <= 22 && isAc3) {
+    if (Util.SDK_INT <= 22 && isPassthrough()) {
      // An AC-3 audio track continues to play data written while it is paused. Stop writing so its
      // buffer empties. See [Internal: b/18899620].
      if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_PAUSED) {
@@ -464,8 +488,8 @@ public final class AudioTrack {
 
    int result = 0;
    if (temporaryBufferSize == 0) {
-      if (isAc3 && ac3Bitrate == UNKNOWN_AC3_BITRATE) {
-        ac3Bitrate = Ac3Util.getBitrate(size, sampleRate);
+      if (isPassthrough() && passthroughBitrate == UNKNOWN_BITRATE) {
+        passthroughBitrate = Ac3Util.getBitrate(size, sampleRate);
      }
 
      // This is the first time we've seen this {@code buffer}.
@@ -673,9 +697,10 @@ public final class AudioTrack {
      }
    }
 
-    // Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack, as the
-    // returned values cause audio/video synchronization to be incorrect.
-    if (!isAc3 && systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
+    // Don't sample the timestamp and latency if this is a passthrough AudioTrack, as the returned
+    // values cause audio/video synchronization to be incorrect.
+    if (!isPassthrough()
+        && systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
      audioTimestampSet = audioTrackUtil.updateTimestamp();
      if (audioTimestampSet) {
        // Perform sanity checks on the timestamp.
@@ -755,9 +780,9 @@ public final class AudioTrack {
  }
 
  private long bytesToFrames(long byteCount) {
-    if (isAc3) {
-      return
-          ac3Bitrate == UNKNOWN_AC3_BITRATE ? 0L : byteCount * 8 * sampleRate / (1000 * ac3Bitrate);
+    if (isPassthrough()) {
+      return passthroughBitrate == UNKNOWN_BITRATE
+          ? 0L : byteCount * 8 * sampleRate / (1000 * passthroughBitrate);
    } else {
      return byteCount / frameSize;
    }
@@ -780,6 +805,20 @@ public final class AudioTrack {
    lastTimestampSampleTimeUs = 0;
  }
 
+  private boolean isPassthrough() {
+    return encoding == C.ENCODING_AC3 || encoding == C.ENCODING_E_AC3;
+  }
+
+  private static int getEncodingForMimeType(String mimeType) {
+    if (MimeTypes.AUDIO_AC3.equals(mimeType)) {
+      return C.ENCODING_AC3;
+    }
+    if (MimeTypes.AUDIO_EC3.equals(mimeType)) {
+      return C.ENCODING_E_AC3;
+    }
+    return AudioFormat.ENCODING_INVALID;
+  }
+
  /**
   * Wraps an {@link android.media.AudioTrack} to expose useful utility methods.
   */
@ -16,7 +16,6 @@
|
|||||||
package com.google.android.exoplayer.extractor.ts;
|
package com.google.android.exoplayer.extractor.ts;
|
||||||
|
|
||||||
import com.google.android.exoplayer.C;
|
import com.google.android.exoplayer.C;
|
||||||
import com.google.android.exoplayer.audio.AudioCapabilities;
|
|
||||||
import com.google.android.exoplayer.extractor.Extractor;
|
import com.google.android.exoplayer.extractor.Extractor;
|
||||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||||
import com.google.android.exoplayer.extractor.ExtractorOutput;
|
import com.google.android.exoplayer.extractor.ExtractorOutput;
|
||||||
@ -59,7 +58,6 @@ public final class TsExtractor implements Extractor {
|
|||||||
private final boolean idrKeyframesOnly;
|
private final boolean idrKeyframesOnly;
|
||||||
private final long firstSampleTimestampUs;
|
private final long firstSampleTimestampUs;
|
||||||
/* package */ final SparseBooleanArray streamTypes;
|
/* package */ final SparseBooleanArray streamTypes;
|
||||||
/* package */ final SparseBooleanArray allowedPassthroughStreamTypes;
|
|
||||||
/* package */ final SparseArray<TsPayloadReader> tsPayloadReaders; // Indexed by pid
|
/* package */ final SparseArray<TsPayloadReader> tsPayloadReaders; // Indexed by pid
|
||||||
|
|
||||||
// Accessed only by the loading thread.
|
// Accessed only by the loading thread.
|
||||||
@ -73,21 +71,15 @@ public final class TsExtractor implements Extractor {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public TsExtractor(long firstSampleTimestampUs) {
|
public TsExtractor(long firstSampleTimestampUs) {
|
||||||
this(firstSampleTimestampUs, null);
|
this(firstSampleTimestampUs, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities) {
|
public TsExtractor(long firstSampleTimestampUs, boolean idrKeyframesOnly) {
|
||||||
this(firstSampleTimestampUs, audioCapabilities, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities,
|
|
||||||
boolean idrKeyframesOnly) {
|
|
||||||
this.firstSampleTimestampUs = firstSampleTimestampUs;
|
this.firstSampleTimestampUs = firstSampleTimestampUs;
|
||||||
this.idrKeyframesOnly = idrKeyframesOnly;
|
this.idrKeyframesOnly = idrKeyframesOnly;
|
||||||
tsScratch = new ParsableBitArray(new byte[3]);
|
tsScratch = new ParsableBitArray(new byte[3]);
|
||||||
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
|
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
|
||||||
streamTypes = new SparseBooleanArray();
|
streamTypes = new SparseBooleanArray();
|
||||||
allowedPassthroughStreamTypes = getPassthroughStreamTypes(audioCapabilities);
|
|
||||||
tsPayloadReaders = new SparseArray<>();
|
tsPayloadReaders = new SparseArray<>();
|
||||||
tsPayloadReaders.put(TS_PAT_PID, new PatReader());
|
tsPayloadReaders.put(TS_PAT_PID, new PatReader());
|
||||||
lastPts = Long.MIN_VALUE;
|
lastPts = Long.MIN_VALUE;
|
||||||
@ -195,24 +187,6 @@ public final class TsExtractor implements Extractor {
     return timeUs + timestampOffsetUs;
   }
 
-  /**
-   * Returns a sparse boolean array of stream types that can be played back based on
-   * {@code audioCapabilities}.
-   */
-  private static SparseBooleanArray getPassthroughStreamTypes(AudioCapabilities audioCapabilities) {
-    SparseBooleanArray streamTypes = new SparseBooleanArray();
-    if (audioCapabilities != null) {
-      if (audioCapabilities.supportsEncoding(C.ENCODING_AC3)) {
-        streamTypes.put(TS_STREAM_TYPE_ATSC_AC3, true);
-      }
-      if (audioCapabilities.supportsEncoding(C.ENCODING_E_AC3)) {
-        // TODO: Uncomment when Ac3Reader supports enhanced AC-3.
-        // streamTypes.put(TS_STREAM_TYPE_ATSC_E_AC3, true);
-      }
-    }
-    return streamTypes;
-  }
-
   /**
    * Parses TS packet payload data.
    */
@ -365,9 +339,6 @@ public final class TsExtractor implements Extractor {
           break;
         case TS_STREAM_TYPE_ATSC_E_AC3:
         case TS_STREAM_TYPE_ATSC_AC3:
-          if (!allowedPassthroughStreamTypes.get(streamType)) {
-            continue;
-          }
           pesPayloadReader = new Ac3Reader(output.track(streamType));
           break;
         case TS_STREAM_TYPE_H264:
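
With this filtering removed, the extractor always exposes AC-3 tracks, so the passthrough-or-decode decision has to be made later in the playback path, where AudioCapabilities is available. Below is a minimal illustrative sketch of such a check; the class and method names are placeholders, and it reuses only the calls visible in the removed code rather than any renderer's actual implementation.

import com.google.android.exoplayer.C;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.util.MimeTypes;

final class PassthroughDecision {
  private PassthroughDecision() {}

  // Illustrative only: true if the device reports passthrough support for the track's
  // encoding, in which case raw output can be used instead of decoding on device.
  static boolean canUsePassthrough(String mimeType, AudioCapabilities audioCapabilities) {
    if (audioCapabilities == null) {
      return false; // No capability information; fall back to on-device decoding.
    }
    if (MimeTypes.AUDIO_AC3.equals(mimeType)) {
      return audioCapabilities.supportsEncoding(C.ENCODING_AC3);
    }
    if (MimeTypes.AUDIO_EC3.equals(mimeType)) {
      return audioCapabilities.supportsEncoding(C.ENCODING_E_AC3);
    }
    return false;
  }
}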
@ -17,7 +17,6 @@ package com.google.android.exoplayer.hls;
 
 import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.MediaFormat;
-import com.google.android.exoplayer.audio.AudioCapabilities;
 import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
 import com.google.android.exoplayer.chunk.Chunk;
 import com.google.android.exoplayer.chunk.DataChunk;
@ -124,7 +123,6 @@ public class HlsChunkSource {
   private final int maxHeight;
   private final long minBufferDurationToSwitchUpUs;
   private final long maxBufferDurationToSwitchDownUs;
-  private final AudioCapabilities audioCapabilities;
 
   // A list of variants considered during playback, ordered by decreasing bandwidth. The following
   // three arrays are of the same length and are ordered in the same way (i.e. variantPlaylists[i],
@ -147,11 +145,9 @@ public class HlsChunkSource {
   private byte[] encryptionIv;
 
   public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
-      BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode,
-      AudioCapabilities audioCapabilities) {
+      BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode) {
     this(dataSource, playlistUrl, playlist, bandwidthMeter, variantIndices, adaptiveMode,
-        DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS,
-        audioCapabilities);
+        DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
   }
 
   /**
@ -169,17 +165,13 @@ public class HlsChunkSource {
    * for a switch to a higher quality variant to be considered.
    * @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered
    * for a switch to a lower quality variant to be considered.
-   * @param audioCapabilities The audio capabilities for playback on this device, or {@code null} if
-   * the default capabilities should be assumed.
    */
   public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
       BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode,
-      long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs,
-      AudioCapabilities audioCapabilities) {
+      long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) {
     this.dataSource = dataSource;
     this.bandwidthMeter = bandwidthMeter;
     this.adaptiveMode = adaptiveMode;
-    this.audioCapabilities = audioCapabilities;
     minBufferDurationToSwitchUpUs = minBufferDurationToSwitchUpMs * 1000;
     maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000;
     baseUri = playlist.baseUri;
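
A caller-side sketch of the trimmed public constructor follows. The wrapper class and parameter names are placeholders, and the import paths for DataSource and BandwidthMeter are assumed from ExoPlayer's usual upstream package rather than taken from this diff.

import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsPlaylist;
import com.google.android.exoplayer.upstream.BandwidthMeter;
import com.google.android.exoplayer.upstream.DataSource;

final class HlsChunkSources {
  private HlsChunkSources() {}

  // The chunk source no longer takes AudioCapabilities; callers simply drop the last argument.
  static HlsChunkSource create(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
      BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode) {
    return new HlsChunkSource(
        dataSource, playlistUrl, playlist, bandwidthMeter, variantIndices, adaptiveMode);
  }
}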
@ -356,8 +348,7 @@ public class HlsChunkSource {
     if (previousTsChunk == null || segment.discontinuity || !format.equals(previousTsChunk.format)
         || liveDiscontinuity) {
       Extractor extractor = chunkUri.getLastPathSegment().endsWith(AAC_FILE_EXTENSION)
-          ? new AdtsExtractor(startTimeUs)
-          : new TsExtractor(startTimeUs, audioCapabilities);
+          ? new AdtsExtractor(startTimeUs) : new TsExtractor(startTimeUs);
       extractorWrapper = new HlsExtractorWrapper(trigger, format, startTimeUs, extractor,
           switchingVariantSpliced);
     } else {
@ -15,11 +15,6 @@
  */
 package com.google.android.exoplayer.util;
 
-import com.google.android.exoplayer.C;
-import com.google.android.exoplayer.audio.AudioCapabilities;
-
-import android.media.AudioFormat;
-
 /**
  * Defines common MIME types and helper methods.
  */
@ -128,37 +123,4 @@ public final class MimeTypes {
     return mimeType.equals(APPLICATION_TTML);
   }
 
-  /**
-   * Returns the output audio encoding that will result from processing input in {@code mimeType}.
-   * For non-passthrough audio formats, this is always {@link AudioFormat#ENCODING_PCM_16BIT}. For
-   * passthrough formats it will be one of {@link AudioFormat}'s other {@code ENCODING_*} constants.
-   * For non-audio formats, {@link AudioFormat#ENCODING_INVALID} will be returned.
-   *
-   * @param mimeType The MIME type of media that will be decoded (or passed through).
-   * @return The corresponding {@link AudioFormat} encoding.
-   */
-  public static int getEncodingForMimeType(String mimeType) {
-    if (AUDIO_AC3.equals(mimeType)) {
-      return C.ENCODING_AC3;
-    }
-    if (AUDIO_EC3.equals(mimeType)) {
-      return C.ENCODING_E_AC3;
-    }
-
-    // All other audio formats will be decoded to 16-bit PCM.
-    return isAudio(mimeType) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_INVALID;
-  }
-
-  /**
-   * Returns whether the specified {@code mimeType} represents audio that can be played via
-   * passthrough if the device supports it.
-   *
-   * @param mimeType The MIME type of input media.
-   * @return Whether the audio can be played via passthrough. If this method returns {@code true},
-   *     it is still necessary to check the {@link AudioCapabilities} for device support.
-   */
-  public static boolean isPassthroughAudio(String mimeType) {
-    return AUDIO_AC3.equals(mimeType) || AUDIO_EC3.equals(mimeType);
-  }
-
 }
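
If an application still needs the MIME-type-to-encoding mapping removed above, the logic is small enough to keep locally. The helper below restates the removed behaviour under an assumed class name and is not part of this commit; it assumes MimeTypes.AUDIO_AC3, MimeTypes.AUDIO_EC3 and MimeTypes.isAudio remain publicly accessible.

import android.media.AudioFormat;

import com.google.android.exoplayer.C;
import com.google.android.exoplayer.util.MimeTypes;

final class AudioEncodings {
  private AudioEncodings() {}

  // Restates the removed MimeTypes.getEncodingForMimeType: AC-3 and E-AC-3 map to their
  // passthrough encodings; other audio decodes to 16-bit PCM; non-audio is invalid.
  static int encodingForMimeType(String mimeType) {
    if (MimeTypes.AUDIO_AC3.equals(mimeType)) {
      return C.ENCODING_AC3;
    }
    if (MimeTypes.AUDIO_EC3.equals(mimeType)) {
      return C.ENCODING_E_AC3;
    }
    return MimeTypes.isAudio(mimeType)
        ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_INVALID;
  }
}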