mirror of https://github.com/androidx/media.git
commit 06e9e5d3ea
@@ -20,6 +20,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecUtil;
+import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.SampleSource;
 import com.google.android.exoplayer.TrackRenderer;
@@ -172,7 +173,13 @@ public class DashRendererBuilder implements RendererBuilder,
     // Determine which video representations we should use for playback.
     ArrayList<Integer> videoRepresentationIndexList = new ArrayList<Integer>();
     if (videoAdaptationSet != null) {
-      int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+      int maxDecodableFrameSize;
+      try {
+        maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+      } catch (DecoderQueryException e) {
+        callback.onRenderersError(e);
+        return;
+      }
       List<Representation> videoRepresentations = videoAdaptationSet.representations;
       for (int i = 0; i < videoRepresentations.size(); i++) {
         Format format = videoRepresentations.get(i).format;
@@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecUtil;
+import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
@@ -125,7 +126,13 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
     }
 
     // Obtain stream elements for playback.
-    int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    int maxDecodableFrameSize;
+    try {
+      maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    } catch (DecoderQueryException e) {
+      callback.onRenderersError(e);
+      return;
+    }
     int audioStreamElementCount = 0;
     int textStreamElementCount = 0;
     int videoStreamElementIndex = -1;
@@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecUtil;
+import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.SampleSource;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
@@ -99,7 +100,14 @@ import java.util.List;
     DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
 
     // Determine which video representations we should use for playback.
-    int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    int maxDecodableFrameSize;
+    try {
+      maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    } catch (DecoderQueryException e) {
+      callback.onRenderersError(e);
+      return;
+    }
+
     int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
     List<Representation> videoRepresentations =
         period.adaptationSets.get(videoAdaptationSetIndex).representations;
@@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
 import com.google.android.exoplayer.LoadControl;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecUtil;
+import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
 import com.google.android.exoplayer.SampleSource;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
@@ -94,7 +95,13 @@ import java.util.ArrayList;
     DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
 
     // Obtain stream elements for playback.
-    int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    int maxDecodableFrameSize;
+    try {
+      maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
+    } catch (DecoderQueryException e) {
+      callback.onRenderersError(e);
+      return;
+    }
     int audioStreamElementIndex = -1;
     int videoStreamElementIndex = -1;
     ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
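Note: the four renderer-builder hunks above all apply the same fix: `maxH264DecodableFrameSize()` now throws the checked `DecoderQueryException`, so each builder catches it, reports it through its callback, and aborts building renderers. A minimal, self-contained sketch of that pattern (the `Callable`/`ErrorCallback` names here are illustrative stand-ins, not ExoPlayer API):

```java
import java.util.concurrent.Callable;

// Sketch: a capability query that used to fail unchecked is now a checked
// exception, and builders report it through their error callback instead of
// letting playback crash.
public final class QueryPatternSketch {

  interface ErrorCallback {
    void onRenderersError(Exception e);
  }

  static int buildRenderers(Callable<Integer> frameSizeQuery, ErrorCallback callback) {
    int maxDecodableFrameSize;
    try {
      maxDecodableFrameSize = frameSizeQuery.call();
    } catch (Exception e) {
      // Surface the failure to the caller rather than crashing.
      callback.onRenderersError(e);
      return -1; // The real builders simply return; -1 marks the aborted path here.
    }
    return maxDecodableFrameSize;
  }

  public static void main(String[] args) {
    // Successful query.
    System.out.println(buildRenderers(() -> 921600, e -> System.out.println("error: " + e)));
    // Failing query surfaces through the callback instead of crashing.
    System.out.println(buildRenderers(
        () -> { throw new IllegalStateException("mediaserver down"); },
        e -> System.out.println("error: " + e)));
  }
}
```

Routing the failure through the callback matters because, per the javadoc added in MediaCodecUtil below, such query failures are normally temporary (e.g. a restarting mediaserver process).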
@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer;
 
+import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
 import com.google.android.exoplayer.drm.DrmSessionManager;
 import com.google.android.exoplayer.util.Assertions;
 import com.google.android.exoplayer.util.Util;
@@ -67,8 +68,12 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
    */
   public static class DecoderInitializationException extends Exception {
 
+    private static final int CUSTOM_ERROR_CODE_BASE = -50000;
+    private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1;
+    private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2;
+
     /**
-     * The name of the decoder that failed to initialize.
+     * The name of the decoder that failed to initialize. Null if no suitable decoder was found.
      */
     public final String decoderName;
 
@@ -77,8 +82,14 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
      */
     public final String diagnosticInfo;
 
-    public DecoderInitializationException(String decoderName, MediaFormat mediaFormat,
-        Throwable cause) {
+    public DecoderInitializationException(MediaFormat mediaFormat, Throwable cause, int errorCode) {
+      super("Decoder init failed: [" + errorCode + "], " + mediaFormat, cause);
+      this.decoderName = null;
+      this.diagnosticInfo = buildCustomDiagnosticInfo(errorCode);
+    }
+
+    public DecoderInitializationException(MediaFormat mediaFormat, Throwable cause,
+        String decoderName) {
       super("Decoder init failed: " + decoderName + ", " + mediaFormat, cause);
       this.decoderName = decoderName;
       this.diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null;
@@ -92,6 +103,11 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
       return null;
     }
+
+    private static String buildCustomDiagnosticInfo(int errorCode) {
+      String sign = errorCode < 0 ? "neg_" : "";
+      return "com.google.android.exoplayer.MediaCodecTrackRenderer_" + sign + Math.abs(errorCode);
+    }
 
   }
 
   /**
@@ -281,21 +297,29 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
       }
     }
 
-    DecoderInfo selectedDecoderInfo = MediaCodecUtil.getDecoderInfo(mimeType,
-        requiresSecureDecoder);
-    String selectedDecoderName = selectedDecoderInfo.name;
-    codecIsAdaptive = selectedDecoderInfo.adaptive;
+    DecoderInfo decoderInfo = null;
     try {
-      codec = MediaCodec.createByCodecName(selectedDecoderName);
+      decoderInfo = MediaCodecUtil.getDecoderInfo(mimeType, requiresSecureDecoder);
+    } catch (DecoderQueryException e) {
+      notifyAndThrowDecoderInitError(new DecoderInitializationException(format, e,
+          DecoderInitializationException.DECODER_QUERY_ERROR));
+    }
+
+    if (decoderInfo == null) {
+      notifyAndThrowDecoderInitError(new DecoderInitializationException(format, null,
+          DecoderInitializationException.NO_SUITABLE_DECODER_ERROR));
+    }
+
+    String decoderName = decoderInfo.name;
+    codecIsAdaptive = decoderInfo.adaptive;
+    try {
+      codec = MediaCodec.createByCodecName(decoderName);
       configureCodec(codec, format.getFrameworkMediaFormatV16(), mediaCrypto);
       codec.start();
       inputBuffers = codec.getInputBuffers();
       outputBuffers = codec.getOutputBuffers();
     } catch (Exception e) {
-      DecoderInitializationException exception = new DecoderInitializationException(
-          selectedDecoderName, format, e);
-      notifyDecoderInitializationError(exception);
-      throw new ExoPlaybackException(exception);
+      notifyAndThrowDecoderInitError(new DecoderInitializationException(format, e, decoderName));
     }
     codecHotswapTimeMs = getState() == TrackRenderer.STATE_STARTED ?
         SystemClock.elapsedRealtime() : -1;
@@ -305,6 +329,12 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
     codecCounters.codecInitCount++;
   }
+
+  private void notifyAndThrowDecoderInitError(DecoderInitializationException e)
+      throws ExoPlaybackException {
+    notifyDecoderInitializationError(e);
+    throw new ExoPlaybackException(e);
+  }
 
   protected boolean shouldInitCodec() {
     return codec == null && format != null;
   }
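Note: with the two constructors above, `DecoderInitializationException` now distinguishes a decoder that failed to start (`decoderName` set, platform diagnostic info on API 21+) from the two query-time failures, which carry custom error codes and synthesize their diagnostic string. A runnable sketch of the custom diagnostic scheme, with constants copied from the diff:

```java
// Self-contained sketch of the custom diagnostic-info scheme added above:
// error codes below zero are encoded with a "neg_" prefix so the resulting
// string stays a single identifier-like token.
public final class DiagnosticInfoSketch {

  private static final int CUSTOM_ERROR_CODE_BASE = -50000;
  private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1; // -49999
  private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2; // -49998

  private static String buildCustomDiagnosticInfo(int errorCode) {
    String sign = errorCode < 0 ? "neg_" : "";
    return "com.google.android.exoplayer.MediaCodecTrackRenderer_" + sign + Math.abs(errorCode);
  }

  public static void main(String[] args) {
    // Prints ..._neg_49999 and ..._neg_49998 respectively.
    System.out.println(buildCustomDiagnosticInfo(NO_SUITABLE_DECODER_ERROR));
    System.out.println(buildCustomDiagnosticInfo(DECODER_QUERY_ERROR));
  }
}
```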
@@ -35,6 +35,20 @@ import java.util.HashMap;
 @TargetApi(16)
 public class MediaCodecUtil {
 
+  /**
+   * Thrown when an error occurs querying the device for its underlying media capabilities.
+   * <p>
+   * Such failures are not expected in normal operation and are normally temporary (e.g. if the
+   * mediaserver process has crashed and is yet to restart).
+   */
+  public static class DecoderQueryException extends Exception {
+
+    private DecoderQueryException(Throwable cause) {
+      super("Failed to query underlying media codecs", cause);
+    }
+
+  }
+
   private static final String TAG = "MediaCodecUtil";
 
   private static final HashMap<CodecKey, Pair<String, CodecCapabilities>> codecs =
@@ -48,7 +62,8 @@ public class MediaCodecUtil {
    * unless secure decryption really is required.
    * @return Information about the decoder that will be used, or null if no decoder exists.
    */
-  public static DecoderInfo getDecoderInfo(String mimeType, boolean secure) {
+  public static DecoderInfo getDecoderInfo(String mimeType, boolean secure)
+      throws DecoderQueryException {
     Pair<String, CodecCapabilities> info = getMediaCodecInfo(mimeType, secure);
     if (info == null) {
       return null;
@@ -66,14 +81,19 @@ public class MediaCodecUtil {
    * unless secure decryption really is required.
    */
   public static synchronized void warmCodec(String mimeType, boolean secure) {
-    getMediaCodecInfo(mimeType, secure);
+    try {
+      getMediaCodecInfo(mimeType, secure);
+    } catch (DecoderQueryException e) {
+      // Codec warming is best effort, so we can swallow the exception.
+      Log.e(TAG, "Codec warming failed", e);
+    }
   }
 
   /**
    * Returns the name of the best decoder and its capabilities for the given mimeType.
    */
   private static synchronized Pair<String, CodecCapabilities> getMediaCodecInfo(
-      String mimeType, boolean secure) {
+      String mimeType, boolean secure) throws DecoderQueryException {
     CodecKey key = new CodecKey(mimeType, secure);
     if (codecs.containsKey(key)) {
       return codecs.get(key);
@@ -95,6 +115,17 @@ public class MediaCodecUtil {
   }
 
   private static Pair<String, CodecCapabilities> getMediaCodecInfo(CodecKey key,
+      MediaCodecListCompat mediaCodecList) throws DecoderQueryException {
+    try {
+      return getMediaCodecInfoInternal(key, mediaCodecList);
+    } catch (Exception e) {
+      // If the underlying mediaserver is in a bad state, we may catch an IllegalStateException
+      // or an IllegalArgumentException here.
+      throw new DecoderQueryException(e);
+    }
+  }
+
+  private static Pair<String, CodecCapabilities> getMediaCodecInfoInternal(CodecKey key,
       MediaCodecListCompat mediaCodecList) {
     String mimeType = key.mimeType;
     int numberOfCodecs = mediaCodecList.getCodecCount();
@@ -153,7 +184,8 @@ public class MediaCodecUtil {
    * @param level An AVC profile level from {@link CodecProfileLevel}.
    * @return Whether the specified profile is supported at the specified level.
    */
-  public static boolean isH264ProfileSupported(int profile, int level) {
+  public static boolean isH264ProfileSupported(int profile, int level)
+      throws DecoderQueryException {
     Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
     if (info == null) {
       return false;
@@ -173,7 +205,7 @@ public class MediaCodecUtil {
   /**
    * @return the maximum frame size for an H264 stream that can be decoded on the device.
    */
-  public static int maxH264DecodableFrameSize() {
+  public static int maxH264DecodableFrameSize() throws DecoderQueryException {
     Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
     if (info == null) {
       return 0;
@@ -248,20 +280,23 @@ public class MediaCodecUtil {
   @TargetApi(21)
   private static final class MediaCodecListCompatV21 implements MediaCodecListCompat {
 
-    private final MediaCodecInfo[] mediaCodecInfos;
+    private final int codecKind;
+
+    private MediaCodecInfo[] mediaCodecInfos;
 
     public MediaCodecListCompatV21(boolean includeSecure) {
-      int codecKind = includeSecure ? MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS;
-      mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
+      codecKind = includeSecure ? MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS;
     }
 
     @Override
     public int getCodecCount() {
+      ensureMediaCodecInfosInitialized();
       return mediaCodecInfos.length;
     }
 
     @Override
     public MediaCodecInfo getCodecInfoAt(int index) {
+      ensureMediaCodecInfosInitialized();
       return mediaCodecInfos[index];
     }
 
@@ -275,6 +310,12 @@ public class MediaCodecUtil {
       return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
     }
+
+    private void ensureMediaCodecInfosInitialized() {
+      if (mediaCodecInfos == null) {
+        mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
+      }
+    }
 
   }
 
   @SuppressWarnings("deprecation")
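Note: moving `new MediaCodecList(codecKind).getCodecInfos()` out of the constructor and behind `ensureMediaCodecInfosInitialized()` defers the framework call until first use, which places it inside the try/catch added in `getMediaCodecInfo` above, so a failing mediaserver surfaces as `DecoderQueryException` rather than an exception at construction time. A generic sketch of the same lazy-initialization move (`Supplier` here stands in for the framework query; names are illustrative):

```java
import java.util.function.Supplier;

// Sketch: the potentially-failing query runs on first access, where callers
// have a try/catch in place, instead of inside the constructor.
final class LazyCodecList<T> {

  private final Supplier<T[]> query;
  private T[] infos; // Deferred until first access.

  LazyCodecList(Supplier<T[]> query) {
    this.query = query; // Cheap to store; cannot throw.
  }

  int getCodecCount() {
    ensureInitialized();
    return infos.length;
  }

  T getCodecInfoAt(int index) {
    ensureInitialized();
    return infos[index];
  }

  private void ensureInitialized() {
    if (infos == null) {
      infos = query.get(); // Any failure surfaces here, where callers can catch it.
    }
  }

  public static void main(String[] args) {
    LazyCodecList<String> list = new LazyCodecList<>(() -> new String[] {"omx.test.decoder"});
    System.out.println(list.getCodecCount()); // Query runs here, not at construction.
  }
}
```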
|
@ -109,14 +109,14 @@ public final class AudioTrack {
|
|||||||
*
|
*
|
||||||
* <p>This is a fail safe that should not be required on correctly functioning devices.
|
* <p>This is a fail safe that should not be required on correctly functioning devices.
|
||||||
*/
|
*/
|
||||||
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 10 * C.MICROS_PER_SECOND;
|
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
|
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
|
||||||
*
|
*
|
||||||
* <p>This is a fail safe that should not be required on correctly functioning devices.
|
* <p>This is a fail safe that should not be required on correctly functioning devices.
|
||||||
*/
|
*/
|
||||||
private static final long MAX_LATENCY_US = 10 * C.MICROS_PER_SECOND;
|
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
|
||||||
|
|
||||||
/** Value for ac3Bitrate before the bitrate has been calculated. */
|
/** Value for ac3Bitrate before the bitrate has been calculated. */
|
||||||
private static final int UNKNOWN_AC3_BITRATE = 0;
|
private static final int UNKNOWN_AC3_BITRATE = 0;
|
||||||
@@ -130,10 +130,10 @@ public final class AudioTrack {
   private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
 
   private final ConditionVariable releasingConditionVariable;
-  private final AudioTimestampCompat audioTimestampCompat;
   private final long[] playheadOffsets;
 
   private android.media.AudioTrack audioTrack;
+  private AudioTrackUtil audioTrackUtil;
   private int sampleRate;
   private int channelConfig;
   private int encoding;
@@ -147,8 +147,6 @@ public final class AudioTrack {
   private long lastPlayheadSampleTimeUs;
   private boolean audioTimestampSet;
   private long lastTimestampSampleTimeUs;
-  private long lastRawPlaybackHeadPosition;
-  private long rawPlaybackHeadWrapCount;
 
   private Method getLatencyMethod;
   private long submittedBytes;
@@ -169,11 +167,6 @@ public final class AudioTrack {
 
   public AudioTrack() {
     releasingConditionVariable = new ConditionVariable(true);
-    if (Util.SDK_INT >= 19) {
-      audioTimestampCompat = new AudioTimestampCompatV19();
-    } else {
-      audioTimestampCompat = new NoopAudioTimestampCompat();
-    }
     if (Util.SDK_INT >= 18) {
       try {
         getLatencyMethod =
@@ -219,15 +212,15 @@ public final class AudioTrack {
     long currentPositionUs;
     if (audioTimestampSet) {
       // How long ago in the past the audio timestamp is (negative if it's in the future).
-      long presentationDiff = systemClockUs - (audioTimestampCompat.getNanoTime() / 1000);
+      long presentationDiff = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
       long framesDiff = durationUsToFrames(presentationDiff);
       // The position of the frame that's currently being presented.
-      long currentFramePosition = audioTimestampCompat.getFramePosition() + framesDiff;
+      long currentFramePosition = audioTrackUtil.getTimestampFramePosition() + framesDiff;
       currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
     } else {
       if (playheadOffsetCount == 0) {
         // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
-        currentPositionUs = getPlaybackPositionUs() + startMediaTimeUs;
+        currentPositionUs = audioTrackUtil.getPlaybackHeadPositionUs() + startMediaTimeUs;
       } else {
         // getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
         // system clock (and a smoothed offset between it and the playhead position) so as to
@@ -274,7 +267,13 @@ public final class AudioTrack {
       audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
           channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
     }
 
     checkAudioTrackInitialized();
+    if (Util.SDK_INT >= 19) {
+      audioTrackUtil = new AudioTrackUtilV19(audioTrack);
+    } else {
+      audioTrackUtil = new AudioTrackUtil(audioTrack);
+    }
     setVolume(volume);
     return audioTrack.getAudioSessionId();
   }
@@ -440,7 +439,8 @@ public final class AudioTrack {
     int bytesWritten = 0;
     if (Util.SDK_INT < 21) {
       // Work out how many bytes we can write without the risk of blocking.
-      int bytesPending = (int) (submittedBytes - (getPlaybackPositionFrames() * frameSize));
+      int bytesPending =
+          (int) (submittedBytes - (audioTrackUtil.getPlaybackHeadPosition() * frameSize));
       int bytesToWrite = bufferSize - bytesPending;
       if (bytesToWrite > 0) {
         bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
@@ -473,7 +473,8 @@ public final class AudioTrack {
 
   /** Returns whether the audio track has more data pending that will be played back. */
   public boolean hasPendingData() {
-    return isInitialized() && bytesToFrames(submittedBytes) > getPlaybackPositionFrames();
+    return isInitialized()
+        && bytesToFrames(submittedBytes) > audioTrackUtil.getPlaybackHeadPosition();
   }
 
   /** Returns whether enough data has been supplied via {@link #handleBuffer} to begin playback. */
@@ -520,8 +521,6 @@ public final class AudioTrack {
     if (isInitialized()) {
       submittedBytes = 0;
       temporaryBufferSize = 0;
-      lastRawPlaybackHeadPosition = 0;
-      rawPlaybackHeadWrapCount = 0;
       startMediaTimeUs = START_NOT_SET;
       resetSyncParams();
       int playState = audioTrack.getPlayState();
@@ -531,6 +530,7 @@ public final class AudioTrack {
       // AudioTrack.release can take some time, so we call it on a background thread.
       final android.media.AudioTrack toRelease = audioTrack;
       audioTrack = null;
+      audioTrackUtil = null;
       releasingConditionVariable.close();
       new Thread() {
         @Override
@@ -552,7 +552,7 @@ public final class AudioTrack {
 
   /** Updates the audio track latency and playback position parameters. */
   private void maybeSampleSyncParams() {
-    long playbackPositionUs = getPlaybackPositionUs();
+    long playbackPositionUs = audioTrackUtil.getPlaybackHeadPositionUs();
     if (playbackPositionUs == 0) {
       // The AudioTrack hasn't output anything yet.
       return;
@@ -573,18 +573,27 @@ public final class AudioTrack {
     }
 
     if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
-      audioTimestampSet = audioTimestampCompat.update(audioTrack);
+      audioTimestampSet = audioTrackUtil.updateTimestamp();
       if (audioTimestampSet) {
         // Perform sanity checks on the timestamp.
-        long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
+        long audioTimestampUs = audioTrackUtil.getTimestampNanoTime() / 1000;
+        long audioTimestampFramePosition = audioTrackUtil.getTimestampFramePosition();
         if (audioTimestampUs < resumeSystemTimeUs) {
           // The timestamp corresponds to a time before the track was most recently resumed.
           audioTimestampSet = false;
         } else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
           // The timestamp time base is probably wrong.
           audioTimestampSet = false;
-          Log.w(TAG, "Spurious audio timestamp: " + audioTimestampCompat.getFramePosition() + ", "
-              + audioTimestampUs + ", " + systemClockUs);
+          Log.w(TAG, "Spurious audio timestamp (system clock mismatch): "
+              + audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
+              + playbackPositionUs);
+        } else if (Math.abs(framesToDurationUs(audioTimestampFramePosition) - playbackPositionUs)
+            > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
+          // The timestamp frame position is probably wrong.
+          audioTimestampSet = false;
+          Log.w(TAG, "Spurious audio timestamp (frame position mismatch): "
+              + audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
+              + playbackPositionUs);
         }
       }
       if (getLatencyMethod != null) {
@@ -634,29 +643,6 @@ public final class AudioTrack {
     throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
   }
 
-  /**
-   * {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
-   * interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
-   * returns the playback head position as a long that will only wrap around if the value exceeds
-   * {@link Long#MAX_VALUE} (which in practice will never happen).
-   *
-   * @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
-   *     expressed as a long.
-   */
-  private long getPlaybackPositionFrames() {
-    long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
-    if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
-      // The value must have wrapped around.
-      rawPlaybackHeadWrapCount++;
-    }
-    lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
-    return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
-  }
-
-  private long getPlaybackPositionUs() {
-    return framesToDurationUs(getPlaybackPositionFrames());
-  }
-
   private long bytesToFrames(long byteCount) {
     if (isAc3) {
       return
@@ -684,72 +670,126 @@ public final class AudioTrack {
   }
 
   /**
-   * Interface exposing the {@link android.media.AudioTimestamp} methods we need that were added in
-   * SDK 19.
+   * Wraps an {@link android.media.AudioTrack} to expose useful utility methods.
    */
-  private interface AudioTimestampCompat {
+  private static class AudioTrackUtil {
 
+    protected final android.media.AudioTrack audioTrack;
+    private final int sampleRate;
+
+    private long lastRawPlaybackHeadPosition;
+    private long rawPlaybackHeadWrapCount;
+
+    public AudioTrackUtil(android.media.AudioTrack audioTrack) {
+      this.audioTrack = audioTrack;
+      this.sampleRate = audioTrack.getSampleRate();
+    }
+
     /**
-     * Returns true if the audioTimestamp was retrieved from the audioTrack.
+     * {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
+     * interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
+     * returns the playback head position as a long that will only wrap around if the value exceeds
+     * {@link Long#MAX_VALUE} (which in practice will never happen).
+     *
+     * @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
+     *     expressed as a long.
      */
-    boolean update(android.media.AudioTrack audioTrack);
+    public long getPlaybackHeadPosition() {
+      long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
+      if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
+        // The value must have wrapped around.
+        rawPlaybackHeadWrapCount++;
+      }
+      lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
+      return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
+    }
 
-    long getNanoTime();
+    /**
+     * Returns {@link #getPlaybackHeadPosition()} expressed as microseconds.
+     */
+    public long getPlaybackHeadPositionUs() {
+      return (getPlaybackHeadPosition() * C.MICROS_PER_SECOND) / sampleRate;
+    }
 
-    long getFramePosition();
-
-  }
-
-  /**
-   * The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
-   */
-  private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {
-
-    @Override
-    public boolean update(android.media.AudioTrack audioTrack) {
+    /**
+     * Updates the values returned by {@link #getTimestampNanoTime()} and
+     * {@link #getTimestampFramePosition()}.
+     *
+     * @return True if the timestamp values were updated. False otherwise.
+     */
+    public boolean updateTimestamp() {
       return false;
     }
 
-    @Override
-    public long getNanoTime() {
-      // Should never be called if initTimestamp() returned false.
+    /**
+     * Returns the {@link android.media.AudioTimestamp#nanoTime} obtained during the most recent
+     * call to {@link #updateTimestamp()} that returned true.
+     *
+     * @return The nanoTime obtained during the most recent call to {@link #updateTimestamp()} that
+     *     returned true.
+     * @throws UnsupportedOperationException If the implementation does not support audio timestamp
+     *     queries. {@link #updateTimestamp()} will always return false in this case.
+     */
+    public long getTimestampNanoTime() {
+      // Should never be called if updateTimestamp() returned false.
       throw new UnsupportedOperationException();
     }
 
-    @Override
-    public long getFramePosition() {
-      // Should never be called if initTimestamp() returned false.
+    /**
+     * Returns the {@link android.media.AudioTimestamp#framePosition} obtained during the most
+     * recent call to {@link #updateTimestamp()} that returned true. The value is adjusted so that
+     * wrap around only occurs if the value exceeds {@link Long#MAX_VALUE} (which in practice will
+     * never happen).
+     *
+     * @return The framePosition obtained during the most recent call to {@link #updateTimestamp()}
+     *     that returned true.
+     * @throws UnsupportedOperationException If the implementation does not support audio timestamp
+     *     queries. {@link #updateTimestamp()} will always return false in this case.
+     */
+    public long getTimestampFramePosition() {
+      // Should never be called if updateTimestamp() returned false.
       throw new UnsupportedOperationException();
     }
 
   }
 
-  /**
-   * The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
-   * implementations added in SDK 19.
-   */
   @TargetApi(19)
-  private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {
+  private static class AudioTrackUtilV19 extends AudioTrackUtil {
 
     private final AudioTimestamp audioTimestamp;
 
-    public AudioTimestampCompatV19() {
+    private long rawTimestampFramePositionWrapCount;
+    private long lastRawTimestampFramePosition;
+    private long lastTimestampFramePosition;
+
+    public AudioTrackUtilV19(android.media.AudioTrack audioTrack) {
+      super(audioTrack);
       audioTimestamp = new AudioTimestamp();
     }
 
     @Override
-    public boolean update(android.media.AudioTrack audioTrack) {
-      return audioTrack.getTimestamp(audioTimestamp);
+    public boolean updateTimestamp() {
+      boolean updated = audioTrack.getTimestamp(audioTimestamp);
+      if (updated) {
+        long rawFramePosition = audioTimestamp.framePosition;
+        if (lastRawTimestampFramePosition > rawFramePosition) {
+          // The value must have wrapped around.
+          rawTimestampFramePositionWrapCount++;
+        }
+        lastRawTimestampFramePosition = rawFramePosition;
+        lastTimestampFramePosition = rawFramePosition + (rawTimestampFramePositionWrapCount << 32);
+      }
+      return updated;
    }
 
     @Override
-    public long getNanoTime() {
+    public long getTimestampNanoTime() {
       return audioTimestamp.nanoTime;
     }
 
     @Override
-    public long getFramePosition() {
+    public long getTimestampFramePosition() {
-      return audioTimestamp.framePosition;
+      return lastTimestampFramePosition;
     }
 
   }
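Note: both `getPlaybackHeadPosition()` and `updateTimestamp()` above use the same trick: the framework reports a value meant to be read as an unsigned 32-bit integer, so the code masks it with `0xFFFFFFFFL` and keeps a wrap counter to extend it into a monotonically increasing long. A self-contained demonstration of that extension (class and method names here are illustrative):

```java
// Sketch of the unsigned-32-bit wraparound handling used in the diff above.
public final class WrapAroundSketch {

  private long lastRaw;
  private long wrapCount;

  public long extend(int rawFromFramework) {
    long raw = 0xFFFFFFFFL & rawFromFramework; // Interpret the int as unsigned.
    if (lastRaw > raw) {
      // The value must have wrapped around.
      wrapCount++;
    }
    lastRaw = raw;
    return raw + (wrapCount << 32);
  }

  public static void main(String[] args) {
    WrapAroundSketch position = new WrapAroundSketch();
    System.out.println(position.extend(0xFFFFFFF0)); // 4294967280
    System.out.println(position.extend(16));         // 4294967312: wrap detected, count bumped
  }
}
```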
@@ -24,6 +24,7 @@ import com.google.android.exoplayer.upstream.DataSpec;
 import com.google.android.exoplayer.upstream.Loader.Loadable;
 import com.google.android.exoplayer.upstream.NonBlockingInputStream;
 import com.google.android.exoplayer.util.Assertions;
+import com.google.android.exoplayer.util.TraceUtil;
 
 import java.io.IOException;
 
@@ -173,7 +174,12 @@ public abstract class Chunk implements Loadable {
 
   @Override
   public final void load() throws IOException, InterruptedException {
-    dataSourceStream.load();
+    TraceUtil.beginSection("chunkLoad");
+    try {
+      dataSourceStream.load();
+    } finally {
+      TraceUtil.endSection();
+    }
   }
 
 }
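Note: the try/finally guarantees the trace section is closed even when `dataSourceStream.load()` throws. `TraceUtil` itself is not shown in this diff; a plausible minimal wrapper over `android.os.Trace` (available from API 18), sketched here purely as an assumption about its shape:

```java
import android.annotation.TargetApi;
import android.os.Trace;

// Hypothetical TraceUtil-style helper: guards the API-18-only Trace calls
// behind an SDK_INT check so older devices become a no-op.
public final class TraceUtilSketch {

  public static void beginSection(String sectionName) {
    if (android.os.Build.VERSION.SDK_INT >= 18) {
      beginSectionV18(sectionName);
    }
  }

  public static void endSection() {
    if (android.os.Build.VERSION.SDK_INT >= 18) {
      endSectionV18();
    }
  }

  @TargetApi(18)
  private static void beginSectionV18(String sectionName) {
    Trace.beginSection(sectionName);
  }

  @TargetApi(18)
  private static void endSectionV18() {
    Trace.endSection();
  }

  private TraceUtilSketch() {}
}
```

With this in place, the "chunkLoad" section shows up in systrace output bracketing each chunk load.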
@@ -137,7 +137,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
   /**
    * The default minimum number of times to retry loading data prior to failing.
    */
-  public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 1;
+  public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 3;
 
   private static final int STATE_UNPREPARED = 0;
   private static final int STATE_PREPARED = 1;
@@ -489,8 +489,8 @@ public final class WebmExtractor implements Extractor {
     } else if (CODEC_ID_OPUS.equals(codecId)) {
       ArrayList<byte[]> opusInitializationData = new ArrayList<byte[]>(3);
       opusInitializationData.add(codecPrivate);
-      opusInitializationData.add(ByteBuffer.allocate(8).putLong(codecDelayNs).array());
-      opusInitializationData.add(ByteBuffer.allocate(8).putLong(seekPreRollNs).array());
+      opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(codecDelayNs).array());
+      opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(seekPreRollNs).array());
       format = MediaFormat.createAudioFormat(
           MimeTypes.AUDIO_OPUS, OPUS_MAX_INPUT_SIZE, channelCount, sampleRate,
           opusInitializationData);
@@ -393,18 +393,22 @@ public class HttpDataSource implements DataSource {
         connection.setRequestProperty(property.getKey(), property.getValue());
       }
     }
+    setRangeHeader(connection, dataSpec);
     connection.setRequestProperty("User-Agent", userAgent);
-    connection.setRequestProperty("Range", buildRangeHeader(dataSpec));
     connection.connect();
     return connection;
   }
 
-  private String buildRangeHeader(DataSpec dataSpec) {
+  private void setRangeHeader(HttpURLConnection connection, DataSpec dataSpec) {
+    if (dataSpec.position == 0 && dataSpec.length == C.LENGTH_UNBOUNDED) {
+      // Not required.
+      return;
+    }
     String rangeRequest = "bytes=" + dataSpec.position + "-";
     if (dataSpec.length != C.LENGTH_UNBOUNDED) {
       rangeRequest += (dataSpec.position + dataSpec.length - 1);
     }
-    return rangeRequest;
+    connection.setRequestProperty("Range", rangeRequest);
  }
 
   private long getContentLength(HttpURLConnection connection) {
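Note: `setRangeHeader` now skips the header entirely for a whole-resource request (`position == 0`, unbounded length) instead of always sending a `Range` header. A runnable sketch of the header values it produces (`LENGTH_UNBOUNDED = -1` is an assumption made only so the sketch runs; real code uses ExoPlayer's `C.LENGTH_UNBOUNDED` constant):

```java
// Sketch of the Range header values produced by the rewritten setRangeHeader.
public final class RangeHeaderSketch {

  private static final long LENGTH_UNBOUNDED = -1; // Assumed stand-in for C.LENGTH_UNBOUNDED.

  static String rangeHeader(long position, long length) {
    if (position == 0 && length == LENGTH_UNBOUNDED) {
      return null; // Header not required; the whole resource is requested.
    }
    String rangeRequest = "bytes=" + position + "-";
    if (length != LENGTH_UNBOUNDED) {
      // HTTP ranges are inclusive, hence the -1 on the end offset.
      rangeRequest += (position + length - 1);
    }
    return rangeRequest;
  }

  public static void main(String[] args) {
    System.out.println(rangeHeader(0, LENGTH_UNBOUNDED));   // null: no header set
    System.out.println(rangeHeader(100, LENGTH_UNBOUNDED)); // bytes=100-
    System.out.println(rangeHeader(100, 50));               // bytes=100-149
  }
}
```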