Merge pull request #234 from google/dev

dev -> dev-hls
ojw28 committed 2015-01-12 17:43:44 +00:00 · commit 06e9e5d3ea
11 changed files with 260 additions and 110 deletions

DashRendererBuilder.java

@ -20,6 +20,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer;
@ -172,7 +173,13 @@ public class DashRendererBuilder implements RendererBuilder,
// Determine which video representations we should use for playback.
ArrayList<Integer> videoRepresentationIndexList = new ArrayList<Integer>();
if (videoAdaptationSet != null) {
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
int maxDecodableFrameSize;
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
List<Representation> videoRepresentations = videoAdaptationSet.representations;
for (int i = 0; i < videoRepresentations.size(); i++) {
Format format = videoRepresentations.get(i).format;
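
All four renderer-builder hunks in this commit make the same change: MediaCodecUtil.maxH264DecodableFrameSize() now throws the checked DecoderQueryException, so each builder wraps the call and reports failure through its callback rather than letting the query crash renderer construction. A minimal sketch of the shared pattern, with a hypothetical BuilderCallback interface standing in for the demo app's real callback type:

import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;

// Sketch only: BuilderCallback is a stand-in, not the demo app's actual interface.
interface BuilderCallback {
  void onRenderersError(Exception e);
}

final class FrameSizeQuery {
  private FrameSizeQuery() {}

  // Returns the maximum decodable H264 frame size, or -1 after reporting the
  // query failure to the callback (mirroring the early return in the hunks above).
  static int maxH264FrameSizeOrReport(BuilderCallback callback) {
    try {
      return MediaCodecUtil.maxH264DecodableFrameSize();
    } catch (DecoderQueryException e) {
      callback.onRenderersError(e);
      return -1;
    }
  }
}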

SmoothStreamingRendererBuilder.java

@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
@ -125,7 +126,13 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
}
// Obtain stream elements for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
int maxDecodableFrameSize;
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;


@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
@ -99,7 +100,14 @@ import java.util.List;
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
// Determine which video representations we should use for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
int maxDecodableFrameSize;
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
List<Representation> videoRepresentations =
period.adaptationSets.get(videoAdaptationSetIndex).representations;


@ -19,6 +19,7 @@ import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
@ -94,7 +95,13 @@ import java.util.ArrayList;
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
// Obtain stream elements for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
int maxDecodableFrameSize;
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
int audioStreamElementIndex = -1;
int videoStreamElementIndex = -1;
ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();

MediaCodecTrackRenderer.java

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
@ -67,8 +68,12 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
*/
public static class DecoderInitializationException extends Exception {
private static final int CUSTOM_ERROR_CODE_BASE = -50000;
private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1;
private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2;
/**
* The name of the decoder that failed to initialize.
* The name of the decoder that failed to initialize. Null if no suitable decoder was found.
*/
public final String decoderName;
@ -77,8 +82,14 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
*/
public final String diagnosticInfo;
public DecoderInitializationException(String decoderName, MediaFormat mediaFormat,
Throwable cause) {
public DecoderInitializationException(MediaFormat mediaFormat, Throwable cause, int errorCode) {
super("Decoder init failed: [" + errorCode + "], " + mediaFormat, cause);
this.decoderName = null;
this.diagnosticInfo = buildCustomDiagnosticInfo(errorCode);
}
public DecoderInitializationException(MediaFormat mediaFormat, Throwable cause,
String decoderName) {
super("Decoder init failed: " + decoderName + ", " + mediaFormat, cause);
this.decoderName = decoderName;
this.diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null;
@ -92,6 +103,11 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
return null;
}
private static String buildCustomDiagnosticInfo(int errorCode) {
String sign = errorCode < 0 ? "neg_" : "";
return "com.google.android.exoplayer.MediaCodecTrackRenderer_" + sign + Math.abs(errorCode);
}
}
/**
@ -281,21 +297,29 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
DecoderInfo selectedDecoderInfo = MediaCodecUtil.getDecoderInfo(mimeType,
requiresSecureDecoder);
String selectedDecoderName = selectedDecoderInfo.name;
codecIsAdaptive = selectedDecoderInfo.adaptive;
DecoderInfo decoderInfo = null;
try {
codec = MediaCodec.createByCodecName(selectedDecoderName);
decoderInfo = MediaCodecUtil.getDecoderInfo(mimeType, requiresSecureDecoder);
} catch (DecoderQueryException e) {
notifyAndThrowDecoderInitError(new DecoderInitializationException(format, e,
DecoderInitializationException.DECODER_QUERY_ERROR));
}
if (decoderInfo == null) {
notifyAndThrowDecoderInitError(new DecoderInitializationException(format, null,
DecoderInitializationException.NO_SUITABLE_DECODER_ERROR));
}
String decoderName = decoderInfo.name;
codecIsAdaptive = decoderInfo.adaptive;
try {
codec = MediaCodec.createByCodecName(decoderName);
configureCodec(codec, format.getFrameworkMediaFormatV16(), mediaCrypto);
codec.start();
inputBuffers = codec.getInputBuffers();
outputBuffers = codec.getOutputBuffers();
} catch (Exception e) {
DecoderInitializationException exception = new DecoderInitializationException(
selectedDecoderName, format, e);
notifyDecoderInitializationError(exception);
throw new ExoPlaybackException(exception);
notifyAndThrowDecoderInitError(new DecoderInitializationException(format, e, decoderName));
}
codecHotswapTimeMs = getState() == TrackRenderer.STATE_STARTED ?
SystemClock.elapsedRealtime() : -1;
@ -305,6 +329,12 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
codecCounters.codecInitCount++;
}
private void notifyAndThrowDecoderInitError(DecoderInitializationException e)
throws ExoPlaybackException {
notifyDecoderInitializationError(e);
throw new ExoPlaybackException(e);
}
protected boolean shouldInitCodec() {
return codec == null && format != null;
}
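
When initialization fails before any codec has been created (no suitable decoder, or the capability query itself failed), there is no framework diagnosticInfo to report, so the new constructor synthesizes one from the error code. A self-contained sketch showing what buildCustomDiagnosticInfo yields for the two codes introduced above:

public final class DiagnosticInfoDemo {
  // Constants and logic copied from the hunks above.
  private static final int CUSTOM_ERROR_CODE_BASE = -50000;
  private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1; // -49999
  private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2; // -49998

  private static String buildCustomDiagnosticInfo(int errorCode) {
    String sign = errorCode < 0 ? "neg_" : "";
    return "com.google.android.exoplayer.MediaCodecTrackRenderer_" + sign + Math.abs(errorCode);
  }

  public static void main(String[] args) {
    // com.google.android.exoplayer.MediaCodecTrackRenderer_neg_49999
    System.out.println(buildCustomDiagnosticInfo(NO_SUITABLE_DECODER_ERROR));
    // com.google.android.exoplayer.MediaCodecTrackRenderer_neg_49998
    System.out.println(buildCustomDiagnosticInfo(DECODER_QUERY_ERROR));
  }
}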

MediaCodecUtil.java

@ -35,6 +35,20 @@ import java.util.HashMap;
@TargetApi(16)
public class MediaCodecUtil {
/**
* Thrown when an error occurs querying the device for its underlying media capabilities.
* <p>
* Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart).
*/
public static class DecoderQueryException extends Exception {
private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause);
}
}
private static final String TAG = "MediaCodecUtil";
private static final HashMap<CodecKey, Pair<String, CodecCapabilities>> codecs =
@ -48,7 +62,8 @@ public class MediaCodecUtil {
* unless secure decryption really is required.
* @return Information about the decoder that will be used, or null if no decoder exists.
*/
public static DecoderInfo getDecoderInfo(String mimeType, boolean secure) {
public static DecoderInfo getDecoderInfo(String mimeType, boolean secure)
throws DecoderQueryException {
Pair<String, CodecCapabilities> info = getMediaCodecInfo(mimeType, secure);
if (info == null) {
return null;
@ -66,14 +81,19 @@ public class MediaCodecUtil {
* unless secure decryption really is required.
*/
public static synchronized void warmCodec(String mimeType, boolean secure) {
getMediaCodecInfo(mimeType, secure);
try {
getMediaCodecInfo(mimeType, secure);
} catch (DecoderQueryException e) {
// Codec warming is best effort, so we can swallow the exception.
Log.e(TAG, "Codec warming failed", e);
}
}
/**
* Returns the name of the best decoder and its capabilities for the given mimeType.
*/
private static synchronized Pair<String, CodecCapabilities> getMediaCodecInfo(
String mimeType, boolean secure) {
String mimeType, boolean secure) throws DecoderQueryException {
CodecKey key = new CodecKey(mimeType, secure);
if (codecs.containsKey(key)) {
return codecs.get(key);
@ -95,6 +115,17 @@ public class MediaCodecUtil {
}
private static Pair<String, CodecCapabilities> getMediaCodecInfo(CodecKey key,
MediaCodecListCompat mediaCodecList) throws DecoderQueryException {
try {
return getMediaCodecInfoInternal(key, mediaCodecList);
} catch (Exception e) {
// If the underlying mediaserver is in a bad state, we may catch an IllegalStateException
// or an IllegalArgumentException here.
throw new DecoderQueryException(e);
}
}
private static Pair<String, CodecCapabilities> getMediaCodecInfoInternal(CodecKey key,
MediaCodecListCompat mediaCodecList) {
String mimeType = key.mimeType;
int numberOfCodecs = mediaCodecList.getCodecCount();
@ -153,7 +184,8 @@ public class MediaCodecUtil {
* @param level An AVC profile level from {@link CodecProfileLevel}.
* @return Whether the specified profile is supported at the specified level.
*/
public static boolean isH264ProfileSupported(int profile, int level) {
public static boolean isH264ProfileSupported(int profile, int level)
throws DecoderQueryException {
Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
if (info == null) {
return false;
@ -173,7 +205,7 @@ public class MediaCodecUtil {
/**
* @return the maximum frame size for an H264 stream that can be decoded on the device.
*/
public static int maxH264DecodableFrameSize() {
public static int maxH264DecodableFrameSize() throws DecoderQueryException {
Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
if (info == null) {
return 0;
@ -248,20 +280,23 @@ public class MediaCodecUtil {
@TargetApi(21)
private static final class MediaCodecListCompatV21 implements MediaCodecListCompat {
private final MediaCodecInfo[] mediaCodecInfos;
private final int codecKind;
private MediaCodecInfo[] mediaCodecInfos;
public MediaCodecListCompatV21(boolean includeSecure) {
int codecKind = includeSecure ? MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS;
mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
codecKind = includeSecure ? MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS;
}
@Override
public int getCodecCount() {
ensureMediaCodecInfosInitialized();
return mediaCodecInfos.length;
}
@Override
public MediaCodecInfo getCodecInfoAt(int index) {
ensureMediaCodecInfosInitialized();
return mediaCodecInfos[index];
}
@ -275,6 +310,12 @@ public class MediaCodecUtil {
return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
}
private void ensureMediaCodecInfosInitialized() {
if (mediaCodecInfos == null) {
mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
}
}
}
@SuppressWarnings("deprecation")
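
The MediaCodecListCompatV21 change above moves the MediaCodecList query out of the constructor and into the accessors, which keeps the framework query inside the try/catch added earlier in this file. A generic sketch of that lazy-initialization move (API 21 types; the class name is illustrative, not ExoPlayer API):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

// Illustrative: the framework call that can throw is deferred until first use,
// where the caller's try/catch can translate failures into DecoderQueryException.
final class LazyCodecList {
  private final int codecKind;
  private MediaCodecInfo[] infos; // Populated lazily.

  LazyCodecList(int codecKind) {
    this.codecKind = codecKind; // Constructor can no longer fail.
  }

  int getCodecCount() {
    ensureInitialized();
    return infos.length;
  }

  MediaCodecInfo getCodecInfoAt(int index) {
    ensureInitialized();
    return infos[index];
  }

  private void ensureInitialized() {
    if (infos == null) {
      // May throw (e.g. IllegalStateException) if mediaserver is in a bad state.
      infos = new MediaCodecList(codecKind).getCodecInfos();
    }
  }
}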

AudioTrack.java

@ -109,14 +109,14 @@ public final class AudioTrack {
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 10 * C.MICROS_PER_SECOND;
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
/**
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_LATENCY_US = 10 * C.MICROS_PER_SECOND;
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
/** Value for ac3Bitrate before the bitrate has been calculated. */
private static final int UNKNOWN_AC3_BITRATE = 0;
@ -130,10 +130,10 @@ public final class AudioTrack {
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
private final ConditionVariable releasingConditionVariable;
private final AudioTimestampCompat audioTimestampCompat;
private final long[] playheadOffsets;
private android.media.AudioTrack audioTrack;
private AudioTrackUtil audioTrackUtil;
private int sampleRate;
private int channelConfig;
private int encoding;
@ -147,8 +147,6 @@ public final class AudioTrack {
private long lastPlayheadSampleTimeUs;
private boolean audioTimestampSet;
private long lastTimestampSampleTimeUs;
private long lastRawPlaybackHeadPosition;
private long rawPlaybackHeadWrapCount;
private Method getLatencyMethod;
private long submittedBytes;
@ -169,11 +167,6 @@ public final class AudioTrack {
public AudioTrack() {
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 19) {
audioTimestampCompat = new AudioTimestampCompatV19();
} else {
audioTimestampCompat = new NoopAudioTimestampCompat();
}
if (Util.SDK_INT >= 18) {
try {
getLatencyMethod =
@ -219,15 +212,15 @@ public final class AudioTrack {
long currentPositionUs;
if (audioTimestampSet) {
// How long ago in the past the audio timestamp is (negative if it's in the future).
long presentationDiff = systemClockUs - (audioTimestampCompat.getNanoTime() / 1000);
long presentationDiff = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
long framesDiff = durationUsToFrames(presentationDiff);
// The position of the frame that's currently being presented.
long currentFramePosition = audioTimestampCompat.getFramePosition() + framesDiff;
long currentFramePosition = audioTrackUtil.getTimestampFramePosition() + framesDiff;
currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
} else {
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
currentPositionUs = getPlaybackPositionUs() + startMediaTimeUs;
currentPositionUs = audioTrackUtil.getPlaybackHeadPositionUs() + startMediaTimeUs;
} else {
// getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
// system clock (and a smoothed offset between it and the playhead position) so as to
@ -274,7 +267,13 @@ public final class AudioTrack {
audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
}
checkAudioTrackInitialized();
if (Util.SDK_INT >= 19) {
audioTrackUtil = new AudioTrackUtilV19(audioTrack);
} else {
audioTrackUtil = new AudioTrackUtil(audioTrack);
}
setVolume(volume);
return audioTrack.getAudioSessionId();
}
@ -440,7 +439,8 @@ public final class AudioTrack {
int bytesWritten = 0;
if (Util.SDK_INT < 21) {
// Work out how many bytes we can write without the risk of blocking.
int bytesPending = (int) (submittedBytes - (getPlaybackPositionFrames() * frameSize));
int bytesPending =
(int) (submittedBytes - (audioTrackUtil.getPlaybackHeadPosition() * frameSize));
int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) {
bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
@ -473,7 +473,8 @@ public final class AudioTrack {
/** Returns whether the audio track has more data pending that will be played back. */
public boolean hasPendingData() {
return isInitialized() && bytesToFrames(submittedBytes) > getPlaybackPositionFrames();
return isInitialized()
&& bytesToFrames(submittedBytes) > audioTrackUtil.getPlaybackHeadPosition();
}
/** Returns whether enough data has been supplied via {@link #handleBuffer} to begin playback. */
@ -520,8 +521,6 @@ public final class AudioTrack {
if (isInitialized()) {
submittedBytes = 0;
temporaryBufferSize = 0;
lastRawPlaybackHeadPosition = 0;
rawPlaybackHeadWrapCount = 0;
startMediaTimeUs = START_NOT_SET;
resetSyncParams();
int playState = audioTrack.getPlayState();
@ -531,6 +530,7 @@ public final class AudioTrack {
// AudioTrack.release can take some time, so we call it on a background thread.
final android.media.AudioTrack toRelease = audioTrack;
audioTrack = null;
audioTrackUtil = null;
releasingConditionVariable.close();
new Thread() {
@Override
@ -552,7 +552,7 @@ public final class AudioTrack {
/** Updates the audio track latency and playback position parameters. */
private void maybeSampleSyncParams() {
long playbackPositionUs = getPlaybackPositionUs();
long playbackPositionUs = audioTrackUtil.getPlaybackHeadPositionUs();
if (playbackPositionUs == 0) {
// The AudioTrack hasn't output anything yet.
return;
@ -573,18 +573,27 @@ public final class AudioTrack {
}
if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
audioTimestampSet = audioTimestampCompat.update(audioTrack);
audioTimestampSet = audioTrackUtil.updateTimestamp();
if (audioTimestampSet) {
// Perform sanity checks on the timestamp.
long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
long audioTimestampUs = audioTrackUtil.getTimestampNanoTime() / 1000;
long audioTimestampFramePosition = audioTrackUtil.getTimestampFramePosition();
if (audioTimestampUs < resumeSystemTimeUs) {
// The timestamp corresponds to a time before the track was most recently resumed.
audioTimestampSet = false;
} else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
// The timestamp time base is probably wrong.
audioTimestampSet = false;
Log.w(TAG, "Spurious audio timestamp: " + audioTimestampCompat.getFramePosition() + ", "
+ audioTimestampUs + ", " + systemClockUs);
Log.w(TAG, "Spurious audio timestamp (system clock mismatch): "
+ audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
+ playbackPositionUs);
} else if (Math.abs(framesToDurationUs(audioTimestampFramePosition) - playbackPositionUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
// The timestamp frame position is probably wrong.
audioTimestampSet = false;
Log.w(TAG, "Spurious audio timestamp (frame position mismatch): "
+ audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
+ playbackPositionUs);
}
}
if (getLatencyMethod != null) {
@ -634,29 +643,6 @@ public final class AudioTrack {
throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
}
/**
* {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
* interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
* returns the playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
* @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
* expressed as a long.
*/
private long getPlaybackPositionFrames() {
long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
// The value must have wrapped around.
rawPlaybackHeadWrapCount++;
}
lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
}
private long getPlaybackPositionUs() {
return framesToDurationUs(getPlaybackPositionFrames());
}
private long bytesToFrames(long byteCount) {
if (isAc3) {
return
@ -684,72 +670,126 @@ public final class AudioTrack {
}
/**
* Interface exposing the {@link android.media.AudioTimestamp} methods we need that were added in
* SDK 19.
* Wraps an {@link android.media.AudioTrack} to expose useful utility methods.
*/
private interface AudioTimestampCompat {
private static class AudioTrackUtil {
protected final android.media.AudioTrack audioTrack;
private final int sampleRate;
private long lastRawPlaybackHeadPosition;
private long rawPlaybackHeadWrapCount;
public AudioTrackUtil(android.media.AudioTrack audioTrack) {
this.audioTrack = audioTrack;
this.sampleRate = audioTrack.getSampleRate();
}
/**
* Returns true if the audioTimestamp was retrieved from the audioTrack.
* {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
* interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
* returns the playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
* @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
* expressed as a long.
*/
boolean update(android.media.AudioTrack audioTrack);
public long getPlaybackHeadPosition() {
long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
// The value must have wrapped around.
rawPlaybackHeadWrapCount++;
}
lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
}
long getNanoTime();
/**
* Returns {@link #getPlaybackHeadPosition()} expressed as microseconds.
*/
public long getPlaybackHeadPositionUs() {
return (getPlaybackHeadPosition() * C.MICROS_PER_SECOND) / sampleRate;
}
long getFramePosition();
}
/**
* The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
*/
private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {
@Override
public boolean update(android.media.AudioTrack audioTrack) {
/**
* Updates the values returned by {@link #getTimestampNanoTime()} and
* {@link #getTimestampFramePosition()}.
*
* @return True if the timestamp values were updated. False otherwise.
*/
public boolean updateTimestamp() {
return false;
}
@Override
public long getNanoTime() {
// Should never be called if initTimestamp() returned false.
/**
* Returns the {@link android.media.AudioTimestamp#nanoTime} obtained during the most recent
* call to {@link #updateTimestamp()} that returned true.
*
* @return The nanoTime obtained during the most recent call to {@link #updateTimestamp()} that
* returned true.
* @throws UnsupportedOperationException If the implementation does not support audio timestamp
* queries. {@link #updateTimestamp()} will always return false in this case.
*/
public long getTimestampNanoTime() {
// Should never be called if updateTimestamp() returned false.
throw new UnsupportedOperationException();
}
@Override
public long getFramePosition() {
// Should never be called if initTimestamp() returned false.
/**
* Returns the {@link android.media.AudioTimestamp#framePosition} obtained during the most
* recent call to {@link #updateTimestamp()} that returned true. The value is adjusted so that
* wrap around only occurs if the value exceeds {@link Long#MAX_VALUE} (which in practice will
* never happen).
*
* @return The framePosition obtained during the most recent call to {@link #updateTimestamp()}
* that returned true.
* @throws UnsupportedOperationException If the implementation does not support audio timestamp
* queries. {@link #updateTimestamp()} will always return false in this case.
*/
public long getTimestampFramePosition() {
// Should never be called if updateTimestamp() returned false.
throw new UnsupportedOperationException();
}
}
/**
* The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
* implementations added in SDK 19.
*/
@TargetApi(19)
private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {
private static class AudioTrackUtilV19 extends AudioTrackUtil {
private final AudioTimestamp audioTimestamp;
public AudioTimestampCompatV19() {
private long rawTimestampFramePositionWrapCount;
private long lastRawTimestampFramePosition;
private long lastTimestampFramePosition;
public AudioTrackUtilV19(android.media.AudioTrack audioTrack) {
super(audioTrack);
audioTimestamp = new AudioTimestamp();
}
@Override
public boolean update(android.media.AudioTrack audioTrack) {
return audioTrack.getTimestamp(audioTimestamp);
public boolean updateTimestamp() {
boolean updated = audioTrack.getTimestamp(audioTimestamp);
if (updated) {
long rawFramePosition = audioTimestamp.framePosition;
if (lastRawTimestampFramePosition > rawFramePosition) {
// The value must have wrapped around.
rawTimestampFramePositionWrapCount++;
}
lastRawTimestampFramePosition = rawFramePosition;
lastTimestampFramePosition = rawFramePosition + (rawTimestampFramePositionWrapCount << 32);
}
return updated;
}
@Override
public long getNanoTime() {
public long getTimestampNanoTime() {
return audioTimestamp.nanoTime;
}
@Override
public long getFramePosition() {
return audioTimestamp.framePosition;
public long getTimestampFramePosition() {
return lastTimestampFramePosition;
}
}
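
Both AudioTrackUtil.getPlaybackHeadPosition() and AudioTrackUtilV19.updateTimestamp() above apply the same wraparound bookkeeping: the platform reports a value meant to be read as an unsigned 32-bit integer, so a wrap count is accumulated whenever a new sample is smaller than the previous one. A standalone sketch of the technique (class and method names are illustrative):

// Illustrative helper: turns a stream of unsigned-32-bit samples into a
// monotonically increasing long, as the two methods above do.
final class UnsignedWrapCounter {
  private long lastRaw;
  private long wrapCount;

  long toMonotonicLong(int raw32) {
    long raw = 0xFFFFFFFFL & raw32; // Reinterpret the signed int as unsigned.
    if (lastRaw > raw) {
      wrapCount++; // The 32-bit counter wrapped since the last sample.
    }
    lastRaw = raw;
    return raw + (wrapCount << 32);
  }
}

The new sanity checks in maybeSampleSyncParams then discard timestamps whose time base or frame position disagrees with the system clock or the playback head position by more than MAX_AUDIO_TIMESTAMP_OFFSET_US, which this commit tightens from 10 to 5 seconds.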

Chunk.java

@ -24,6 +24,7 @@ import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.TraceUtil;
import java.io.IOException;
@ -173,7 +174,12 @@ public abstract class Chunk implements Loadable {
@Override
public final void load() throws IOException, InterruptedException {
dataSourceStream.load();
TraceUtil.beginSection("chunkLoad");
try {
dataSourceStream.load();
} finally {
TraceUtil.endSection();
}
}
}
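
The load() change wraps the actual I/O in a named systrace section, ending it in a finally block so the section is closed even when the load throws. ExoPlayer's TraceUtil wraps the underlying framework calls behind an SDK check; a minimal sketch of the same pattern against android.os.Trace directly (API 18+, hypothetical helper class):

import android.annotation.TargetApi;
import android.os.Trace;

@TargetApi(18)
final class TracedWork {
  // Runs work inside a "chunkLoad" systrace section; the finally block
  // guarantees the section ends even if work throws.
  static void run(Runnable work) {
    Trace.beginSection("chunkLoad");
    try {
      work.run();
    } finally {
      Trace.endSection();
    }
  }
}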

ChunkSampleSource.java

@ -137,7 +137,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
/**
* The default minimum number of times to retry loading data prior to failing.
*/
public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 1;
public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 3;
private static final int STATE_UNPREPARED = 0;
private static final int STATE_PREPARED = 1;

WebmExtractor.java

@ -489,8 +489,8 @@ public final class WebmExtractor implements Extractor {
} else if (CODEC_ID_OPUS.equals(codecId)) {
ArrayList<byte[]> opusInitializationData = new ArrayList<byte[]>(3);
opusInitializationData.add(codecPrivate);
opusInitializationData.add(ByteBuffer.allocate(8).putLong(codecDelayNs).array());
opusInitializationData.add(ByteBuffer.allocate(8).putLong(seekPreRollNs).array());
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(codecDelayNs).array());
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(seekPreRollNs).array());
format = MediaFormat.createAudioFormat(
MimeTypes.AUDIO_OPUS, OPUS_MAX_INPUT_SIZE, channelCount, sampleRate,
opusInitializationData);
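
A note on the ByteBuffer sizes in this hunk: Long.SIZE is measured in bits (64), not bytes, so allocate(Long.SIZE) reserves 64 bytes of which putLong fills only the first 8, whereas the original literal 8 matched the write exactly. A runnable sketch of the difference (demo class is illustrative):

import java.nio.ByteBuffer;

public final class LongBufferSizeDemo {
  public static void main(String[] args) {
    byte[] literal = ByteBuffer.allocate(8).putLong(6500000L).array();
    byte[] viaSize = ByteBuffer.allocate(Long.SIZE).putLong(6500000L).array();
    System.out.println(literal.length); // 8: exactly one big-endian long.
    System.out.println(viaSize.length); // 64: the long plus 56 trailing zero bytes.
    // Long.SIZE / 8 gives the byte count (Long.BYTES on Java 8+).
  }
}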

HttpDataSource.java

@ -393,18 +393,22 @@ public class HttpDataSource implements DataSource {
connection.setRequestProperty(property.getKey(), property.getValue());
}
}
setRangeHeader(connection, dataSpec);
connection.setRequestProperty("User-Agent", userAgent);
connection.setRequestProperty("Range", buildRangeHeader(dataSpec));
connection.connect();
return connection;
}
private String buildRangeHeader(DataSpec dataSpec) {
private void setRangeHeader(HttpURLConnection connection, DataSpec dataSpec) {
if (dataSpec.position == 0 && dataSpec.length == C.LENGTH_UNBOUNDED) {
// Not required.
return;
}
String rangeRequest = "bytes=" + dataSpec.position + "-";
if (dataSpec.length != C.LENGTH_UNBOUNDED) {
rangeRequest += (dataSpec.position + dataSpec.length - 1);
}
return rangeRequest;
connection.setRequestProperty("Range", rangeRequest);
}
private long getContentLength(HttpURLConnection connection) {
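
The refactored setRangeHeader only attaches a Range header when one is actually needed, building either an open-ended "bytes=start-" or a closed "bytes=start-end" with an inclusive end offset. A self-contained sketch of the same logic, with a local LENGTH_UNBOUNDED constant standing in for C.LENGTH_UNBOUNDED:

// Sketch of the Range header logic above; returns null when no header is required.
final class RangeHeaders {
  static final long LENGTH_UNBOUNDED = -1; // Stand-in for C.LENGTH_UNBOUNDED.

  static String buildRangeValue(long position, long length) {
    if (position == 0 && length == LENGTH_UNBOUNDED) {
      return null; // Whole resource requested; header not required.
    }
    String value = "bytes=" + position + "-";
    if (length != LENGTH_UNBOUNDED) {
      value += (position + length - 1); // HTTP byte ranges are end-inclusive.
    }
    return value;
  }
}

For example, buildRangeValue(100, 50) yields "bytes=100-149", while buildRangeValue(0, LENGTH_UNBOUNDED) yields null and no header is set.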