Merge pull request #130 from google/dev-l
Merge dev-l into dev, now L SDK is released.
Commit 441d7f4e17
@@ -19,7 +19,7 @@ android {
 
     defaultConfig {
         minSdkVersion 16
-        targetSdkVersion 19
+        targetSdkVersion 21
     }
     buildTypes {
         release {
@@ -25,7 +25,7 @@
     <uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/>
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
 
-    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="19"/>
+    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21"/>
 
     <application
         android:label="@string/application_name"
@@ -8,6 +8,6 @@
 # project structure.
 
 # Project target.
-target=android-19
+target=android-21
 android.library=false
 android.library.reference.1=../../../library/src/main
@@ -19,7 +19,7 @@ android {
 
     defaultConfig {
         minSdkVersion 9
-        targetSdkVersion 19
+        targetSdkVersion 21
    }
 
     buildTypes {
@@ -27,6 +27,6 @@
      the library may be of use on older devices. However, please note that the core video playback
      functionality provided by the library requires API level 16 or greater.
 -->
-<uses-sdk android:minSdkVersion="9" android:targetSdkVersion="19"/>
+<uses-sdk android:minSdkVersion="9" android:targetSdkVersion="21"/>
 
 </manifest>
@@ -336,7 +336,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
           AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM, audioSessionId);
       checkAudioTrackInitialized();
     }
-    audioTrack.setStereoVolume(volume, volume);
+    setVolume(volume);
     if (getState() == TrackRenderer.STATE_STARTED) {
       audioTrackResumeSystemTimeUs = System.nanoTime() / 1000;
       audioTrack.play();
@@ -519,7 +519,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
     }
 
     if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
-      audioTimestampSet = audioTimestampCompat.initTimestamp(audioTrack);
+      audioTimestampSet = audioTimestampCompat.update(audioTrack);
       if (audioTimestampSet) {
         // Perform sanity checks on the timestamp.
         long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
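For context on the rename above: on API 19 and higher, the framework call wrapped by AudioTimestampCompat is AudioTrack.getTimestamp(AudioTimestamp), which refreshes the caller-owned AudioTimestamp on every invocation and returns whether a fresh reading was available, so "update" describes the semantics more accurately than "initTimestamp". A minimal sketch of the wrapped call (class and method names here are illustrative, not part of the commit):

    import android.media.AudioTimestamp;
    import android.media.AudioTrack;

    // Sketch only (API 19+): getTimestamp() re-populates the supplied
    // AudioTimestamp each time it is called and returns true on success.
    final class AudioTimestampSketch {
      private final AudioTimestamp audioTimestamp = new AudioTimestamp();

      long getTimestampNanoTimeOrZero(AudioTrack audioTrack) {
        // framePosition and nanoTime describe the same instant: the frame most
        // recently presented, and when it was presented (CLOCK_MONOTONIC).
        return audioTrack.getTimestamp(audioTimestamp) ? audioTimestamp.nanoTime : 0;
      }
    }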
@@ -637,42 +637,52 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
       }
     }
 
-      // Copy {@code buffer} into {@code temporaryBuffer}.
-      // TODO: Bypass this copy step on versions of Android where [redacted] is implemented.
-      if (temporaryBuffer == null || temporaryBuffer.length < bufferInfo.size) {
-        temporaryBuffer = new byte[bufferInfo.size];
-      }
-      buffer.position(bufferInfo.offset);
-      buffer.get(temporaryBuffer, 0, bufferInfo.size);
-      temporaryBufferOffset = 0;
       temporaryBufferSize = bufferInfo.size;
+      buffer.position(bufferInfo.offset);
+      if (Util.SDK_INT < 21) {
+        // Copy {@code buffer} into {@code temporaryBuffer}.
+        if (temporaryBuffer == null || temporaryBuffer.length < bufferInfo.size) {
+          temporaryBuffer = new byte[bufferInfo.size];
+        }
+        buffer.get(temporaryBuffer, 0, bufferInfo.size);
+        temporaryBufferOffset = 0;
+      }
     }
 
     if (audioTrack == null) {
       initAudioTrack();
     }
 
-    // TODO: Don't bother doing this once [redacted] is fixed.
-    // Work out how many bytes we can write without the risk of blocking.
-    int bytesPending = (int) (submittedBytes - getPlaybackHeadPosition() * frameSize);
-    int bytesToWrite = bufferSize - bytesPending;
-    if (bytesToWrite > 0) {
-      bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
-      audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
-      temporaryBufferOffset += bytesToWrite;
-      temporaryBufferSize -= bytesToWrite;
-      submittedBytes += bytesToWrite;
-      if (temporaryBufferSize == 0) {
-        codec.releaseOutputBuffer(bufferIndex, false);
-        codecCounters.renderedOutputBufferCount++;
-        return true;
+    int bytesWritten = 0;
+    if (Util.SDK_INT < 21) {
+      // Work out how many bytes we can write without the risk of blocking.
+      int bytesPending = (int) (submittedBytes - getPlaybackHeadPosition() * frameSize);
+      int bytesToWrite = bufferSize - bytesPending;
+      if (bytesToWrite > 0) {
+        bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
+        bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
+        temporaryBufferOffset += bytesWritten;
       }
+    } else {
+      bytesWritten = writeNonBlockingV21(audioTrack, buffer, temporaryBufferSize);
+    }
+
+    temporaryBufferSize -= bytesWritten;
+    submittedBytes += bytesWritten;
+    if (temporaryBufferSize == 0) {
+      codec.releaseOutputBuffer(bufferIndex, false);
+      codecCounters.renderedOutputBufferCount++;
+      return true;
     }
 
     return false;
   }
 
+  @TargetApi(21)
+  private int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) {
+    return audioTrack.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING);
+  }
+
   /**
    * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as
    * an unsigned 32 bit integer, which also wraps around periodically. This method returns the
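The V21 path above relies on the AudioTrack.write(ByteBuffer, int, int) overload added in API 21. Unlike the byte[] overload, it takes the ByteBuffer directly, and with WRITE_NON_BLOCKING it returns immediately with the number of bytes actually queued (possibly zero when the track's buffer is full, or a negative error code), advancing the ByteBuffer's position accordingly; this is what makes the pre-21 copy into temporaryBuffer and the manual free-space arithmetic unnecessary on new devices. A hedged usage sketch (wrapper name is illustrative):

    import android.annotation.TargetApi;
    import android.media.AudioTrack;
    import java.nio.ByteBuffer;

    final class NonBlockingWriteSketch {
      // Sketch only: attempt to queue everything remaining in the buffer.
      // The caller should treat a short write as normal back-pressure.
      @TargetApi(21)
      static int drain(AudioTrack audioTrack, ByteBuffer buffer) {
        return audioTrack.write(buffer, buffer.remaining(), AudioTrack.WRITE_NON_BLOCKING);
      }
    }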
@@ -709,10 +719,24 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
   private void setVolume(float volume) {
     this.volume = volume;
     if (audioTrack != null) {
-      audioTrack.setStereoVolume(volume, volume);
+      if (Util.SDK_INT >= 21) {
+        setVolumeV21(audioTrack, volume);
+      } else {
+        setVolumeV3(audioTrack, volume);
+      }
     }
   }
 
+  @TargetApi(21)
+  private static void setVolumeV21(AudioTrack audioTrack, float volume) {
+    audioTrack.setVolume(volume);
+  }
+
+  @SuppressWarnings("deprecation")
+  private static void setVolumeV3(AudioTrack audioTrack, float volume) {
+    audioTrack.setStereoVolume(volume, volume);
+  }
+
   private void notifyAudioTrackInitializationError(final AudioTrackInitializationException e) {
     if (eventHandler != null && eventListener != null) {
       eventHandler.post(new Runnable() {
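setStereoVolume(float, float) is deprecated from API 21, where the single-argument AudioTrack.setVolume(float) replaces it. The V21/V3 method split above follows the compatibility idiom used throughout this commit: each version-specific framework call lives in its own small method annotated with @TargetApi for the level that introduced it, and is only reachable behind a runtime SDK_INT check. A generic sketch of the idiom, with hypothetical method names:

    import android.annotation.TargetApi;
    import android.os.Build;

    final class VersionGatedCallSketch {
      void doSomething() {
        if (Build.VERSION.SDK_INT >= 21) {
          doSomethingV21(); // Only reached on devices where the API exists.
        } else {
          doSomethingLegacy();
        }
      }

      @TargetApi(21) // Silences lint; the guard above prevents early execution.
      private void doSomethingV21() { /* call the API 21 method here */ }

      private void doSomethingLegacy() { /* call the pre-21 equivalent here */ }
    }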
@@ -732,7 +756,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
     /**
      * Returns true if the audioTimestamp was retrieved from the audioTrack.
      */
-    boolean initTimestamp(AudioTrack audioTrack);
+    boolean update(AudioTrack audioTrack);
 
     long getNanoTime();
 
@@ -746,7 +770,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
   private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {
 
     @Override
-    public boolean initTimestamp(AudioTrack audioTrack) {
+    public boolean update(AudioTrack audioTrack) {
       return false;
     }
 
@@ -778,7 +802,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
     }
 
     @Override
-    public boolean initTimestamp(AudioTrack audioTrack) {
+    public boolean update(AudioTrack audioTrack) {
       return audioTrack.getTimestamp(audioTimestamp);
     }
 
@@ -18,6 +18,7 @@ package com.google.android.exoplayer;
 import com.google.android.exoplayer.drm.DrmSessionManager;
 import com.google.android.exoplayer.util.MimeTypes;
 import com.google.android.exoplayer.util.TraceUtil;
+import com.google.android.exoplayer.util.Util;
 
 import android.annotation.TargetApi;
 import android.media.MediaCodec;
@@ -93,7 +94,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
   private final int maxDroppedFrameCountToNotify;
 
   private Surface surface;
-  private boolean drawnToSurface;
+  private boolean reportedDrawnToSurface;
   private boolean renderedFirstFrame;
   private long joiningDeadlineUs;
   private long droppedFrameAccumulationStartTimeMs;
@@ -270,7 +271,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
   @Override
   protected void onStopped() {
     joiningDeadlineUs = -1;
-    notifyAndResetDroppedFrameCount();
+    maybeNotifyDroppedFrameCount();
     super.onStopped();
   }
 
@@ -303,7 +304,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
       return;
     }
     this.surface = surface;
-    this.drawnToSurface = false;
+    this.reportedDrawnToSurface = false;
     int state = getState();
     if (state == TrackRenderer.STATE_ENABLED || state == TrackRenderer.STATE_STARTED) {
       releaseCodec();
@@ -370,24 +371,37 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
     }
 
     if (!renderedFirstFrame) {
-      renderOutputBuffer(codec, bufferIndex);
+      renderOutputBufferImmediate(codec, bufferIndex);
       renderedFirstFrame = true;
       return true;
     }
 
-    if (getState() == TrackRenderer.STATE_STARTED && earlyUs < 30000) {
-      if (earlyUs > 11000) {
-        // We're a little too early to render the frame. Sleep until the frame can be rendered.
-        // Note: The 11ms threshold was chosen fairly arbitrarily.
-        try {
-          // Subtracting 10000 rather than 11000 ensures that the sleep time will be at least 1ms.
-          Thread.sleep((earlyUs - 10000) / 1000);
-        } catch (InterruptedException e) {
-          Thread.currentThread().interrupt();
+    if (getState() != TrackRenderer.STATE_STARTED) {
+      return false;
+    }
+
+    if (Util.SDK_INT >= 21) {
+      // Let the underlying framework time the release.
+      if (earlyUs < 50000) {
+        renderOutputBufferTimedV21(codec, bufferIndex, System.nanoTime() + (earlyUs * 1000L));
+        return true;
+      }
+    } else {
+      // We need to time the release ourselves.
+      if (earlyUs < 30000) {
+        if (earlyUs > 11000) {
+          // We're a little too early to render the frame. Sleep until the frame can be rendered.
+          // Note: The 11ms threshold was chosen fairly arbitrarily.
+          try {
+            // Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
+            Thread.sleep((earlyUs - 10000) / 1000);
+          } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+          }
         }
+        renderOutputBufferImmediate(codec, bufferIndex);
+        return true;
       }
-      renderOutputBuffer(codec, bufferIndex);
-      return true;
     }
 
     // We're either not playing, or it's not time to render the frame yet.
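The branch added above uses MediaCodec.releaseOutputBuffer(int, long), new in API 21, which takes a render timestamp in the System.nanoTime() base and lets the codec schedule the render itself; that removes the need to pace releases with Thread.sleep on the playback thread, and allows frames to be handed off up to 50ms ahead of their render time (the window chosen in this change). A minimal sketch (wrapper name is illustrative):

    import android.annotation.TargetApi;
    import android.media.MediaCodec;

    final class TimedReleaseSketch {
      // Sketch only: hand the frame to the codec, due earlyUs microseconds
      // from now; the timestamp is interpreted against System.nanoTime().
      @TargetApi(21)
      static void releaseWhenDue(MediaCodec codec, int bufferIndex, long earlyUs) {
        codec.releaseOutputBuffer(bufferIndex, System.nanoTime() + (earlyUs * 1000L));
      }
    }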
@@ -408,65 +422,84 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
     codecCounters.droppedOutputBufferCount++;
     droppedFrameCount++;
     if (droppedFrameCount == maxDroppedFrameCountToNotify) {
-      notifyAndResetDroppedFrameCount();
+      maybeNotifyDroppedFrameCount();
     }
   }
 
-  private void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
-    if (lastReportedWidth != currentWidth || lastReportedHeight != currentHeight
-        || lastReportedPixelWidthHeightRatio != currentPixelWidthHeightRatio) {
-      lastReportedWidth = currentWidth;
-      lastReportedHeight = currentHeight;
-      lastReportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
-      notifyVideoSizeChanged(currentWidth, currentHeight, currentPixelWidthHeightRatio);
-    }
-    TraceUtil.beginSection("renderVideoBuffer");
+  private void renderOutputBufferImmediate(MediaCodec codec, int bufferIndex) {
+    maybeNotifyVideoSizeChanged();
+    TraceUtil.beginSection("renderVideoBufferImmediate");
     codec.releaseOutputBuffer(bufferIndex, true);
     TraceUtil.endSection();
     codecCounters.renderedOutputBufferCount++;
-    if (!drawnToSurface) {
-      drawnToSurface = true;
-      notifyDrawnToSurface(surface);
-    }
+    maybeNotifyDrawnToSurface();
   }
 
-  private void notifyVideoSizeChanged(final int width, final int height,
-      final float pixelWidthHeightRatio) {
-    if (eventHandler != null && eventListener != null) {
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onVideoSizeChanged(width, height, pixelWidthHeightRatio);
-        }
-      });
-    }
+  @TargetApi(21)
+  private void renderOutputBufferTimedV21(MediaCodec codec, int bufferIndex, long nanoTime) {
+    maybeNotifyVideoSizeChanged();
+    TraceUtil.beginSection("releaseOutputBufferTimed");
+    codec.releaseOutputBuffer(bufferIndex, nanoTime);
+    TraceUtil.endSection();
+    codecCounters.renderedOutputBufferCount++;
+    maybeNotifyDrawnToSurface();
   }
 
-  private void notifyDrawnToSurface(final Surface surface) {
-    if (eventHandler != null && eventListener != null) {
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onDrawnToSurface(surface);
-        }
-      });
-    }
+  private void maybeNotifyVideoSizeChanged() {
+    if (eventHandler == null || eventListener == null
+        || (lastReportedWidth == currentWidth && lastReportedHeight == currentHeight
+        && lastReportedPixelWidthHeightRatio == currentPixelWidthHeightRatio)) {
+      return;
+    }
+    // Make final copies to ensure the runnable reports the correct values.
+    final int currentWidth = this.currentWidth;
+    final int currentHeight = this.currentHeight;
+    final float currentPixelWidthHeightRatio = this.currentPixelWidthHeightRatio;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onVideoSizeChanged(currentWidth, currentHeight, currentPixelWidthHeightRatio);
+      }
+    });
+    // Update the last reported values.
+    lastReportedWidth = currentWidth;
+    lastReportedHeight = currentHeight;
+    lastReportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
   }
 
-  private void notifyAndResetDroppedFrameCount() {
-    if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
-      long now = SystemClock.elapsedRealtime();
-      final int countToNotify = droppedFrameCount;
-      final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
-      droppedFrameCount = 0;
-      droppedFrameAccumulationStartTimeMs = now;
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
-        }
-      });
-    }
+  private void maybeNotifyDrawnToSurface() {
+    if (eventHandler == null || eventListener == null || reportedDrawnToSurface) {
+      return;
+    }
+    // Make a final copy to ensure the runnable reports the correct surface.
+    final Surface surface = this.surface;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onDrawnToSurface(surface);
+      }
+    });
+    // Record that we have reported that the surface has been drawn to.
+    reportedDrawnToSurface = true;
+  }
+
+  private void maybeNotifyDroppedFrameCount() {
+    if (eventHandler == null || eventListener == null || droppedFrameCount == 0) {
+      return;
+    }
+    long now = SystemClock.elapsedRealtime();
+    // Make final copies to ensure the runnable reports the correct values.
+    final int countToNotify = droppedFrameCount;
+    final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
+      }
+    });
+    // Reset the dropped frame tracking.
+    droppedFrameCount = 0;
+    droppedFrameAccumulationStartTimeMs = now;
   }
 
 }
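A detail worth calling out in the rewritten maybeNotify* methods above: each one copies the fields it reports into final locals before posting (deliberately shadowing the field names in maybeNotifyVideoSizeChanged), so the anonymous Runnable captures a snapshot taken on the playback thread rather than re-reading fields that may have changed by the time the event handler runs. A stripped-down sketch of the idiom (listener and field names are hypothetical):

    import android.os.Handler;

    final class SnapshotPostSketch {
      interface Listener {
        void onCount(int count);
      }

      private int count;

      void maybeNotifyCount(Handler handler, final Listener listener) {
        final int countToNotify = count; // Snapshot on the calling thread.
        count = 0;                       // Safe to reset immediately.
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onCount(countToNotify); // Reports the snapshot, not the field.
          }
        });
      }
    }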
@@ -104,14 +104,19 @@ public final class CaptionStyleCompat {
   /**
    * Creates a {@link CaptionStyleCompat} equivalent to a provided {@link CaptionStyle}.
    *
-   * @param style A {@link CaptionStyle}.
+   * @param captionStyle A {@link CaptionStyle}.
    * @return The equivalent {@link CaptionStyleCompat}.
    */
   @TargetApi(19)
-  public static CaptionStyleCompat createFromCaptionStyle(CaptionStyle style) {
-    int windowColor = Util.SDK_INT >= 21 ? getWindowColorV21(style) : Color.TRANSPARENT;
-    return new CaptionStyleCompat(style.foregroundColor, style.backgroundColor, windowColor,
-        style.edgeType, style.edgeColor, style.getTypeface());
+  public static CaptionStyleCompat createFromCaptionStyle(
+      CaptioningManager.CaptionStyle captionStyle) {
+    if (Util.SDK_INT >= 21) {
+      return createFromCaptionStyleV21(captionStyle);
+    } else {
+      // Note - Any caller must be on at least API level 19 or greater (because CaptionStyle did
+      // not exist in earlier API levels).
+      return createFromCaptionStyleV19(captionStyle);
+    }
   }
 
   /**
@@ -132,11 +137,24 @@ public final class CaptionStyleCompat {
     this.typeface = typeface;
   }
 
-  @SuppressWarnings("unused")
+  @TargetApi(19)
+  private static CaptionStyleCompat createFromCaptionStyleV19(
+      CaptioningManager.CaptionStyle captionStyle) {
+    return new CaptionStyleCompat(
+        captionStyle.foregroundColor, captionStyle.backgroundColor, Color.TRANSPARENT,
+        captionStyle.edgeType, captionStyle.edgeColor, captionStyle.getTypeface());
+  }
+
   @TargetApi(21)
-  private static int getWindowColorV21(CaptioningManager.CaptionStyle captionStyle) {
-    // TODO: Uncomment when building against API level 21.
-    return Color.TRANSPARENT; //captionStyle.windowColor;
+  private static CaptionStyleCompat createFromCaptionStyleV21(
+      CaptioningManager.CaptionStyle captionStyle) {
+    return new CaptionStyleCompat(
+        captionStyle.hasForegroundColor() ? captionStyle.foregroundColor : DEFAULT.foregroundColor,
+        captionStyle.hasBackgroundColor() ? captionStyle.backgroundColor : DEFAULT.backgroundColor,
+        captionStyle.hasWindowColor() ? captionStyle.windowColor : DEFAULT.windowColor,
+        captionStyle.hasEdgeType() ? captionStyle.edgeType : DEFAULT.edgeType,
+        captionStyle.hasEdgeColor() ? captionStyle.edgeColor : DEFAULT.edgeColor,
+        captionStyle.getTypeface());
   }
 
 }
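Illustrative usage of the reworked factory, assuming a caller on API 19 or higher that reads the user's preferred style from CaptioningManager (this caller-side code and the import path are assumptions, not part of the commit):

    import android.content.Context;
    import android.view.accessibility.CaptioningManager;
    import com.google.android.exoplayer.text.CaptionStyleCompat;

    final class CaptionStyleUsageSketch {
      // Hypothetical caller: fetch the user's caption style and convert it.
      static CaptionStyleCompat getUserStyle(Context context) {
        CaptioningManager captioningManager =
            (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
        return CaptionStyleCompat.createFromCaptionStyle(captioningManager.getUserStyle());
      }
    }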
@@ -8,5 +8,5 @@
 # project structure.
 
 # Project target.
-target=android-19
+target=android-21
 android.library=true