API level 21 enhancements for ExoPlayer playback.

- Use native frame release timing in video renderer for
  smoother video playback.
- Avoid unnecessary memory copy steps in audio renderer.
- Use non-blocking AudioTrack API.
ojw28 2014-09-25 20:29:44 +01:00
parent dd30632aa1
commit 9cfe5fcf44
8 changed files with 133 additions and 93 deletions
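All of the source changes in this commit follow the same shape: keep the existing code path for API levels 16-20 and branch into the new platform calls only when running on API level 21, using ExoPlayer's Util.SDK_INT constant together with small @TargetApi(21) helper methods. A minimal sketch of that gating pattern, assuming only the public Android SDK (the class and method names below are illustrative, not part of the commit):

import android.annotation.TargetApi;
import android.os.Build;

final class Api21Gate {

  // ExoPlayer's Util.SDK_INT mirrors Build.VERSION.SDK_INT.
  private static final int SDK_INT = Build.VERSION.SDK_INT;

  public void doWork() {
    if (SDK_INT >= 21) {
      // Only reach code that references API level 21 symbols behind the version check.
      doWorkV21();
    } else {
      // The existing pre-21 implementation stays as the fallback.
      doWorkLegacy();
    }
  }

  @TargetApi(21)
  private void doWorkV21() {
    // Call API level 21 methods here.
  }

  private void doWorkLegacy() {
    // Pre-21 behaviour, unchanged.
  }
}

Keeping the 21-only calls inside @TargetApi(21) helper methods keeps them out of code paths that run on older devices and satisfies lint's NewApi check, which is why the renderers below add *V21 methods rather than calling the new APIs inline.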

View File

@@ -19,7 +19,7 @@ android {
     defaultConfig {
         minSdkVersion 16
-        targetSdkVersion 19
+        targetSdkVersion 21
     }
     buildTypes {
         release {

View File

@ -25,7 +25,7 @@
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/> <uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="19"/> <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21"/>
<application <application
android:label="@string/application_name" android:label="@string/application_name"

View File

@@ -8,6 +8,6 @@
 # project structure.
 # Project target.
-target=android-19
+target=android-21
 android.library=false
 android.library.reference.1=../../../library/src/main

View File

@@ -19,7 +19,7 @@ android {
     defaultConfig {
        minSdkVersion 9
-        targetSdkVersion 19
+        targetSdkVersion 21
    }
    buildTypes {

View File

@@ -27,6 +27,6 @@
        the library may be of use on older devices. However, please note that the core video playback
        functionality provided by the library requires API level 16 or greater.
   -->
-  <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="19"/>
+  <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="21"/>
 </manifest>

View File

@@ -563,12 +563,9 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
   @Override
   protected void onDisabled() {
+    super.onDisabled();
+    releaseAudioTrack();
     audioSessionId = 0;
-    try {
-      releaseAudioTrack();
-    } finally {
-      super.onDisabled();
-    }
   }
 
   @Override
@@ -620,42 +617,52 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
         }
       }
-      // Copy {@code buffer} into {@code temporaryBuffer}.
-      // TODO: Bypass this copy step on versions of Android where [redacted] is implemented.
-      if (temporaryBuffer == null || temporaryBuffer.length < bufferInfo.size) {
-        temporaryBuffer = new byte[bufferInfo.size];
-      }
-      buffer.position(bufferInfo.offset);
-      buffer.get(temporaryBuffer, 0, bufferInfo.size);
-      temporaryBufferOffset = 0;
       temporaryBufferSize = bufferInfo.size;
+      buffer.position(bufferInfo.offset);
+      if (Util.SDK_INT < 21) {
+        // Copy {@code buffer} into {@code temporaryBuffer}.
+        if (temporaryBuffer == null || temporaryBuffer.length < bufferInfo.size) {
+          temporaryBuffer = new byte[bufferInfo.size];
+        }
+        buffer.get(temporaryBuffer, 0, bufferInfo.size);
+        temporaryBufferOffset = 0;
+      }
     }
 
     if (audioTrack == null) {
       initAudioTrack();
     }
 
-    // TODO: Don't bother doing this once [redacted] is fixed.
-    // Work out how many bytes we can write without the risk of blocking.
-    int bytesPending = (int) (submittedBytes - getPlaybackHeadPosition() * frameSize);
-    int bytesToWrite = bufferSize - bytesPending;
-    if (bytesToWrite > 0) {
-      bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
-      audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
-      temporaryBufferOffset += bytesToWrite;
-      temporaryBufferSize -= bytesToWrite;
-      submittedBytes += bytesToWrite;
-      if (temporaryBufferSize == 0) {
-        codec.releaseOutputBuffer(bufferIndex, false);
-        codecCounters.renderedOutputBufferCount++;
-        return true;
-      }
-    }
+    int bytesWritten = 0;
+    if (Util.SDK_INT < 21) {
+      // Work out how many bytes we can write without the risk of blocking.
+      int bytesPending = (int) (submittedBytes - getPlaybackHeadPosition() * frameSize);
+      int bytesToWrite = bufferSize - bytesPending;
+      if (bytesToWrite > 0) {
+        bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
+        bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
+        temporaryBufferOffset += bytesWritten;
+      }
+    } else {
+      bytesWritten = writeNonBlockingV21(audioTrack, buffer, temporaryBufferSize);
+    }
+    temporaryBufferSize -= bytesWritten;
+    submittedBytes += bytesWritten;
+    if (temporaryBufferSize == 0) {
+      codec.releaseOutputBuffer(bufferIndex, false);
+      codecCounters.renderedOutputBufferCount++;
+      return true;
+    }
     return false;
   }
 
+  @TargetApi(21)
+  private int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) {
+    return audioTrack.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING);
+  }
+
   /**
    * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as
    * an unsigned 32 bit integer, which also wraps around periodically. This method returns the
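For reference, the writeNonBlockingV21 path added above relies on the ByteBuffer overload of AudioTrack.write introduced in API level 21, which can return after accepting only part of the data instead of blocking. A minimal sketch of the two write paths side by side, assuming an already initialized streaming-mode AudioTrack and a codec output buffer (names are illustrative, not the renderer's actual code):

import android.annotation.TargetApi;
import android.media.AudioTrack;
import java.nio.ByteBuffer;

final class AudioTrackWriteSketch {

  /** Pre-21 path: the data must first be copied into a byte[], and write() may block. */
  int writeLegacy(AudioTrack track, byte[] data, int offset, int size) {
    // The caller limits size to the free space in the track buffer to avoid blocking.
    return track.write(data, offset, size);
  }

  /** API 21+ path: write straight from the codec's ByteBuffer without blocking. */
  @TargetApi(21)
  int writeV21(AudioTrack track, ByteBuffer buffer, int size) {
    // Returns how many bytes were actually accepted; the remainder is retried later,
    // which is why the renderer now tracks bytesWritten rather than bytesToWrite.
    return track.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING);
  }
}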

View File

@@ -18,6 +18,7 @@ package com.google.android.exoplayer;
 import com.google.android.exoplayer.drm.DrmSessionManager;
 import com.google.android.exoplayer.util.MimeTypes;
 import com.google.android.exoplayer.util.TraceUtil;
+import com.google.android.exoplayer.util.Util;
 
 import android.annotation.TargetApi;
 import android.media.MediaCodec;
@@ -93,7 +94,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
   private final int maxDroppedFrameCountToNotify;
 
   private Surface surface;
-  private boolean drawnToSurface;
+  private boolean reportedDrawnToSurface;
   private boolean renderedFirstFrame;
   private long joiningDeadlineUs;
   private long droppedFrameAccumulationStartTimeMs;
@@ -270,7 +271,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
   @Override
   protected void onStopped() {
     joiningDeadlineUs = -1;
-    notifyAndResetDroppedFrameCount();
+    maybeNotifyDroppedFrameCount();
     super.onStopped();
   }
 
@@ -303,7 +304,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
       return;
     }
     this.surface = surface;
-    this.drawnToSurface = false;
+    this.reportedDrawnToSurface = false;
     int state = getState();
     if (state == TrackRenderer.STATE_ENABLED || state == TrackRenderer.STATE_STARTED) {
       releaseCodec();
@@ -369,24 +370,37 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
     }
 
     if (!renderedFirstFrame) {
-      renderOutputBuffer(codec, bufferIndex);
+      renderOutputBufferImmediate(codec, bufferIndex);
       renderedFirstFrame = true;
       return true;
     }
 
     if (getState() == TrackRenderer.STATE_STARTED && earlyUs < 30000) {
-      if (earlyUs > 11000) {
-        // We're a little too early to render the frame. Sleep until the frame can be rendered.
-        // Note: The 11ms threshold was chosen fairly arbitrarily.
-        try {
-          // Subtracting 10000 rather than 11000 ensures that the sleep time will be at least 1ms.
-          Thread.sleep((earlyUs - 10000) / 1000);
-        } catch (InterruptedException e) {
-          Thread.currentThread().interrupt();
-        }
-      }
-      renderOutputBuffer(codec, bufferIndex);
-      return true;
+      if (Util.SDK_INT >= 21) {
+        // Let the underlying framework time the release.
+        if (earlyUs < 50000) {
+          renderOutputBufferTimedV21(codec, bufferIndex, System.nanoTime() + (earlyUs * 1000L));
+          return true;
+        }
+        return false;
+      } else {
+        // We need to time the release ourselves.
+        if (earlyUs < 30000) {
+          if (earlyUs > 11000) {
+            // We're a little too early to render the frame. Sleep until the frame can be rendered.
+            // Note: The 11ms threshold was chosen fairly arbitrarily.
+            try {
+              // Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
+              Thread.sleep((earlyUs - 10000) / 1000);
+            } catch (InterruptedException e) {
+              Thread.currentThread().interrupt();
+            }
+          }
+          renderOutputBufferImmediate(codec, bufferIndex);
+          return true;
        }
+        return false;
+      }
     }
 
     // We're either not playing, or it's not time to render the frame yet.
@@ -407,65 +421,84 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
     codecCounters.droppedOutputBufferCount++;
     droppedFrameCount++;
     if (droppedFrameCount == maxDroppedFrameCountToNotify) {
-      notifyAndResetDroppedFrameCount();
+      maybeNotifyDroppedFrameCount();
     }
   }
 
-  private void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
-    if (lastReportedWidth != currentWidth || lastReportedHeight != currentHeight
-        || lastReportedPixelWidthHeightRatio != currentPixelWidthHeightRatio) {
-      lastReportedWidth = currentWidth;
-      lastReportedHeight = currentHeight;
-      lastReportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
-      notifyVideoSizeChanged(currentWidth, currentHeight, currentPixelWidthHeightRatio);
-    }
-    TraceUtil.beginSection("renderVideoBuffer");
+  private void renderOutputBufferImmediate(MediaCodec codec, int bufferIndex) {
+    maybeNotifyVideoSizeChanged();
+    TraceUtil.beginSection("renderVideoBufferImmediate");
     codec.releaseOutputBuffer(bufferIndex, true);
     TraceUtil.endSection();
     codecCounters.renderedOutputBufferCount++;
-    if (!drawnToSurface) {
-      drawnToSurface = true;
-      notifyDrawnToSurface(surface);
-    }
+    maybeNotifyDrawnToSurface();
   }
 
-  private void notifyVideoSizeChanged(final int width, final int height,
-      final float pixelWidthHeightRatio) {
-    if (eventHandler != null && eventListener != null) {
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onVideoSizeChanged(width, height, pixelWidthHeightRatio);
-        }
-      });
-    }
-  }
-
-  private void notifyDrawnToSurface(final Surface surface) {
-    if (eventHandler != null && eventListener != null) {
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onDrawnToSurface(surface);
-        }
-      });
-    }
-  }
-
-  private void notifyAndResetDroppedFrameCount() {
-    if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
-      long now = SystemClock.elapsedRealtime();
-      final int countToNotify = droppedFrameCount;
-      final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
-      droppedFrameCount = 0;
-      droppedFrameAccumulationStartTimeMs = now;
-      eventHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
-        }
-      });
-    }
+  @TargetApi(21)
+  private void renderOutputBufferTimedV21(MediaCodec codec, int bufferIndex, long nanoTime) {
+    maybeNotifyVideoSizeChanged();
+    TraceUtil.beginSection("releaseOutputBufferTimed");
+    codec.releaseOutputBuffer(bufferIndex, nanoTime);
+    TraceUtil.endSection();
+    codecCounters.renderedOutputBufferCount++;
+    maybeNotifyDrawnToSurface();
+  }
+
+  private void maybeNotifyVideoSizeChanged() {
+    if (eventHandler == null || eventListener == null
+        || (lastReportedWidth == currentWidth && lastReportedHeight == currentHeight
+        && lastReportedPixelWidthHeightRatio == currentPixelWidthHeightRatio)) {
+      return;
+    }
+    // Make final copies to ensure the runnable reports the correct values.
+    final int currentWidth = this.currentWidth;
+    final int currentHeight = this.currentHeight;
+    final float currentPixelWidthHeightRatio = this.currentPixelWidthHeightRatio;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onVideoSizeChanged(currentWidth, currentHeight, currentPixelWidthHeightRatio);
+      }
+    });
+    // Update the last reported values.
+    lastReportedWidth = currentWidth;
+    lastReportedHeight = currentHeight;
+    lastReportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
+  }
+
+  private void maybeNotifyDrawnToSurface() {
+    if (eventHandler == null || eventListener == null || reportedDrawnToSurface) {
+      return;
+    }
+    // Make a final copy to ensure the runnable reports the correct surface.
+    final Surface surface = this.surface;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onDrawnToSurface(surface);
+      }
+    });
+    // Record that we have reported that the surface has been drawn to.
+    reportedDrawnToSurface = true;
+  }
+
+  private void maybeNotifyDroppedFrameCount() {
+    if (eventHandler == null || eventListener == null || droppedFrameCount == 0) {
+      return;
+    }
+    long now = SystemClock.elapsedRealtime();
+    // Make final copies to ensure the runnable reports the correct values.
+    final int countToNotify = droppedFrameCount;
+    final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
+    eventHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
+      }
    });
+    // Reset the dropped frame tracking.
+    droppedFrameCount = 0;
+    droppedFrameAccumulationStartTimeMs = now;
   }
 }
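The video renderer change above hands frame-release timing to the platform on API level 21: the two-argument MediaCodec.releaseOutputBuffer takes the desired render time in nanoseconds on the System.nanoTime() clock, so the renderer no longer needs to sleep until roughly the right moment before releasing. A minimal sketch of that decision, assuming earlyUs holds the number of microseconds until the frame's presentation time (names are illustrative, not the renderer's actual code):

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.os.Build;

final class FrameReleaseSketch {

  /** Releases a decoded frame either immediately or with a platform-timed release. */
  void releaseFrame(MediaCodec codec, int bufferIndex, long earlyUs) {
    if (Build.VERSION.SDK_INT >= 21) {
      // Ask the framework to render the frame at an absolute nanoTime-based timestamp.
      releaseTimedV21(codec, bufferIndex, System.nanoTime() + (earlyUs * 1000L));
    } else {
      // Render now; the caller has already slept until close to the right time.
      codec.releaseOutputBuffer(bufferIndex, true);
    }
  }

  @TargetApi(21)
  private void releaseTimedV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
    codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
  }
}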

View File

@@ -8,5 +8,5 @@
 # project structure.
 # Project target.
-target=android-19
+target=android-21
 android.library=true