Enable SmoothFrameTimeHelper by default.

Context:
- Currently, playback is significantly more juddery with it disabled,
  particularly on AndroidTV.
- We should be able to do the "best" job of this internally, so injection
  doesn't buy anything useful. If someone has a better implementation for
  adjusting the frame release, they should improve the core library.
This commit is contained in:
Oliver Woodman 2015-10-12 12:32:10 +01:00
parent 9b4e9723e5
commit bcb9f8282d
7 changed files with 102 additions and 141 deletions

View File

@ -219,8 +219,8 @@ public class DashRendererBuilder implements RendererBuilder {
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO); DemoPlayer.TYPE_VIDEO);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
mainHandler, player, 50); mainHandler, player, 50);
// Build the audio renderer. // Build the audio renderer.

View File

@ -61,8 +61,8 @@ public class ExtractorRendererBuilder implements RendererBuilder {
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator, ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE); BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(), sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, player.getMainHandler(),
player, 50); player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource, MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context)); null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));

View File

@ -148,8 +148,8 @@ public class HlsRendererBuilder implements RendererBuilder {
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE); variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl, HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO); BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50); sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource, MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context)); null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>( MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(

View File

@ -163,9 +163,9 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO); DemoPlayer.TYPE_VIDEO);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
mainHandler, player, 50); player, 50);
// Build the audio renderer. // Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);

View File

@ -23,6 +23,7 @@ import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.media.MediaCrypto; import android.media.MediaCrypto;
import android.os.Handler; import android.os.Handler;
@ -86,34 +87,6 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
} }
/**
* An interface for fine-grained adjustment of frame release times.
*/
public interface FrameReleaseTimeHelper {
/**
* Enables the helper.
*/
void enable();
/**
* Disables the helper.
*/
void disable();
/**
* Called to make a fine-grained adjustment to a frame release time.
*
* @param framePresentationTimeUs The frame's media presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as
* {@link System#nanoTime()}.
*/
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs);
}
// TODO: Use MediaFormat constants if these get exposed through the API. See [Internal: b/14127601]. // TODO: Use MediaFormat constants if these get exposed through the API. See [Internal: b/14127601].
private static final String KEY_CROP_LEFT = "crop-left"; private static final String KEY_CROP_LEFT = "crop-left";
private static final String KEY_CROP_RIGHT = "crop-right"; private static final String KEY_CROP_RIGHT = "crop-right";
@ -127,7 +100,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
*/ */
public static final int MSG_SET_SURFACE = 1; public static final int MSG_SET_SURFACE = 1;
private final FrameReleaseTimeHelper frameReleaseTimeHelper; private final VideoFrameReleaseTimeHelper frameReleaseTimeHelper;
private final EventListener eventListener; private final EventListener eventListener;
private final long allowedJoiningTimeUs; private final long allowedJoiningTimeUs;
private final int videoScalingMode; private final int videoScalingMode;
@ -152,64 +125,30 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
private float lastReportedPixelWidthHeightRatio; private float lastReportedPixelWidthHeightRatio;
/** /**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples. * @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to * @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}. * {@link MediaCodec#setVideoScalingMode(int)}.
*/ */
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode) { public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode) {
this(source, null, true, videoScalingMode); this(context, source, videoScalingMode, 0);
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, videoScalingMode, 0);
} }
/** /**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples. * @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to * @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}. * {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback. * can attempt to seamlessly join an ongoing playback.
*/ */
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode, public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs) { long allowedJoiningTimeMs) {
this(source, null, true, videoScalingMode, allowedJoiningTimeMs); this(context, source, videoScalingMode, allowedJoiningTimeMs, null, null, -1);
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, videoScalingMode,
allowedJoiningTimeMs, null, null, null, -1);
} }
/** /**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples. * @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to * @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}. * {@link MediaCodec#setVideoScalingMode(int)}.
@ -221,15 +160,20 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* invocations of {@link EventListener#onDroppedFrames(int, long)}. * invocations of {@link EventListener#onDroppedFrames(int, long)}.
*/ */
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode, public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs, Handler eventHandler, EventListener eventListener, long allowedJoiningTimeMs, Handler eventHandler, EventListener eventListener,
int maxDroppedFrameCountToNotify) { int maxDroppedFrameCountToNotify) {
this(source, null, true, videoScalingMode, allowedJoiningTimeMs, null, eventHandler, this(context, source, videoScalingMode, allowedJoiningTimeMs, null, false, eventHandler,
eventListener, maxDroppedFrameCountToNotify); eventListener, maxDroppedFrameCountToNotify);
} }
/** /**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples. * @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required. * content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
@ -237,26 +181,20 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
* begin in parallel with key acquisition. This parameter specifies whether the renderer is * begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager} * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media. * has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
* @param frameReleaseTimeHelper An optional helper to make fine-grained adjustments to frame
* release times. May be null.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* invocations of {@link EventListener#onDroppedFrames(int, long)}. * invocations of {@link EventListener#onDroppedFrames(int, long)}.
*/ */
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager, public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs, long allowedJoiningTimeMs, DrmSessionManager drmSessionManager,
FrameReleaseTimeHelper frameReleaseTimeHelper, Handler eventHandler, boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
EventListener eventListener, int maxDroppedFrameCountToNotify) { int maxDroppedFrameCountToNotify) {
super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener); super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
this.frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context);
this.videoScalingMode = videoScalingMode; this.videoScalingMode = videoScalingMode;
this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000; this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000;
this.frameReleaseTimeHelper = frameReleaseTimeHelper;
this.eventListener = eventListener; this.eventListener = eventListener;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify; this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
joiningDeadlineUs = -1; joiningDeadlineUs = -1;
@ -285,9 +223,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
if (joining && allowedJoiningTimeUs > 0) { if (joining && allowedJoiningTimeUs > 0) {
joiningDeadlineUs = SystemClock.elapsedRealtime() * 1000L + allowedJoiningTimeUs; joiningDeadlineUs = SystemClock.elapsedRealtime() * 1000L + allowedJoiningTimeUs;
} }
if (frameReleaseTimeHelper != null) { frameReleaseTimeHelper.enable();
frameReleaseTimeHelper.enable();
}
} }
@Override @Override
@ -340,9 +276,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
lastReportedWidth = -1; lastReportedWidth = -1;
lastReportedHeight = -1; lastReportedHeight = -1;
lastReportedPixelWidthHeightRatio = -1; lastReportedPixelWidthHeightRatio = -1;
if (frameReleaseTimeHelper != null) { frameReleaseTimeHelper.disable();
frameReleaseTimeHelper.disable();
}
super.onDisabled(); super.onDisabled();
} }
@ -468,14 +402,9 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000); long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);
// Apply a timestamp adjustment, if there is one. // Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs; long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
if (frameReleaseTimeHelper != null) { bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime( earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
} else {
adjustedReleaseTimeNs = unadjustedFrameReleaseTimeNs;
}
if (earlyUs < -30000) { if (earlyUs < -30000) {
// We're more than 30ms late rendering the frame. // We're more than 30ms late rendering the frame.

View File

@ -15,17 +15,17 @@
*/ */
package com.google.android.exoplayer; package com.google.android.exoplayer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer.FrameReleaseTimeHelper;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.Context;
import android.view.Choreographer; import android.view.Choreographer;
import android.view.Choreographer.FrameCallback; import android.view.Choreographer.FrameCallback;
import android.view.WindowManager;
/** /**
* Makes a best effort to adjust frame release timestamps for a smoother visual result. * Makes a best effort to adjust frame release timestamps for a smoother visual result.
*/ */
@TargetApi(16) @TargetApi(16)
public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelper, FrameCallback { public final class VideoFrameReleaseTimeHelper implements FrameCallback {
private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500; private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500;
private static final long MAX_ALLOWED_DRIFT_NS = 20000000; private static final long MAX_ALLOWED_DRIFT_NS = 20000000;
@ -33,32 +33,45 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
private static final long VSYNC_OFFSET_PERCENTAGE = 80; private static final long VSYNC_OFFSET_PERCENTAGE = 80;
private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6; private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6;
private final boolean usePrimaryDisplayVsync; private final boolean useDefaultDisplayVsync;
private final long vsyncDurationNs; private final long vsyncDurationNs;
private final long vsyncOffsetNs; private final long vsyncOffsetNs;
private Choreographer choreographer; private Choreographer choreographer;
private long sampledVsyncTimeNs; private long sampledVsyncTimeNs;
private long lastUnadjustedFrameTimeUs; private long lastFramePresentationTimeUs;
private long adjustedLastFrameTimeNs; private long adjustedLastFrameTimeNs;
private long pendingAdjustedFrameTimeNs; private long pendingAdjustedFrameTimeNs;
private boolean haveSync; private boolean haveSync;
private long syncReleaseTimeNs; private long syncUnadjustedReleaseTimeNs;
private long syncFrameTimeNs; private long syncFramePresentationTimeNs;
private int frameCount; private long frameCount;
/** /**
* @param primaryDisplayRefreshRate The refresh rate of the default display. * Constructs an instance that smoothes frame release but does not snap release to the default
* @param usePrimaryDisplayVsync Whether to snap to the primary display vsync. May not be * display's vsync signal.
* suitable when rendering to secondary displays.
*/ */
public SmoothFrameReleaseTimeHelper( public VideoFrameReleaseTimeHelper() {
float primaryDisplayRefreshRate, boolean usePrimaryDisplayVsync) { this(-1, false);
this.usePrimaryDisplayVsync = usePrimaryDisplayVsync; }
if (usePrimaryDisplayVsync) {
vsyncDurationNs = (long) (1000000000d / primaryDisplayRefreshRate); /**
* Constructs an instance that smoothes frame release and snaps release to the default display's
* vsync signal.
*
* @param context A context from which information about the default display can be retrieved.
*/
public VideoFrameReleaseTimeHelper(Context context) {
this(getDefaultDisplayRefreshRate(context), true);
}
private VideoFrameReleaseTimeHelper(float defaultDisplayRefreshRate,
boolean useDefaultDisplayVsync) {
this.useDefaultDisplayVsync = useDefaultDisplayVsync;
if (useDefaultDisplayVsync) {
vsyncDurationNs = (long) (1000000000d / defaultDisplayRefreshRate);
vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100; vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100;
} else { } else {
vsyncDurationNs = -1; vsyncDurationNs = -1;
@ -66,19 +79,23 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
} }
} }
@Override /**
* Enables the helper.
*/
public void enable() { public void enable() {
haveSync = false; haveSync = false;
if (usePrimaryDisplayVsync) { if (useDefaultDisplayVsync) {
sampledVsyncTimeNs = 0; sampledVsyncTimeNs = 0;
choreographer = Choreographer.getInstance(); choreographer = Choreographer.getInstance();
choreographer.postFrameCallback(this); choreographer.postFrameCallback(this);
} }
} }
@Override /**
* Disables the helper.
*/
public void disable() { public void disable() {
if (usePrimaryDisplayVsync) { if (useDefaultDisplayVsync) {
choreographer.removeFrameCallback(this); choreographer.removeFrameCallback(this);
choreographer = null; choreographer = null;
} }
@ -90,17 +107,25 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS); choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS);
} }
@Override /**
public long adjustReleaseTime(long unadjustedFrameTimeUs, long unadjustedReleaseTimeNs) { * Called to make a fine-grained adjustment to a frame release time.
long unadjustedFrameTimeNs = unadjustedFrameTimeUs * 1000; *
* @param framePresentationTimeUs The frame's media presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as
* {@link System#nanoTime()}.
*/
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
long framePresentationTimeNs = framePresentationTimeUs * 1000;
// Until we know better, the adjustment will be a no-op. // Until we know better, the adjustment will be a no-op.
long adjustedFrameTimeNs = unadjustedFrameTimeNs; long adjustedFrameTimeNs = framePresentationTimeNs;
long adjustedReleaseTimeNs = unadjustedReleaseTimeNs; long adjustedReleaseTimeNs = unadjustedReleaseTimeNs;
if (haveSync) { if (haveSync) {
// See if we've advanced to the next frame. // See if we've advanced to the next frame.
if (unadjustedFrameTimeUs != lastUnadjustedFrameTimeUs) { if (framePresentationTimeUs != lastFramePresentationTimeUs) {
frameCount++; frameCount++;
adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs; adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs;
} }
@ -109,20 +134,22 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
// Calculate the average frame time across all the frames we've seen since the last sync. // Calculate the average frame time across all the frames we've seen since the last sync.
// This will typically give us a frame rate at a finer granularity than the frame times // This will typically give us a frame rate at a finer granularity than the frame times
// themselves (which often only have millisecond granularity). // themselves (which often only have millisecond granularity).
long averageFrameTimeNs = (unadjustedFrameTimeNs - syncFrameTimeNs) / frameCount; long averageFrameDurationNs = (framePresentationTimeNs - syncFramePresentationTimeNs)
/ frameCount;
// Project the adjusted frame time forward using the average. // Project the adjusted frame time forward using the average.
long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameTimeNs; long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs;
if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) { if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
haveSync = false; haveSync = false;
} else { } else {
adjustedFrameTimeNs = candidateAdjustedFrameTimeNs; adjustedFrameTimeNs = candidateAdjustedFrameTimeNs;
adjustedReleaseTimeNs = syncReleaseTimeNs + adjustedFrameTimeNs - syncFrameTimeNs; adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs
- syncFramePresentationTimeNs;
} }
} else { } else {
// We're synced but haven't waited the required number of frames to apply an adjustment. // We're synced but haven't waited the required number of frames to apply an adjustment.
// Check drift anyway. // Check drift anyway.
if (isDriftTooLarge(unadjustedFrameTimeNs, unadjustedReleaseTimeNs)) { if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) {
haveSync = false; haveSync = false;
} }
} }
@ -130,14 +157,14 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
// If we need to sync, do so now. // If we need to sync, do so now.
if (!haveSync) { if (!haveSync) {
syncFrameTimeNs = unadjustedFrameTimeNs; syncFramePresentationTimeNs = framePresentationTimeNs;
syncReleaseTimeNs = unadjustedReleaseTimeNs; syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs;
frameCount = 0; frameCount = 0;
haveSync = true; haveSync = true;
onSynced(); onSynced();
} }
lastUnadjustedFrameTimeUs = unadjustedFrameTimeUs; lastFramePresentationTimeUs = framePresentationTimeUs;
pendingAdjustedFrameTimeNs = adjustedFrameTimeNs; pendingAdjustedFrameTimeNs = adjustedFrameTimeNs;
if (sampledVsyncTimeNs == 0) { if (sampledVsyncTimeNs == 0) {
@ -155,8 +182,8 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
} }
private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) { private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) {
long elapsedFrameTimeNs = frameTimeNs - syncFrameTimeNs; long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs;
long elapsedReleaseTimeNs = releaseTimeNs - syncReleaseTimeNs; long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs;
return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS; return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS;
} }
@ -177,4 +204,9 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs; return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs;
} }
private static float getDefaultDisplayRefreshRate(Context context) {
WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
return manager.getDefaultDisplay().getRefreshRate();
}
} }

View File

@ -217,7 +217,7 @@ public final class H264DashTest extends ActivityInstrumentationTestCase2<HostAct
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID,
MIN_LOADABLE_RETRY_COUNT); MIN_LOADABLE_RETRY_COUNT);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer( MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(host,
videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, handler, logger, 50); videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, handler, logger, 50);
videoCounters = videoRenderer.codecCounters; videoCounters = videoRenderer.codecCounters;
player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface); player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);