Enable SmoothFrameTimeHelper by default.
Context:
- Currently, playback is significantly more juddery with it disabled, particularly on AndroidTV.
- We should be able to do the "best" job of this internally, so injection doesn't buy anything useful. If someone has a better implementation for adjusting the frame release, they should improve the core library.
parent 9b4e9723e5
commit bcb9f8282d
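After this change the demo RendererBuilders pass a Context to the video renderer and no longer inject a frame release helper. A minimal sketch of a call site, using placeholder variable names (context, videoSampleSource, drmSessionManager, mainHandler, player) borrowed from the builders changed below:

// Sketch only: variable names are placeholders in the style of the demo builders in this diff.
// The renderer now takes a Context and constructs its VideoFrameReleaseTimeHelper internally,
// so there is no FrameReleaseTimeHelper constructor argument any more.
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
    MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
    mainHandler, player, 50);

For clear (non-DRM) content, the shorter overload without the DrmSessionManager arguments can be used instead, as in ExtractorRendererBuilder and HlsRendererBuilder below.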
@@ -219,8 +219,8 @@ public class DashRendererBuilder implements RendererBuilder {
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
mainHandler, player, 50);

// Build the audio renderer.
@@ -61,8 +61,8 @@ public class ExtractorRendererBuilder implements RendererBuilder {
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, player.getMainHandler(),
player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
@@ -148,8 +148,8 @@ public class HlsRendererBuilder implements RendererBuilder {
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
@@ -163,9 +163,9 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
mainHandler, player, 50);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
player, 50);

// Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
@@ -23,6 +23,7 @@ import com.google.android.exoplayer.util.Util;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.os.Handler;
@@ -86,34 +87,6 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {

}

/**
* An interface for fine-grained adjustment of frame release times.
*/
public interface FrameReleaseTimeHelper {

/**
* Enables the helper.
*/
void enable();

/**
* Disables the helper.
*/
void disable();

/**
* Called to make a fine-grained adjustment to a frame release time.
*
* @param framePresentationTimeUs The frame's media presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as
* {@link System#nanoTime()}.
*/
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs);

}

// TODO: Use MediaFormat constants if these get exposed through the API. See [Internal: b/14127601].
private static final String KEY_CROP_LEFT = "crop-left";
private static final String KEY_CROP_RIGHT = "crop-right";
@@ -127,7 +100,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
*/
public static final int MSG_SET_SURFACE = 1;

private final FrameReleaseTimeHelper frameReleaseTimeHelper;
private final VideoFrameReleaseTimeHelper frameReleaseTimeHelper;
private final EventListener eventListener;
private final long allowedJoiningTimeUs;
private final int videoScalingMode;
@@ -152,64 +125,30 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
private float lastReportedPixelWidthHeightRatio;

/**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode) {
this(source, null, true, videoScalingMode);
}

/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, videoScalingMode, 0);
public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode) {
this(context, source, videoScalingMode, 0);
}

/**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode,
public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs) {
this(source, null, true, videoScalingMode, allowedJoiningTimeMs);
}

/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, videoScalingMode,
allowedJoiningTimeMs, null, null, null, -1);
this(context, source, videoScalingMode, allowedJoiningTimeMs, null, null, -1);
}

/**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
@@ -221,15 +160,20 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* invocations of {@link EventListener#onDroppedFrames(int, long)}.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode,
public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs, Handler eventHandler, EventListener eventListener,
int maxDroppedFrameCountToNotify) {
this(source, null, true, videoScalingMode, allowedJoiningTimeMs, null, eventHandler,
this(context, source, videoScalingMode, allowedJoiningTimeMs, null, false, eventHandler,
eventListener, maxDroppedFrameCountToNotify);
}

/**
* @param context A context.
* @param source The upstream source from which the renderer obtains samples.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
@@ -237,26 +181,20 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param videoScalingMode The scaling mode to pass to
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
* @param frameReleaseTimeHelper An optional helper to make fine-grained adjustments to frame
* release times. May be null.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
* invocations of {@link EventListener#onDroppedFrames(int, long)}.
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs,
FrameReleaseTimeHelper frameReleaseTimeHelper, Handler eventHandler,
EventListener eventListener, int maxDroppedFrameCountToNotify) {
public MediaCodecVideoTrackRenderer(Context context, SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
int maxDroppedFrameCountToNotify) {
super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
this.frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context);
this.videoScalingMode = videoScalingMode;
this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000;
this.frameReleaseTimeHelper = frameReleaseTimeHelper;
this.eventListener = eventListener;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
joiningDeadlineUs = -1;
@@ -285,9 +223,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
if (joining && allowedJoiningTimeUs > 0) {
joiningDeadlineUs = SystemClock.elapsedRealtime() * 1000L + allowedJoiningTimeUs;
}
if (frameReleaseTimeHelper != null) {
frameReleaseTimeHelper.enable();
}
frameReleaseTimeHelper.enable();
}

@Override
@@ -340,9 +276,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
lastReportedWidth = -1;
lastReportedHeight = -1;
lastReportedPixelWidthHeightRatio = -1;
if (frameReleaseTimeHelper != null) {
frameReleaseTimeHelper.disable();
}
frameReleaseTimeHelper.disable();
super.onDisabled();
}
@@ -468,14 +402,9 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);

// Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs;
if (frameReleaseTimeHelper != null) {
adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
} else {
adjustedReleaseTimeNs = unadjustedFrameReleaseTimeNs;
}
long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;

if (earlyUs < -30000) {
// We're more than 30ms late rendering the frame.
@@ -15,17 +15,17 @@
*/
package com.google.android.exoplayer;

import com.google.android.exoplayer.MediaCodecVideoTrackRenderer.FrameReleaseTimeHelper;

import android.annotation.TargetApi;
import android.content.Context;
import android.view.Choreographer;
import android.view.Choreographer.FrameCallback;
import android.view.WindowManager;

/**
* Makes a best effort to adjust frame release timestamps for a smoother visual result.
*/
@TargetApi(16)
public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelper, FrameCallback {
public final class VideoFrameReleaseTimeHelper implements FrameCallback {

private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500;
private static final long MAX_ALLOWED_DRIFT_NS = 20000000;
@@ -33,32 +33,45 @@ public final class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelpe
private static final long VSYNC_OFFSET_PERCENTAGE = 80;
private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6;

private final boolean usePrimaryDisplayVsync;
private final boolean useDefaultDisplayVsync;
private final long vsyncDurationNs;
private final long vsyncOffsetNs;

private Choreographer choreographer;
private long sampledVsyncTimeNs;

private long lastUnadjustedFrameTimeUs;
private long lastFramePresentationTimeUs;
private long adjustedLastFrameTimeNs;
private long pendingAdjustedFrameTimeNs;

private boolean haveSync;
private long syncReleaseTimeNs;
private long syncFrameTimeNs;
private int frameCount;
private long syncUnadjustedReleaseTimeNs;
private long syncFramePresentationTimeNs;
private long frameCount;

/**
* @param primaryDisplayRefreshRate The refresh rate of the default display.
* @param usePrimaryDisplayVsync Whether to snap to the primary display vsync. May not be
* suitable when rendering to secondary displays.
* Constructs an instance that smoothes frame release but does not snap release to the default
* display's vsync signal.
*/
public SmoothFrameReleaseTimeHelper(
float primaryDisplayRefreshRate, boolean usePrimaryDisplayVsync) {
this.usePrimaryDisplayVsync = usePrimaryDisplayVsync;
if (usePrimaryDisplayVsync) {
vsyncDurationNs = (long) (1000000000d / primaryDisplayRefreshRate);
public VideoFrameReleaseTimeHelper() {
this(-1, false);
}

/**
* Constructs an instance that smoothes frame release and snaps release to the default display's
* vsync signal.
*
* @param context A context from which information about the default display can be retrieved.
*/
public VideoFrameReleaseTimeHelper(Context context) {
this(getDefaultDisplayRefreshRate(context), true);
}

private VideoFrameReleaseTimeHelper(float defaultDisplayRefreshRate,
boolean useDefaultDisplayVsync) {
this.useDefaultDisplayVsync = useDefaultDisplayVsync;
if (useDefaultDisplayVsync) {
vsyncDurationNs = (long) (1000000000d / defaultDisplayRefreshRate);
vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100;
} else {
vsyncDurationNs = -1;
@@ -66,19 +79,23 @@ public final class SmoothFrameReleaseTimeHelpe
}
}

@Override
/**
* Enables the helper.
*/
public void enable() {
haveSync = false;
if (usePrimaryDisplayVsync) {
if (useDefaultDisplayVsync) {
sampledVsyncTimeNs = 0;
choreographer = Choreographer.getInstance();
choreographer.postFrameCallback(this);
}
}

@Override
/**
* Disables the helper.
*/
public void disable() {
if (usePrimaryDisplayVsync) {
if (useDefaultDisplayVsync) {
choreographer.removeFrameCallback(this);
choreographer = null;
}
@@ -90,17 +107,25 @@ public final class SmoothFrameReleaseTimeHelpe
choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS);
}

@Override
public long adjustReleaseTime(long unadjustedFrameTimeUs, long unadjustedReleaseTimeNs) {
long unadjustedFrameTimeNs = unadjustedFrameTimeUs * 1000;
/**
* Called to make a fine-grained adjustment to a frame release time.
*
* @param framePresentationTimeUs The frame's media presentation time, in microseconds.
* @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
* the same time base as {@link System#nanoTime()}.
* @return An adjusted release time for the frame, in nanoseconds and in the same time base as
* {@link System#nanoTime()}.
*/
public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
long framePresentationTimeNs = framePresentationTimeUs * 1000;

// Until we know better, the adjustment will be a no-op.
long adjustedFrameTimeNs = unadjustedFrameTimeNs;
long adjustedFrameTimeNs = framePresentationTimeNs;
long adjustedReleaseTimeNs = unadjustedReleaseTimeNs;

if (haveSync) {
// See if we've advanced to the next frame.
if (unadjustedFrameTimeUs != lastUnadjustedFrameTimeUs) {
if (framePresentationTimeUs != lastFramePresentationTimeUs) {
frameCount++;
adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs;
}
@@ -109,20 +134,22 @@ public final class SmoothFrameReleaseTimeHelpe
// Calculate the average frame time across all the frames we've seen since the last sync.
// This will typically give us a frame rate at a finer granularity than the frame times
// themselves (which often only have millisecond granularity).
long averageFrameTimeNs = (unadjustedFrameTimeNs - syncFrameTimeNs) / frameCount;
long averageFrameDurationNs = (framePresentationTimeNs - syncFramePresentationTimeNs)
/ frameCount;
// Project the adjusted frame time forward using the average.
long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameTimeNs;
long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs;

if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
haveSync = false;
} else {
adjustedFrameTimeNs = candidateAdjustedFrameTimeNs;
adjustedReleaseTimeNs = syncReleaseTimeNs + adjustedFrameTimeNs - syncFrameTimeNs;
adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs
- syncFramePresentationTimeNs;
}
} else {
// We're synced but haven't waited the required number of frames to apply an adjustment.
// Check drift anyway.
if (isDriftTooLarge(unadjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) {
haveSync = false;
}
}
@@ -130,14 +157,14 @@ public final class SmoothFrameReleaseTimeHelpe

// If we need to sync, do so now.
if (!haveSync) {
syncFrameTimeNs = unadjustedFrameTimeNs;
syncReleaseTimeNs = unadjustedReleaseTimeNs;
syncFramePresentationTimeNs = framePresentationTimeNs;
syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs;
frameCount = 0;
haveSync = true;
onSynced();
}

lastUnadjustedFrameTimeUs = unadjustedFrameTimeUs;
lastFramePresentationTimeUs = framePresentationTimeUs;
pendingAdjustedFrameTimeNs = adjustedFrameTimeNs;

if (sampledVsyncTimeNs == 0) {
@@ -155,8 +182,8 @@ public final class SmoothFrameReleaseTimeHelpe
}

private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) {
long elapsedFrameTimeNs = frameTimeNs - syncFrameTimeNs;
long elapsedReleaseTimeNs = releaseTimeNs - syncReleaseTimeNs;
long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs;
long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs;
return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS;
}
@@ -177,4 +204,9 @@ public final class SmoothFrameReleaseTimeHelpe
return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs;
}

private static float getDefaultDisplayRefreshRate(Context context) {
WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
return manager.getDefaultDisplay().getRefreshRate();
}

}
@@ -217,7 +217,7 @@ public final class H264DashTest extends ActivityInstrumentationTestCase2<HostAct
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID,
MIN_LOADABLE_RETRY_COUNT);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(host,
videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, handler, logger, 50);
videoCounters = videoRenderer.codecCounters;
player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
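For reference, the release-time smoothing that is now always applied can be read out of the hunks above. The following is a consolidated sketch of the new VideoFrameReleaseTimeHelper.adjustReleaseTime logic, wrapped in a hypothetical illustration class. It omits the Choreographer plumbing and the final snap-to-vsync step, and unchanged context lines that are not visible in the hunks (such as the MIN_FRAMES_FOR_ADJUSTMENT check) are reconstructed, so treat it as an illustration rather than the exact file contents.

// Hypothetical illustration class, assembled from the diff above; not the real
// VideoFrameReleaseTimeHelper. Vsync snapping and Choreographer sampling are omitted,
// and reconstructed context lines may differ slightly from the actual file.
final class FrameReleaseSmoothingSketch {

  private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6;
  private static final long MAX_ALLOWED_DRIFT_NS = 20000000;

  private boolean haveSync;
  private long lastFramePresentationTimeUs;
  private long adjustedLastFrameTimeNs;
  private long pendingAdjustedFrameTimeNs;
  private long syncFramePresentationTimeNs;
  private long syncUnadjustedReleaseTimeNs;
  private long frameCount;

  public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
    long framePresentationTimeNs = framePresentationTimeUs * 1000;
    // Until we know better, the adjustment is a no-op.
    long adjustedFrameTimeNs = framePresentationTimeNs;
    long adjustedReleaseTimeNs = unadjustedReleaseTimeNs;
    if (haveSync) {
      // See if we've advanced to the next frame.
      if (framePresentationTimeUs != lastFramePresentationTimeUs) {
        frameCount++;
        adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs;
      }
      if (frameCount >= MIN_FRAMES_FOR_ADJUSTMENT) {
        // The average frame duration since the sync point gives the frame rate at a finer
        // granularity than the (often millisecond-granularity) presentation timestamps.
        long averageFrameDurationNs =
            (framePresentationTimeNs - syncFramePresentationTimeNs) / frameCount;
        // Project the adjusted frame time forward using the average, then map it back to a
        // release time relative to the sync point.
        long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs;
        if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
          haveSync = false;
        } else {
          adjustedFrameTimeNs = candidateAdjustedFrameTimeNs;
          adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs
              - syncFramePresentationTimeNs;
        }
      } else if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) {
        // Synced, but not enough frames yet to trust an adjustment; still check drift.
        haveSync = false;
      }
    }
    // (Re)sync if needed: remember the mapping between presentation time and release time.
    if (!haveSync) {
      syncFramePresentationTimeNs = framePresentationTimeNs;
      syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs;
      frameCount = 0;
      haveSync = true;
    }
    lastFramePresentationTimeUs = framePresentationTimeUs;
    pendingAdjustedFrameTimeNs = adjustedFrameTimeNs;
    // The real class additionally snaps adjustedReleaseTimeNs to the nearest display vsync
    // when a Choreographer-sampled vsync time is available.
    return adjustedReleaseTimeNs;
  }

  private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) {
    long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs;
    long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs;
    return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS;
  }
}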