Video processing offset in AnalyticsListener

PiperOrigin-RevId: 295146481

parent 5104548204
commit 0a612ce34a
@@ -3,6 +3,8 @@

### dev-v2 (not yet released) ###

* Core library:
  * Add API in `AnalyticsListener` to report video frame processing offset.
    `MediaCodecVideoRenderer` reports the event.
  * Add fields `videoFrameProcessingOffsetUsSum` and
    `videoFrameProcessingOffsetUsCount` in `DecoderCounters` to compute
    the average video frame processing offset.
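For orientation, a minimal sketch (not part of this change) of how an application might consume the new callback. The class name `FrameOffsetLogger`, the log tag, and the `player` variable are illustrative assumptions; `AnalyticsListener.onVideoFrameProcessingOffset` and `SimpleExoPlayer.addAnalyticsListener` are the real entry points.

```java
import android.util.Log;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.analytics.AnalyticsListener;

/** Hypothetical listener that logs the average video frame processing offset. */
final class FrameOffsetLogger implements AnalyticsListener {

  @Override
  public void onVideoFrameProcessingOffset(
      EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {
    if (frameCount == 0) {
      return;
    }
    // A positive average means frames were, on average, processed ahead of the playback position.
    long averageOffsetUs = totalProcessingOffsetUs / frameCount;
    Log.d("FrameOffsetLogger", "Average video frame processing offset: " + averageOffsetUs + " us");
  }
}

// Usage (assuming `player` is an existing SimpleExoPlayer instance):
// player.addAnalyticsListener(new FrameOffsetLogger());
```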
@@ -1775,6 +1775,15 @@ public class SimpleExoPlayer extends BasePlayer
      videoDecoderCounters = null;
    }

    @Override
    public void onVideoFrameProcessingOffset(
        long totalProcessingOffsetUs, int frameCount, Format format) {
      for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
        videoDebugListener.onVideoFrameProcessingOffset(
            totalProcessingOffsetUs, frameCount, format);
      }
    }

    // AudioRendererEventListener implementation

    @Override
@@ -281,6 +281,15 @@ public class AnalyticsCollector
    }
  }

  @Override
  public final void onVideoFrameProcessingOffset(
      long totalProcessingOffsetUs, int frameCount, Format format) {
    EventTime eventTime = generatePlayingMediaPeriodEventTime();
    for (AnalyticsListener listener : listeners) {
      listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount, format);
    }
  }

  // VideoListener implementation.

  @Override
@@ -461,6 +461,30 @@ public interface AnalyticsListener {
   */
  default void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {}

  /**
   * Called when there is an update to the video frame processing offset reported by a video
   * renderer.
   *
   * <p>Video processing offset represents how early a video frame is processed compared to the
   * player's current position. For each video frame, the offset is calculated as <em>P<sub>vf</sub>
   * - P<sub>pl</sub></em> where <em>P<sub>vf</sub></em> is the presentation timestamp of the video
   * frame and <em>P<sub>pl</sub></em> is the current position of the player. Positive values
   * indicate the frame was processed early enough whereas negative values indicate that the
   * player's position had progressed beyond the frame's timestamp when the frame was processed (and
   * the frame was probably dropped).
   *
   * <p>The renderer reports the sum of video processing offset samples (one sample per processed
   * video frame: dropped, skipped or rendered) and the total number of samples (frames).
   *
   * @param eventTime The event time.
   * @param totalProcessingOffsetUs The sum of video frame processing offset samples for all video
   *     frames processed by the renderer in microseconds.
   * @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}.
   * @param format The current output {@link Format} rendered by the video renderer.
   */
  default void onVideoFrameProcessingOffset(
      EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {}

  /**
   * Called before a frame is rendered for the first time since setting the surface, and each time
   * there's a change in the size or pixel aspect ratio of the video being rendered.
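To restate the Javadoc formula above as code, a hedged worked example of the per-frame quantity P<sub>vf</sub> - P<sub>pl</sub>; the class and both timestamp values are assumptions, not code from the commit.

```java
/** Illustrative only: the per-frame offset described by the Javadoc above. */
final class OffsetExample {
  public static void main(String[] args) {
    long framePresentationTimeUs = 2_040_000; // P_vf, assumed value.
    long playerPositionUs = 2_000_000; // P_pl, assumed value.
    long offsetUs = framePresentationTimeUs - playerPositionUs; // +40_000 us: early enough.
    // A negative result would mean the position had already passed the frame's timestamp,
    // i.e. the frame was late and probably dropped.
    System.out.println("video frame processing offset: " + offsetUs + " us");
  }
}
```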
@@ -15,8 +15,6 @@
 */
package com.google.android.exoplayer2.decoder;

import com.google.android.exoplayer2.util.Util;

/**
 * Maintains decoder event counts, for debugging purposes only.
 * <p>
@@ -84,14 +82,14 @@ public final class DecoderCounters {
   * <p>Note: Use {@link #addVideoFrameProcessingOffsetSample(long)} to update this field instead of
   * updating it directly.
   */
  public long videoFrameProcessingOffsetUsSum;
  public long totalVideoFrameProcessingOffsetUs;

  /**
   * The number of video frame processing offset samples added.
   *
   * <p>Note: Use {@link #addVideoFrameProcessingOffsetSample(long)} to update this field instead of
   * updating it directly.
   */
  public int videoFrameProcessingOffsetUsCount;
  public int videoFrameProcessingOffsetCount;

  /**
   * Should be called to ensure counter values are made visible across threads. The playback thread
@@ -121,15 +119,14 @@ public final class DecoderCounters {
    droppedToKeyframeCount += other.droppedToKeyframeCount;

    addVideoFrameProcessingOffsetSamples(
        other.videoFrameProcessingOffsetUsSum, other.videoFrameProcessingOffsetUsCount);
        other.totalVideoFrameProcessingOffsetUs, other.videoFrameProcessingOffsetCount);
  }

  /**
   * Adds a video frame processing offset sample to {@link #videoFrameProcessingOffsetUsSum} and
   * increases {@link #videoFrameProcessingOffsetUsCount} by one.
   * Adds a video frame processing offset sample to {@link #totalVideoFrameProcessingOffsetUs} and
   * increases {@link #videoFrameProcessingOffsetCount} by one.
   *
   * <p>This method checks if adding {@code sampleUs} to {@link #videoFrameProcessingOffsetUsSum}
   * will cause an overflow, in which case this method is a no-op.
   * <p>Convenience method to ensure both fields are updated when adding a sample.
   *
   * @param sampleUs The sample in microseconds.
   */
@@ -138,12 +135,7 @@ public final class DecoderCounters {
  }

  private void addVideoFrameProcessingOffsetSamples(long sampleUs, int count) {
    long overflowFlag = videoFrameProcessingOffsetUsSum > 0 ? Long.MIN_VALUE : Long.MAX_VALUE;
    long newSampleSum =
        Util.addWithOverflowDefault(videoFrameProcessingOffsetUsSum, sampleUs, overflowFlag);
    if (newSampleSum != overflowFlag) {
      videoFrameProcessingOffsetUsCount += count;
      videoFrameProcessingOffsetUsSum = newSampleSum;
    }
    totalVideoFrameProcessingOffsetUs += sampleUs;
    videoFrameProcessingOffsetCount += count;
  }
}
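For reference, a hedged sketch of how the renamed public counters could be read from application code. `OffsetAverageReader` is an illustrative name; `SimpleExoPlayer.getVideoDecoderCounters()` and `DecoderCounters.ensureUpdated()` are existing APIs, and the former may return null before the video renderer is enabled.

```java
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.decoder.DecoderCounters;

final class OffsetAverageReader {
  /** Returns the average video frame processing offset in microseconds, or 0 if no samples yet. */
  static long averageOffsetUs(SimpleExoPlayer player) {
    DecoderCounters counters = player.getVideoDecoderCounters();
    if (counters == null) {
      return 0;
    }
    counters.ensureUpdated(); // Make playback-thread writes visible on the calling thread.
    int sampleCount = counters.videoFrameProcessingOffsetCount;
    return sampleCount == 0 ? 0 : counters.totalVideoFrameProcessingOffsetUs / sampleCount;
  }
}
```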
@@ -679,6 +679,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    return format;
  }

  @Nullable
  protected final Format getCurrentOutputFormat() {
    return outputFormat;
  }

  @Nullable
  protected final MediaCodec getCodec() {
    return codec;
@@ -138,6 +138,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
  private int consecutiveDroppedFrameCount;
  private int buffersInCodecCount;
  private long lastRenderTimeUs;
  private long totalVideoFrameProcessingOffsetUs;
  private int videoFrameProcessingOffsetCount;

  private int pendingRotationDegrees;
  private float pendingPixelWidthHeightRatio;
@@ -510,12 +512,15 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
    droppedFrames = 0;
    droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
    totalVideoFrameProcessingOffsetUs = 0;
    videoFrameProcessingOffsetCount = 0;
  }

  @Override
  protected void onStopped() {
    joiningDeadlineMs = C.TIME_UNSET;
    maybeNotifyDroppedFrames();
    maybeNotifyVideoFrameProcessingOffset();
    super.onStopped();
  }
@@ -751,6 +756,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
                + 1
            : outputMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
    processOutputFormat(codec, mediaFormatWidth, mediaFormatHeight);
    maybeNotifyVideoFrameProcessingOffset();
  }

  @Override
@@ -1216,6 +1222,22 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
    }
  }

  private void maybeNotifyVideoFrameProcessingOffset() {
    Format outputFormat = getCurrentOutputFormat();
    if (outputFormat != null) {
      long totalOffsetDelta =
          decoderCounters.totalVideoFrameProcessingOffsetUs - totalVideoFrameProcessingOffsetUs;
      int countDelta =
          decoderCounters.videoFrameProcessingOffsetCount - videoFrameProcessingOffsetCount;
      if (countDelta != 0) {
        eventDispatcher.reportVideoFrameProcessingOffset(
            totalOffsetDelta, countDelta, outputFormat);
        totalVideoFrameProcessingOffsetUs = decoderCounters.totalVideoFrameProcessingOffsetUs;
        videoFrameProcessingOffsetCount = decoderCounters.videoFrameProcessingOffsetCount;
      }
    }
  }

  private static boolean isBufferLate(long earlyUs) {
    // Class a buffer as late if it should have been presented more than 30 ms ago.
    return earlyUs < -30000;
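The notification above follows a snapshot-delta pattern: the renderer remembers the counter values it last reported and forwards only the increment, so downstream listeners can sum the reported values without double counting. A stand-alone sketch of the same pattern follows; the class name and the `report` method are illustrative, not ExoPlayer API.

```java
/** Illustrative sketch of the snapshot-delta reporting pattern used above. */
final class DeltaNotifier {
  private long lastReportedTotalUs;
  private int lastReportedCount;

  /** Reports only the samples accumulated since the previous successful report. */
  void maybeNotify(long currentTotalUs, int currentCount) {
    long totalDeltaUs = currentTotalUs - lastReportedTotalUs;
    int countDelta = currentCount - lastReportedCount;
    if (countDelta != 0) {
      report(totalDeltaUs, countDelta);
      lastReportedTotalUs = currentTotalUs; // Snapshot so the next report is incremental.
      lastReportedCount = currentCount;
    }
  }

  private void report(long totalDeltaUs, int countDelta) {
    // Hypothetical downstream call, e.g. forwarding to an event dispatcher.
  }
}
```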
@@ -71,6 +71,28 @@ public interface VideoRendererEventListener {
   */
  default void onDroppedFrames(int count, long elapsedMs) {}

  /**
   * Called to report the video processing offset of video frames processed by the video renderer.
   *
   * <p>Video processing offset represents how early a video frame is processed compared to the
   * player's current position. For each video frame, the offset is calculated as <em>P<sub>vf</sub>
   * - P<sub>pl</sub></em> where <em>P<sub>vf</sub></em> is the presentation timestamp of the video
   * frame and <em>P<sub>pl</sub></em> is the current position of the player. Positive values
   * indicate the frame was processed early enough whereas negative values indicate that the
   * player's position had progressed beyond the frame's timestamp when the frame was processed (and
   * the frame was probably dropped).
   *
   * <p>The renderer reports the sum of video processing offset samples (one sample per processed
   * video frame: dropped, skipped or rendered) and the total number of samples.
   *
   * @param totalProcessingOffsetUs The sum of all video frame processing offset samples for the
   *     video frames processed by the renderer in microseconds.
   * @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}.
   * @param format The {@link Format} that is currently output.
   */
  default void onVideoFrameProcessingOffset(
      long totalProcessingOffsetUs, int frameCount, Format format) {}

  /**
   * Called before a frame is rendered for the first time since setting the surface, and each time
   * there's a change in the size, rotation or pixel aspect ratio of the video being rendered.
@@ -159,6 +181,17 @@ public interface VideoRendererEventListener {
      }
    }

    /** Invokes {@link VideoRendererEventListener#onVideoFrameProcessingOffset}. */
    public void reportVideoFrameProcessingOffset(
        long totalProcessingOffsetUs, int frameCount, Format format) {
      if (handler != null) {
        handler.post(
            () ->
                castNonNull(listener)
                    .onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount, format));
      }
    }

    /** Invokes {@link VideoRendererEventListener#onVideoSizeChanged(int, int, int, float)}. */
    public void videoSizeChanged(
        int width,
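A brief worked example of the reported quantities, using the same values as the fake renderer in the test further down (400,000 µs over 10 frames); the average is simply the sum divided by the count.

```java
/** Worked example with assumed values: 10 frames whose offsets sum to 400,000 us. */
final class AverageOffsetExample {
  public static void main(String[] args) {
    long totalProcessingOffsetUs = 400_000;
    int frameCount = 10;
    long averageOffsetUs = totalProcessingOffsetUs / frameCount; // 40_000 us = 40 ms.
    // On average, frames were processed 40 ms ahead of the playback position;
    // a negative average would indicate late (and likely dropped) frames.
    System.out.println("average offset: " + averageOffsetUs + " us");
  }
}
```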
@@ -107,6 +107,7 @@ public final class AnalyticsCollectorTest {
  private static final int EVENT_DRM_KEYS_REMOVED = 36;
  private static final int EVENT_DRM_SESSION_ACQUIRED = 37;
  private static final int EVENT_DRM_SESSION_RELEASED = 38;
  private static final int EVENT_VIDEO_FRAME_PROCESSING_OFFSET = 39;

  private static final int TIMEOUT_MS = 10000;
  private static final Timeline SINGLE_PERIOD_TIMELINE = new FakeTimeline(/* windowCount= */ 1);
@@ -181,6 +182,7 @@ public final class AnalyticsCollectorTest {
    assertThat(listener.getEvents(EVENT_DROPPED_VIDEO_FRAMES)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_VIDEO_SIZE_CHANGED)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET)).containsExactly(period0);
    listener.assertNoMoreEvents();
  }
@@ -241,6 +243,7 @@ public final class AnalyticsCollectorTest {
    assertThat(listener.getEvents(EVENT_DROPPED_VIDEO_FRAMES)).containsExactly(period1);
    assertThat(listener.getEvents(EVENT_VIDEO_SIZE_CHANGED)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET)).containsExactly(period1);
    listener.assertNoMoreEvents();
  }
@@ -295,6 +298,7 @@ public final class AnalyticsCollectorTest {
    assertThat(listener.getEvents(EVENT_DROPPED_VIDEO_FRAMES)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_VIDEO_SIZE_CHANGED)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME)).containsExactly(period0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET)).containsExactly(period0);
    listener.assertNoMoreEvents();
  }
@@ -442,6 +446,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(period0, period1Seq2, period1Seq2);
    assertThat(listener.getEvents(EVENT_VIDEO_SIZE_CHANGED)).containsExactly(period0, period0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME)).containsExactly(period0, period0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(period0, period1Seq2, period1Seq2);
    listener.assertNoMoreEvents();
  }
@@ -518,6 +524,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(period0Seq0, period0Seq1);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME))
        .containsExactly(period0Seq0, period0Seq1);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(period0Seq1);
    listener.assertNoMoreEvents();
  }
@@ -587,6 +595,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(period0Seq0, period0Seq0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME))
        .containsExactly(period0Seq0, period0Seq0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(period0Seq0);
    listener.assertNoMoreEvents();
  }
@@ -665,6 +675,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(window0Period1Seq0, period1Seq0);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME))
        .containsExactly(window0Period1Seq0, period1Seq0);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(window0Period1Seq0);
    listener.assertNoMoreEvents();
  }
@@ -735,6 +747,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(period0Seq0, period0Seq1);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME))
        .containsExactly(period0Seq0, period0Seq1);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(period0Seq1);
    listener.assertNoMoreEvents();
  }
@@ -952,6 +966,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(contentAfterPreroll, contentAfterMidroll, contentAfterPostroll);
    assertThat(listener.getEvents(EVENT_VIDEO_SIZE_CHANGED)).containsExactly(prerollAd);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME)).containsExactly(prerollAd);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(contentAfterPreroll, contentAfterMidroll, contentAfterPostroll);
    listener.assertNoMoreEvents();
  }
@@ -1069,6 +1085,8 @@ public final class AnalyticsCollectorTest {
        .containsExactly(contentBeforeMidroll, midrollAd);
    assertThat(listener.getEvents(EVENT_RENDERED_FIRST_FRAME))
        .containsExactly(contentBeforeMidroll, midrollAd);
    assertThat(listener.getEvents(EVENT_VIDEO_FRAME_PROCESSING_OFFSET))
        .containsExactly(contentAfterMidroll);
    listener.assertNoMoreEvents();
  }
@@ -1196,6 +1214,8 @@ public final class AnalyticsCollectorTest {
    protected void onStopped() throws ExoPlaybackException {
      super.onStopped();
      eventDispatcher.droppedFrames(/* droppedFrameCount= */ 0, /* elapsedMs= */ 0);
      eventDispatcher.reportVideoFrameProcessingOffset(
          /* totalProcessingOffsetUs= */ 400000, /* frameCount= */ 10, this.format);
    }

    @Override
@@ -1561,6 +1581,12 @@ public final class AnalyticsCollectorTest {
      reportedEvents.add(new ReportedEvent(EVENT_DRM_SESSION_RELEASED, eventTime));
    }

    @Override
    public void onVideoFrameProcessingOffset(
        EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {
      reportedEvents.add(new ReportedEvent(EVENT_VIDEO_FRAME_PROCESSING_OFFSET, eventTime));
    }

    private static final class ReportedEvent {

      public final int eventType;
@@ -1,96 +0,0 @@
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.decoder;

import static com.google.common.truth.Truth.assertThat;

import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Unit tests for {@link DecoderCounters}. */
@RunWith(AndroidJUnit4.class)
public class DecoderCountersTest {

  private DecoderCounters decoderCounters;

  @Before
  public void setUp() {
    decoderCounters = new DecoderCounters();
  }

  @Test
  public void maybeAddVideoFrameProcessingOffsetSample_addsSamples() {
    long sampleSum = 0;
    for (int i = 0; i < 100; i++) {
      long sample = (i + 10) * 10L;
      sampleSum += sample;
      decoderCounters.addVideoFrameProcessingOffsetSample(sample);
    }

    assertThat(decoderCounters.videoFrameProcessingOffsetUsSum).isEqualTo(sampleSum);
    assertThat(decoderCounters.videoFrameProcessingOffsetUsCount).isEqualTo(100);
  }

  @Test
  public void addVideoFrameProcessingOffsetSample_sumReachesMaxLong_addsValues() {
    long highSampleValue = Long.MAX_VALUE - 10;
    long additionalSample = Long.MAX_VALUE - highSampleValue;

    decoderCounters.addVideoFrameProcessingOffsetSample(highSampleValue);
    decoderCounters.addVideoFrameProcessingOffsetSample(additionalSample);

    assertThat(decoderCounters.videoFrameProcessingOffsetUsSum).isEqualTo(Long.MAX_VALUE);
    assertThat(decoderCounters.videoFrameProcessingOffsetUsCount).isEqualTo(2);
  }

  @Test
  public void addVideoFrameProcessingOffsetSample_sumOverflows_isNoOp() {
    long highSampleValue = Long.MAX_VALUE - 10;
    long additionalSample = Long.MAX_VALUE - highSampleValue + 10;

    decoderCounters.addVideoFrameProcessingOffsetSample(highSampleValue);
    decoderCounters.addVideoFrameProcessingOffsetSample(additionalSample);

    assertThat(decoderCounters.videoFrameProcessingOffsetUsSum).isEqualTo(highSampleValue);
    assertThat(decoderCounters.videoFrameProcessingOffsetUsCount).isEqualTo(1);
  }

  @Test
  public void addVideoFrameProcessingOffsetSample_sumReachesMinLong_addsValues() {
    long lowSampleValue = Long.MIN_VALUE + 10;
    long additionalSample = Long.MIN_VALUE - lowSampleValue;

    decoderCounters.addVideoFrameProcessingOffsetSample(lowSampleValue);
    decoderCounters.addVideoFrameProcessingOffsetSample(additionalSample);

    assertThat(decoderCounters.videoFrameProcessingOffsetUsSum).isEqualTo(Long.MIN_VALUE);
    assertThat(decoderCounters.videoFrameProcessingOffsetUsCount).isEqualTo(2);
  }

  @Test
  public void addVideoFrameProcessingOffsetSample_sumUnderflows_isNoOp() {
    long lowSampleValue = Long.MIN_VALUE + 10;
    long additionalSample = Long.MIN_VALUE - lowSampleValue - 10;

    decoderCounters.addVideoFrameProcessingOffsetSample(lowSampleValue);
    decoderCounters.addVideoFrameProcessingOffsetSample(additionalSample);

    assertThat(decoderCounters.videoFrameProcessingOffsetUsSum).isEqualTo(lowSampleValue);
    assertThat(decoderCounters.videoFrameProcessingOffsetUsCount).isEqualTo(1);
  }
}
@@ -158,7 +158,9 @@ public class DebugTextViewHelper implements Player.EventListener, Runnable {
        + getPixelAspectRatioString(format.pixelWidthHeightRatio)
        + getDecoderCountersBufferCountString(decoderCounters)
        + " vfpo: "
        + getVideoFrameProcessingOffsetAverageString(decoderCounters)
        + getVideoFrameProcessingOffsetAverageString(
            decoderCounters.totalVideoFrameProcessingOffsetUs,
            decoderCounters.videoFrameProcessingOffsetCount)
        + ")";
  }
@@ -199,13 +201,12 @@ public class DebugTextViewHelper implements Player.EventListener, Runnable {
        : (" par:" + String.format(Locale.US, "%.02f", pixelAspectRatio));
  }

  private static String getVideoFrameProcessingOffsetAverageString(DecoderCounters counters) {
    counters.ensureUpdated();
    int sampleCount = counters.videoFrameProcessingOffsetUsCount;
    if (sampleCount == 0) {
  private static String getVideoFrameProcessingOffsetAverageString(
      long totalOffsetUs, int frameCount) {
    if (frameCount == 0) {
      return "N/A";
    } else {
      long averageUs = (long) ((double) counters.videoFrameProcessingOffsetUsSum / sampleCount);
      long averageUs = (long) ((double) totalOffsetUs / frameCount);
      return String.valueOf(averageUs);
    }
  }
@@ -100,7 +100,7 @@ public final class DecoderCountersUtil {

  public static void assertVideoFrameProcessingOffsetSampleCount(
      String name, DecoderCounters counters, int minCount, int maxCount) {
    int actual = counters.videoFrameProcessingOffsetUsCount;
    int actual = counters.videoFrameProcessingOffsetCount;
    assertWithMessage(
            "Codec("
                + name