Fix VPFO counter reporting

DecoderCounters are reset in onEnabled, but the way the local
counters in MediaCodecVideoRenderer were reset assumed the
DecoderCounters were reset in onStarted.

PiperOrigin-RevId: 320440991
This commit is contained in:
olly 2020-07-09 19:45:16 +01:00 committed by kim-vde
parent 351b54e97f
commit fd3e10ed76
3 changed files with 51 additions and 48 deletions

View File

@ -515,22 +515,17 @@ public interface AnalyticsListener {
* Called when there is an update to the video frame processing offset reported by a video
* renderer.
*
* <p>The processing offset for a video frame is the difference between the time at which the
* frame became available to render, and the time at which it was scheduled to be rendered. A
* positive value indicates the frame became available early enough, whereas a negative value
* indicates that the frame wasn't available until after the time at which it should have been
* rendered.
*
* @param eventTime The event time.
* @param totalProcessingOffsetUs The sum of the video frame processing offsets for frames
*     rendered since the last call to this method.
* @param frameCount The number of samples included in {@code totalProcessingOffsetUs}.
* @param format The video {@link Format} being rendered.
*/
// No-op by default: listeners override only the callbacks they are interested in.
default void onVideoFrameProcessingOffset(
    EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {}

View File

@ -74,19 +74,22 @@ public final class DecoderCounters {
*/
public int droppedToKeyframeCount;
/**
* The sum of the video frame processing offsets in microseconds.
*
* <p>The processing offset for a video frame is the difference between the time at which the
* frame became available to render, and the time at which it was scheduled to be rendered. A
* positive value indicates the frame became available early enough, whereas a negative value
* indicates that the frame wasn't available until after the time at which it should have been
* rendered.
*
* <p>Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of
* updating it directly.
*/
public long totalVideoFrameProcessingOffsetUs;
/**
* The number of video frame processing offsets added.
*
* <p>Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of
* updating it directly.
*/
public int videoFrameProcessingOffsetCount;
@ -117,25 +120,24 @@ public final class DecoderCounters {
maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount,
other.maxConsecutiveDroppedBufferCount);
droppedToKeyframeCount += other.droppedToKeyframeCount;
addVideoFrameProcessingOffsetSamples(
addVideoFrameProcessingOffsets(
other.totalVideoFrameProcessingOffsetUs, other.videoFrameProcessingOffsetCount);
}
/**
* Adds a video frame processing offset sample to {@link #totalVideoFrameProcessingOffsetUs} and
* Adds a video frame processing offset to {@link #totalVideoFrameProcessingOffsetUs} and
* increases {@link #videoFrameProcessingOffsetCount} by one.
*
* <p>Convenience method to ensure both fields are updated when adding a sample.
* <p>Convenience method to ensure both fields are updated when adding a single offset.
*
* @param sampleUs The sample in microseconds.
* @param processingOffsetUs The video frame processing offset in microseconds.
*/
public void addVideoFrameProcessingOffsetSample(long sampleUs) {
addVideoFrameProcessingOffsetSamples(sampleUs, /* count= */ 1);
public void addVideoFrameProcessingOffset(long processingOffsetUs) {
addVideoFrameProcessingOffsets(processingOffsetUs, /* count= */ 1);
}
private void addVideoFrameProcessingOffsetSamples(long sampleUs, int count) {
totalVideoFrameProcessingOffsetUs += sampleUs;
private void addVideoFrameProcessingOffsets(long totalProcessingOffsetUs, int count) {
totalVideoFrameProcessingOffsetUs += totalProcessingOffsetUs;
videoFrameProcessingOffsetCount += count;
}
}

View File

@ -40,6 +40,7 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
@ -786,7 +787,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Skip frames in sync with playback, so we'll be at the right frame if the mode changes.
if (isBufferLate(earlyUs)) {
skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
}
return false;
@ -813,7 +814,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} else {
renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
}
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
}
@ -846,7 +847,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} else {
dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
}
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
}
@ -856,7 +857,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
}
} else {
@ -876,7 +877,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
}
}
@ -1032,8 +1033,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
/**
* Updates local counters and {@link DecoderCounters} to reflect that {@code droppedBufferCount}
* additional buffers were dropped.
*
* @param droppedBufferCount The number of additional dropped buffers.
*/
@ -1048,6 +1049,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
}
/**
 * Records a single video frame processing offset, updating both the renderer's local
 * accumulators and the shared {@link DecoderCounters}.
 *
 * @param processingOffsetUs The video frame processing offset.
 */
protected void updateVideoFrameProcessingOffsetCounters(long processingOffsetUs) {
  totalVideoFrameProcessingOffsetUs += processingOffsetUs;
  videoFrameProcessingOffsetCount += 1;
  decoderCounters.addVideoFrameProcessingOffset(processingOffsetUs);
}
/**
* Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is less than 21.
@ -1215,18 +1227,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
private void maybeNotifyVideoFrameProcessingOffset() {
Format outputFormat = getCurrentOutputFormat();
if (outputFormat != null) {
long totalOffsetDelta =
decoderCounters.totalVideoFrameProcessingOffsetUs - totalVideoFrameProcessingOffsetUs;
int countDelta =
decoderCounters.videoFrameProcessingOffsetCount - videoFrameProcessingOffsetCount;
if (countDelta != 0) {
eventDispatcher.reportVideoFrameProcessingOffset(
totalOffsetDelta, countDelta, outputFormat);
totalVideoFrameProcessingOffsetUs = decoderCounters.totalVideoFrameProcessingOffsetUs;
videoFrameProcessingOffsetCount = decoderCounters.videoFrameProcessingOffsetCount;
}
@Nullable Format outputFormat = getCurrentOutputFormat();
if (outputFormat != null && videoFrameProcessingOffsetCount != 0) {
eventDispatcher.reportVideoFrameProcessingOffset(
totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount, outputFormat);
totalVideoFrameProcessingOffsetUs = 0;
videoFrameProcessingOffsetCount = 0;
}
}