Simplify output format propagation
PiperOrigin-RevId: 324805335
parent 4d03d30890
commit 71fd335bcd
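In short, the commit drops the `Format` argument from the frame-processing-offset callbacks and merges the separate `onOutputMediaFormatChanged` / `onOutputFormatChanged(Format)` / `configureOutput(Format)` hooks into a single `onOutputFormatChanged(Format, @Nullable MediaFormat)`. A minimal sketch of a listener written against the simplified callback follows; the class name and logging are illustrative and not part of the commit.

import com.google.android.exoplayer2.video.VideoRendererEventListener;

// Hypothetical listener: only the simplified two-argument callback is overridden.
final class FrameOffsetLogger implements VideoRendererEventListener {
  @Override
  public void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
    // Average processing offset per frame, in microseconds (the Format is no longer supplied here).
    long averageOffsetUs = frameCount == 0 ? 0 : totalProcessingOffsetUs / frameCount;
    System.out.println("avg frame processing offset (us): " + averageOffsetUs);
  }
}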
@@ -2188,11 +2188,9 @@ public class SimpleExoPlayer extends BasePlayer
     }

     @Override
-    public void onVideoFrameProcessingOffset(
-        long totalProcessingOffsetUs, int frameCount, Format format) {
+    public void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
       for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
-        videoDebugListener.onVideoFrameProcessingOffset(
-            totalProcessingOffsetUs, frameCount, format);
+        videoDebugListener.onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount);
       }
     }

@@ -319,11 +319,10 @@ public class AnalyticsCollector
   }

   @Override
-  public final void onVideoFrameProcessingOffset(
-      long totalProcessingOffsetUs, int frameCount, Format format) {
+  public final void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
     EventTime eventTime = generatePlayingMediaPeriodEventTime();
     for (AnalyticsListener listener : listeners) {
-      listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount, format);
+      listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount);
     }
   }

@@ -591,10 +591,9 @@ public interface AnalyticsListener {
    * @param totalProcessingOffsetUs The sum of the video frame processing offsets for frames
    *     rendered since the last call to this method.
    * @param frameCount The number to samples included in {@code totalProcessingOffsetUs}.
-   * @param format The video {@link Format} being rendered.
    */
   default void onVideoFrameProcessingOffset(
-      EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {}
+      EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {}

   /**
    * Called when a frame is rendered for the first time since setting the surface, or since the
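For analytics code the adjustment is the same: drop the trailing `Format` parameter. A sketch of an `AnalyticsListener` implementation against the new signature (the class body is illustrative, not from the commit):

import com.google.android.exoplayer2.analytics.AnalyticsListener;

// Hypothetical analytics listener adapted to the simplified signature.
final class FrameOffsetAnalytics implements AnalyticsListener {
  @Override
  public void onVideoFrameProcessingOffset(
      EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {
    // The rendered Format is no longer delivered with this event; if the format is needed,
    // track it via other callbacks (for example onDownstreamFormatChanged).
  }
}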
@@ -91,7 +91,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   private boolean codecNeedsDiscardChannelsWorkaround;
   private boolean codecNeedsEosBufferTimestampWorkaround;
   @Nullable private Format codecPassthroughFormat;
-  @Nullable private Format inputFormat;
   private long currentPositionUs;
   private boolean allowFirstBufferPositionDiscontinuity;
   private boolean allowPositionDiscontinuity;
@@ -379,29 +378,23 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
   @Override
   protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
     super.onInputFormatChanged(formatHolder);
-    inputFormat = formatHolder.format;
-    eventDispatcher.inputFormatChanged(inputFormat);
+    eventDispatcher.inputFormatChanged(formatHolder.format);
   }

   @Override
-  protected void onOutputFormatChanged(Format outputFormat) throws ExoPlaybackException {
-    configureOutput(outputFormat);
-  }
-
-  @Override
-  protected void configureOutput(Format outputFormat) throws ExoPlaybackException {
+  protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
+      throws ExoPlaybackException {
     Format audioSinkInputFormat;
     @Nullable int[] channelMap = null;
     if (codecPassthroughFormat != null) { // Raw codec passthrough
       audioSinkInputFormat = codecPassthroughFormat;
     } else if (getCodec() == null) { // Codec bypass passthrough
-      audioSinkInputFormat = outputFormat;
+      audioSinkInputFormat = format;
     } else {
-      MediaFormat mediaFormat = getCodec().getOutputFormat();
       @C.PcmEncoding int pcmEncoding;
-      if (MimeTypes.AUDIO_RAW.equals(outputFormat.sampleMimeType)) {
+      if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
         // For PCM streams, the encoder passes through int samples despite set to float mode.
-        pcmEncoding = outputFormat.pcmEncoding;
+        pcmEncoding = format.pcmEncoding;
       } else if (Util.SDK_INT >= 24 && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
         pcmEncoding = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
       } else if (mediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) {
@@ -409,22 +402,25 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
       } else {
         // If the format is anything other than PCM then we assume that the audio decoder will
         // output 16-bit PCM.
-        pcmEncoding = C.ENCODING_PCM_16BIT;
+        pcmEncoding =
+            MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)
+                ? format.pcmEncoding
+                : C.ENCODING_PCM_16BIT;
       }
       audioSinkInputFormat =
           new Format.Builder()
               .setSampleMimeType(MimeTypes.AUDIO_RAW)
               .setPcmEncoding(pcmEncoding)
-              .setEncoderDelay(outputFormat.encoderDelay)
-              .setEncoderPadding(outputFormat.encoderPadding)
+              .setEncoderDelay(format.encoderDelay)
+              .setEncoderPadding(format.encoderPadding)
               .setChannelCount(mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT))
               .setSampleRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE))
               .build();
       if (codecNeedsDiscardChannelsWorkaround
           && audioSinkInputFormat.channelCount == 6
-          && outputFormat.channelCount < 6) {
-        channelMap = new int[outputFormat.channelCount];
-        for (int i = 0; i < outputFormat.channelCount; i++) {
+          && format.channelCount < 6) {
+        channelMap = new int[format.channelCount];
+        for (int i = 0; i < format.channelCount; i++) {
           channelMap[i] = i;
         }
       }
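The encoding selection above follows a fallback chain: trust the track's declared PCM encoding for raw audio, otherwise read KEY_PCM_ENCODING from the codec's output MediaFormat on API 24+, otherwise assume 16-bit PCM. A standalone sketch of that chain with hypothetical helper and parameter names (only the framework constants are real; the vendor-specific bits-per-sample key is omitted):

import android.media.AudioFormat;
import android.media.MediaFormat;
import android.os.Build;

final class PcmEncodingResolver {
  // Hypothetical helper mirroring the fallback order used in the renderer above.
  static int resolve(boolean isRawPcm, int declaredPcmEncoding, MediaFormat codecOutputFormat) {
    if (isRawPcm) {
      return declaredPcmEncoding; // PCM input: the track already declares its encoding.
    }
    if (Build.VERSION.SDK_INT >= 24
        && codecOutputFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
      return codecOutputFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
    }
    return AudioFormat.ENCODING_PCM_16BIT; // Otherwise assume the decoder outputs 16-bit PCM.
  }
}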
@@ -432,7 +428,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
     try {
       audioSink.configure(audioSinkInputFormat, /* specifiedBufferSize= */ 0, channelMap);
     } catch (AudioSink.ConfigurationException e) {
-      throw createRendererException(e, outputFormat);
+      throw createRendererException(e, format);
     }
   }

@@ -621,8 +617,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
     try {
       audioSink.playToEndOfStream();
     } catch (AudioSink.WriteException e) {
-      Format outputFormat = getCurrentOutputFormat();
-      throw createRendererException(e, outputFormat != null ? outputFormat : inputFormat);
+      @Nullable Format outputFormat = getOutputFormat();
+      throw createRendererException(e, outputFormat != null ? outputFormat : getInputFormat());
     }
   }

@@ -364,7 +364,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
   private float operatingRate;
   @Nullable private MediaCodec codec;
   @Nullable private MediaCodecAdapter codecAdapter;
-  @Nullable private Format codecFormat;
+  @Nullable private Format codecInputFormat;
+  @Nullable private MediaFormat codecOutputMediaFormat;
+  private boolean codecOutputMediaFormatChanged;
   private float codecOperatingRate;
   @Nullable private ArrayDeque<MediaCodecInfo> availableCodecInfos;
   @Nullable private DecoderInitializationException preferredDecoderInitializationException;
@@ -409,7 +411,6 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
   protected DecoderCounters decoderCounters;
   private long outputStreamOffsetUs;
   private int pendingOutputStreamOffsetCount;
-  private boolean receivedOutputMediaFormatChange;

   /**
    * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*}
@@ -613,35 +614,40 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    *
    * @param exception The exception.
    */
-  protected void setPendingPlaybackException(ExoPlaybackException exception) {
+  protected final void setPendingPlaybackException(ExoPlaybackException exception) {
     pendingPlaybackException = exception;
   }

   /**
-   * Polls the pending output format queue for a given buffer timestamp. If a format is present, it
-   * is removed and returned. Otherwise returns {@code null}. Subclasses should only call this
-   * method if they are taking over responsibility for output format propagation (e.g., when using
-   * video tunneling).
+   * Updates the output formats for the specified output buffer timestamp, calling {@link
+   * #onOutputFormatChanged} if a change has occurred.
+   *
+   * <p>Subclasses should only call this method if operating in a mode where buffers are not
+   * dequeued from the decoder, for example when using video tunneling).
    *
    * @throws ExoPlaybackException Thrown if an error occurs as a result of the output format change.
    */
   protected final void updateOutputFormatForTime(long presentationTimeUs)
       throws ExoPlaybackException {
+    boolean outputFormatChanged = false;
     @Nullable Format format = formatQueue.pollFloor(presentationTimeUs);
     if (format != null) {
       outputFormat = format;
-      onOutputFormatChanged(outputFormat);
-    } else if (receivedOutputMediaFormatChange && outputFormat != null) {
-      // No Format change with the MediaFormat change, so we need to update based on the existing
-      // Format.
-      configureOutput(outputFormat);
+      outputFormatChanged = true;
     }
-
-    receivedOutputMediaFormatChange = false;
+    if (outputFormatChanged || (codecOutputMediaFormatChanged && outputFormat != null)) {
+      onOutputFormatChanged(outputFormat, codecOutputMediaFormat);
+      codecOutputMediaFormatChanged = false;
+    }
   }

   @Nullable
-  protected final Format getCurrentOutputFormat() {
+  protected Format getInputFormat() {
+    return inputFormat;
+  }
+
+  @Nullable
+  protected final Format getOutputFormat() {
     return outputFormat;
   }

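The rewritten updateOutputFormatForTime merges two triggers into one notification: a queued input Format reaching its timestamp, or a pending codec MediaFormat change. A small standalone model of that flow, with illustrative generic types and names (this is not ExoPlayer API):

import java.util.Map;
import java.util.TreeMap;
import java.util.function.BiConsumer;

// Toy model of the propagation logic: fire one callback when either side changes.
final class OutputFormatModel<F, M> {
  private final TreeMap<Long, F> formatQueue = new TreeMap<>();
  private F outputFormat;
  private M codecOutputMediaFormat;
  private boolean mediaFormatChanged;

  void queueInputFormat(long fromTimeUs, F format) {
    formatQueue.put(fromTimeUs, format);
  }

  void onCodecMediaFormatChanged(M mediaFormat) {
    codecOutputMediaFormat = mediaFormat;
    mediaFormatChanged = true;
  }

  void updateForTime(long presentationTimeUs, BiConsumer<F, M> onOutputFormatChanged) {
    boolean formatChanged = false;
    Map.Entry<Long, F> entry = formatQueue.floorEntry(presentationTimeUs);
    if (entry != null) {
      // Latest queued format at or before this timestamp becomes the output format.
      outputFormat = entry.getValue();
      formatQueue.headMap(presentationTimeUs, /* inclusive= */ true).clear();
      formatChanged = true;
    }
    if (formatChanged || (mediaFormatChanged && outputFormat != null)) {
      onOutputFormatChanged.accept(outputFormat, codecOutputMediaFormat);
      mediaFormatChanged = false;
    }
  }
}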
@@ -650,6 +656,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     return codec;
   }

+  @Nullable
+  protected final MediaFormat getCodecOutputMediaFormat() {
+    return codecOutputMediaFormat;
+  }
+
   @Nullable
   protected final MediaCodecInfo getCodecInfo() {
     return codecInfo;
@@ -905,11 +916,14 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
   protected void resetCodecStateForRelease() {
     resetCodecStateForFlush();

+    pendingPlaybackException = null;
+    c2Mp3TimestampTracker = null;
     availableCodecInfos = null;
     codecInfo = null;
-    codecFormat = null;
+    codecInputFormat = null;
+    codecOutputMediaFormat = null;
+    codecOutputMediaFormatChanged = false;
     codecHasOutputMediaFormat = false;
-    pendingPlaybackException = null;
     codecOperatingRate = CODEC_OPERATING_RATE_UNSET;
     codecAdaptationWorkaroundMode = ADAPTATION_WORKAROUND_MODE_NEVER;
     codecNeedsReconfigureWorkaround = false;
@@ -920,7 +934,6 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     codecNeedsEosOutputExceptionWorkaround = false;
     codecNeedsMonoChannelCountWorkaround = false;
     codecNeedsEosPropagation = false;
-    c2Mp3TimestampTracker = null;
     codecReconfigured = false;
     codecReconfigurationState = RECONFIGURATION_STATE_NONE;
     resetCodecBuffers();
@@ -1110,16 +1123,17 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     this.codecAdapter = codecAdapter;
     this.codecInfo = codecInfo;
     this.codecOperatingRate = codecOperatingRate;
-    codecFormat = inputFormat;
+    codecInputFormat = inputFormat;
     codecAdaptationWorkaroundMode = codecAdaptationWorkaroundMode(codecName);
     codecNeedsReconfigureWorkaround = codecNeedsReconfigureWorkaround(codecName);
-    codecNeedsDiscardToSpsWorkaround = codecNeedsDiscardToSpsWorkaround(codecName, codecFormat);
+    codecNeedsDiscardToSpsWorkaround =
+        codecNeedsDiscardToSpsWorkaround(codecName, codecInputFormat);
     codecNeedsFlushWorkaround = codecNeedsFlushWorkaround(codecName);
     codecNeedsSosFlushWorkaround = codecNeedsSosFlushWorkaround(codecName);
     codecNeedsEosFlushWorkaround = codecNeedsEosFlushWorkaround(codecName);
     codecNeedsEosOutputExceptionWorkaround = codecNeedsEosOutputExceptionWorkaround(codecName);
     codecNeedsMonoChannelCountWorkaround =
-        codecNeedsMonoChannelCountWorkaround(codecName, codecFormat);
+        codecNeedsMonoChannelCountWorkaround(codecName, codecInputFormat);
     codecNeedsEosPropagation =
         codecNeedsEosPropagationWorkaround(codecInfo) || getCodecNeedsEosPropagation();
     if ("c2.android.mp3.decoder".equals(codecInfo.name)) {
@@ -1234,8 +1248,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       // For adaptive reconfiguration, decoders expect all reconfiguration data to be supplied at
       // the start of the buffer that also contains the first frame in the new format.
       if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) {
-        for (int i = 0; i < codecFormat.initializationData.size(); i++) {
-          byte[] data = codecFormat.initializationData.get(i);
+        for (int i = 0; i < codecInputFormat.initializationData.size(); i++) {
+          byte[] data = codecInputFormat.initializationData.get(i);
           buffer.data.put(data);
         }
         codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING;
@@ -1270,7 +1284,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) {
         // We received a new format immediately before the end of the stream. We need to clear
         // the corresponding reconfiguration data from the current buffer, but re-write it into
-        // a subsequent buffer if there are any (e.g. if the user seeks backwards).
+        // a subsequent buffer if there are any (for example, if the user seeks backwards).
         buffer.clear();
         codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING;
       }
@@ -1393,6 +1407,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}.
    * @throws ExoPlaybackException If an error occurs re-initializing the {@link MediaCodec}.
    */
+  @CallSuper
   protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
     waitingForFirstSampleInFormat = true;
     Format newFormat = Assertions.checkNotNull(formatHolder.format);
@@ -1426,12 +1441,12 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
       return;
     }

-    switch (canKeepCodec(codec, codecInfo, codecFormat, newFormat)) {
+    switch (canKeepCodec(codec, codecInfo, codecInputFormat, newFormat)) {
       case KEEP_CODEC_RESULT_NO:
         drainAndReinitializeCodec();
         break;
       case KEEP_CODEC_RESULT_YES_WITH_FLUSH:
-        codecFormat = newFormat;
+        codecInputFormat = newFormat;
         updateCodecOperatingRate();
         if (sourceDrmSession != codecDrmSession) {
           drainAndUpdateCodecDrmSession();
@@ -1448,9 +1463,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
         codecNeedsAdaptationWorkaroundBuffer =
             codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_ALWAYS
                 || (codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION
-                    && newFormat.width == codecFormat.width
-                    && newFormat.height == codecFormat.height);
-        codecFormat = newFormat;
+                    && newFormat.width == codecInputFormat.width
+                    && newFormat.height == codecInputFormat.height);
+        codecInputFormat = newFormat;
         updateCodecOperatingRate();
         if (sourceDrmSession != codecDrmSession) {
           drainAndUpdateCodecDrmSession();
@@ -1458,7 +1473,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
         }
         break;
       case KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION:
-        codecFormat = newFormat;
+        codecInputFormat = newFormat;
         updateCodecOperatingRate();
         if (sourceDrmSession != codecDrmSession) {
           drainAndUpdateCodecDrmSession();
@@ -1470,40 +1485,18 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
   }

   /**
-   * Called when the output {@link MediaFormat} of the {@link MediaCodec} changes.
+   * Called when one of the output formats changes.
    *
    * <p>The default implementation is a no-op.
    *
-   * @param codec The {@link MediaCodec} instance.
-   * @param outputMediaFormat The new output {@link MediaFormat}.
-   * @throws ExoPlaybackException Thrown if an error occurs handling the new output media format.
-   */
-  protected void onOutputMediaFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat)
-      throws ExoPlaybackException {
-    // Do nothing.
-  }
-
-  /**
-   * Called when the output {@link Format} changes.
-   *
-   * <p>The default implementation is a no-op.
-   *
-   * @param outputFormat The new output {@link Format}.
-   * @throws ExoPlaybackException Thrown if an error occurs handling the new output format.
-   */
-  protected void onOutputFormatChanged(Format outputFormat) throws ExoPlaybackException {
-    // Do nothing.
-  }
-
-  /**
-   * Configures the renderer output based on a {@link Format}.
-   *
-   * <p>The default implementation is a no-op.
-   *
-   * @param outputFormat The format to configure the output with.
+   * @param format The input {@link Format} to which future output now corresponds. If the renderer
+   *     is in bypass mode, this is also the output format.
+   * @param mediaFormat The codec output {@link MediaFormat}, or {@code null} if the renderer is in
+   *     bypass mode.
    * @throws ExoPlaybackException Thrown if an error occurs configuring the output.
    */
-  protected void configureOutput(Format outputFormat) throws ExoPlaybackException {
+  protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
+      throws ExoPlaybackException {
     // Do nothing.
   }

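Renderer subclasses now override this single hook. A sketch of an anonymous MediaCodecAudioRenderer subclass using the merged signature; the factory class, constructor arguments, and logging are assumptions, and the override pattern mirrors the tests further down:

import android.content.Context;
import android.media.MediaFormat;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;

final class RendererFactories {
  static MediaCodecAudioRenderer createLoggingAudioRenderer(Context context) {
    return new MediaCodecAudioRenderer(context, MediaCodecSelector.DEFAULT) {
      @Override
      protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
          throws ExoPlaybackException {
        super.onOutputFormatChanged(format, mediaFormat);
        // mediaFormat is null in bypass mode; format always describes the new output.
        android.util.Log.d("AudioRenderer", "output format changed: " + format.sampleMimeType);
      }
    };
  }
}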
@@ -1633,7 +1626,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     }

     float newCodecOperatingRate =
-        getCodecOperatingRateV23(operatingRate, codecFormat, getStreamFormats());
+        getCodecOperatingRateV23(operatingRate, codecInputFormat, getStreamFormats());
     if (codecOperatingRate == newCodecOperatingRate) {
       // No change.
     } else if (newCodecOperatingRate == CODEC_OPERATING_RATE_UNSET) {
@@ -1721,8 +1714,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {

     if (outputIndex < 0) {
       if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED /* (-2) */) {
-        processOutputMediaFormat();
-        receivedOutputMediaFormatChange = true;
+        processOutputMediaFormatChanged();
         return true;
       } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED /* (-3) */) {
         processOutputBuffersChanged();
@@ -1750,6 +1742,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {

     this.outputIndex = outputIndex;
     outputBuffer = getOutputBuffer(outputIndex);

     // The dequeued buffer is a media buffer. Do some initial setup.
+    // It will be processed by calling processOutputBuffer (possibly multiple times).
     if (outputBuffer != null) {
@@ -1815,8 +1808,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     return false;
   }

-  /** Processes a new output {@link MediaFormat}. */
-  private void processOutputMediaFormat() throws ExoPlaybackException {
+  /** Processes a change in the decoder output {@link MediaFormat}. */
+  private void processOutputMediaFormatChanged() {
     codecHasOutputMediaFormat = true;
     MediaFormat mediaFormat = codecAdapter.getOutputFormat();
     if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER
@@ -1830,7 +1823,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     if (codecNeedsMonoChannelCountWorkaround) {
       mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
     }
-    onOutputMediaFormatChanged(codec, mediaFormat);
+    codecOutputMediaFormat = mediaFormat;
+    codecOutputMediaFormatChanged = true;
   }

   /**
@@ -1874,7 +1868,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
    *     by the source.
    * @param isLastBuffer Whether the buffer is the last sample of the current stream.
    * @param format The {@link Format} associated with the buffer.
-   * @return Whether the output buffer was fully processed (e.g. rendered or skipped).
+   * @return Whether the output buffer was fully processed (for example, rendered or skipped).
    * @throws ExoPlaybackException If an error occurs processing the output buffer.
    */
   protected abstract boolean processOutputBuffer(
@@ -2121,7 +2115,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
     if (!batchBuffer.isEmpty() && waitingForFirstSampleInFormat) {
       // This is the first buffer in a new format, the output format must be updated.
       outputFormat = Assertions.checkNotNull(inputFormat);
-      onOutputFormatChanged(outputFormat);
+      onOutputFormatChanged(outputFormat, /* mediaFormat= */ null);
       waitingForFirstSampleInFormat = false;
     }

@@ -153,9 +153,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
   private long totalVideoFrameProcessingOffsetUs;
   private int videoFrameProcessingOffsetCount;

-  @Nullable private MediaFormat currentMediaFormat;
-  private int mediaFormatWidth;
-  private int mediaFormatHeight;
   private int currentWidth;
   private int currentHeight;
   private int currentUnappliedRotationDegrees;
@@ -262,8 +259,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
     currentHeight = Format.NO_VALUE;
     currentPixelWidthHeightRatio = Format.NO_VALUE;
     scalingMode = VIDEO_SCALING_MODE_DEFAULT;
-    mediaFormatWidth = Format.NO_VALUE;
-    mediaFormatHeight = Format.NO_VALUE;
     clearReportedVideoSize();
   }

@@ -449,7 +444,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {

   @Override
   protected void onDisabled() {
-    currentMediaFormat = null;
     clearReportedVideoSize();
     clearRenderedFirstFrame();
     frameReleaseTimeHelper.disable();
@@ -668,51 +662,37 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
   }

   @Override
-  protected void onOutputMediaFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) {
-    currentMediaFormat = outputMediaFormat;
-    boolean hasCrop =
-        outputMediaFormat.containsKey(KEY_CROP_RIGHT)
-            && outputMediaFormat.containsKey(KEY_CROP_LEFT)
-            && outputMediaFormat.containsKey(KEY_CROP_BOTTOM)
-            && outputMediaFormat.containsKey(KEY_CROP_TOP);
-    mediaFormatWidth =
-        hasCrop
-            ? outputMediaFormat.getInteger(KEY_CROP_RIGHT)
-                - outputMediaFormat.getInteger(KEY_CROP_LEFT)
-                + 1
-            : outputMediaFormat.getInteger(MediaFormat.KEY_WIDTH);
-    mediaFormatHeight =
-        hasCrop
-            ? outputMediaFormat.getInteger(KEY_CROP_BOTTOM)
-                - outputMediaFormat.getInteger(KEY_CROP_TOP)
-                + 1
-            : outputMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
-
-    // Must be applied each time the output MediaFormat changes.
+  protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) {
+    @Nullable MediaCodec codec = getCodec();
+    if (codec != null) {
+      // Must be applied each time the output format changes.
       codec.setVideoScalingMode(scalingMode);
-    maybeNotifyVideoFrameProcessingOffset();
     }
-
-  @Override
-  protected void onOutputFormatChanged(Format outputFormat) {
-    configureOutput(outputFormat);
-  }
-
-  @Override
-  protected void configureOutput(Format outputFormat) {
     if (tunneling) {
-      currentWidth = outputFormat.width;
-      currentHeight = outputFormat.height;
+      currentWidth = format.width;
+      currentHeight = format.height;
     } else {
-      currentWidth = mediaFormatWidth;
-      currentHeight = mediaFormatHeight;
+      Assertions.checkNotNull(mediaFormat);
+      boolean hasCrop =
+          mediaFormat.containsKey(KEY_CROP_RIGHT)
+              && mediaFormat.containsKey(KEY_CROP_LEFT)
+              && mediaFormat.containsKey(KEY_CROP_BOTTOM)
+              && mediaFormat.containsKey(KEY_CROP_TOP);
+      currentWidth =
+          hasCrop
+              ? mediaFormat.getInteger(KEY_CROP_RIGHT) - mediaFormat.getInteger(KEY_CROP_LEFT) + 1
+              : mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+      currentHeight =
+          hasCrop
+              ? mediaFormat.getInteger(KEY_CROP_BOTTOM) - mediaFormat.getInteger(KEY_CROP_TOP) + 1
+              : mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
     }
-    currentPixelWidthHeightRatio = outputFormat.pixelWidthHeightRatio;
+    currentPixelWidthHeightRatio = format.pixelWidthHeightRatio;
     if (Util.SDK_INT >= 21) {
       // On API level 21 and above the decoder applies the rotation when rendering to the surface.
       // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
       // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
-      if (outputFormat.rotationDegrees == 90 || outputFormat.rotationDegrees == 270) {
+      if (format.rotationDegrees == 90 || format.rotationDegrees == 270) {
         int rotatedHeight = currentWidth;
         currentWidth = currentHeight;
         currentHeight = rotatedHeight;
@@ -720,9 +700,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
       }
     } else {
       // On API level 20 and below the decoder does not apply the rotation.
-      currentUnappliedRotationDegrees = outputFormat.rotationDegrees;
+      currentUnappliedRotationDegrees = format.rotationDegrees;
     }
-    currentFrameRate = outputFormat.frameRate;
+    currentFrameRate = format.frameRate;
     updateSurfaceFrameRate(/* isNewSurface= */ false);
   }

@@ -811,7 +791,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
             || (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
     if (forceRenderOutputBuffer) {
       long releaseTimeNs = System.nanoTime();
-      notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format, currentMediaFormat);
+      notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
       if (Util.SDK_INT >= 21) {
         renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
       } else {
@@ -857,8 +837,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
     if (Util.SDK_INT >= 21) {
       // Let the underlying framework time the release.
       if (earlyUs < 50000) {
-        notifyFrameMetadataListener(
-            presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
+        notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
         renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
         updateVideoFrameProcessingOffsetCounters(earlyUs);
         return true;
@@ -877,8 +856,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
           return false;
         }
       }
-      notifyFrameMetadataListener(
-          presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
+      notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
       renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
       updateVideoFrameProcessingOffsetCounters(earlyUs);
       return true;
@@ -890,10 +868,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
   }

   private void notifyFrameMetadataListener(
-      long presentationTimeUs, long releaseTimeNs, Format format, MediaFormat mediaFormat) {
+      long presentationTimeUs, long releaseTimeNs, Format format) {
     if (frameMetadataListener != null) {
       frameMetadataListener.onVideoFrameAboutToBeRendered(
-          presentationTimeUs, releaseTimeNs, format, mediaFormat);
+          presentationTimeUs, releaseTimeNs, format, getCodecOutputMediaFormat());
     }
   }

@@ -1230,10 +1208,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
   }

   private void maybeNotifyVideoFrameProcessingOffset() {
-    @Nullable Format outputFormat = getCurrentOutputFormat();
-    if (outputFormat != null && videoFrameProcessingOffsetCount != 0) {
+    if (videoFrameProcessingOffsetCount != 0) {
       eventDispatcher.reportVideoFrameProcessingOffset(
-          totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount, outputFormat);
+          totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount);
       totalVideoFrameProcessingOffsetUs = 0;
       videoFrameProcessingOffsetCount = 0;
     }
@@ -88,10 +88,8 @@ public interface VideoRendererEventListener {
    * @param totalProcessingOffsetUs The sum of all video frame processing offset samples for the
    *     video frames processed by the renderer in microseconds.
    * @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}.
-   * @param format The {@link Format} that is currently output.
    */
-  default void onVideoFrameProcessingOffset(
-      long totalProcessingOffsetUs, int frameCount, Format format) {}
+  default void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {}

   /**
    * Called before a frame is rendered for the first time since setting the surface, and each time
@@ -182,13 +180,12 @@ public interface VideoRendererEventListener {
     }

     /** Invokes {@link VideoRendererEventListener#onVideoFrameProcessingOffset}. */
-    public void reportVideoFrameProcessingOffset(
-        long totalProcessingOffsetUs, int frameCount, Format format) {
+    public void reportVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
       if (handler != null) {
         handler.post(
             () ->
                 castNonNull(listener)
-                    .onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount, format));
+                    .onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount));
       }
     }

@@ -1955,7 +1955,7 @@ public final class AnalyticsCollectorTest {

     @Override
     public void onVideoFrameProcessingOffset(
-        EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {
+        EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {
       reportedEvents.add(new ReportedEvent(EVENT_VIDEO_FRAME_PROCESSING_OFFSET, eventTime));
     }

@@ -25,7 +25,9 @@ import static org.mockito.ArgumentMatchers.anyLong;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

+import android.media.MediaFormat;
 import android.os.SystemClock;
+import androidx.annotation.Nullable;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.android.exoplayer2.C;
@@ -216,15 +218,16 @@ public class MediaCodecAudioRendererTest {
             /* eventHandler= */ null,
             /* eventListener= */ null) {
           @Override
-          protected void onOutputFormatChanged(Format outputFormat) throws ExoPlaybackException {
-            super.onOutputFormatChanged(outputFormat);
-            if (!outputFormat.equals(AUDIO_AAC)) {
+          protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
+              throws ExoPlaybackException {
+            super.onOutputFormatChanged(format, mediaFormat);
+            if (!format.equals(AUDIO_AAC)) {
               setPendingPlaybackException(
                   ExoPlaybackException.createForRenderer(
                       new AudioSink.ConfigurationException("Test"),
                       "rendererName",
                       /* rendererIndex= */ 0,
-                      outputFormat,
+                      format,
                       FORMAT_HANDLED));
             }
           }
@@ -254,8 +257,11 @@ public class MediaCodecAudioRendererTest {
     exceptionThrowingRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
     exceptionThrowingRenderer.render(/* positionUs= */ 250, SystemClock.elapsedRealtime() * 1000);

+    MediaFormat mediaFormat = new MediaFormat();
+    mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
+    mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 32_000);
     // Simulating the exception being thrown when not traceable back to render.
-    exceptionThrowingRenderer.onOutputFormatChanged(changedFormat);
+    exceptionThrowingRenderer.onOutputFormatChanged(changedFormat, mediaFormat);

     assertThrows(
         ExoPlaybackException.class,
@@ -29,6 +29,7 @@ import static org.mockito.Mockito.verify;
 import static org.robolectric.Shadows.shadowOf;

 import android.graphics.SurfaceTexture;
+import android.media.MediaFormat;
 import android.os.Handler;
 import android.os.Looper;
 import android.os.SystemClock;
@@ -37,7 +38,6 @@ import androidx.annotation.Nullable;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.android.exoplayer2.C;
-import com.google.android.exoplayer2.ExoPlaybackException;
 import com.google.android.exoplayer2.Format;
 import com.google.android.exoplayer2.Renderer;
 import com.google.android.exoplayer2.RendererCapabilities;
@@ -113,9 +113,9 @@ public class MediaCodecVideoRendererTest {
           }

           @Override
-          protected void onOutputFormatChanged(Format outputFormat) {
-            super.onOutputFormatChanged(outputFormat);
-            currentOutputFormat = outputFormat;
+          protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) {
+            super.onOutputFormatChanged(format, mediaFormat);
+            currentOutputFormat = format;
           }
         };

@@ -458,59 +458,4 @@ public class MediaCodecVideoRendererTest {
     shadowLooper.idle();
     verify(eventListener, times(2)).onRenderedFirstFrame(any());
   }
-
-  @Test
-  public void onVideoFrameProcessingOffset_isCalledAfterOutputFormatChanges()
-      throws ExoPlaybackException {
-    Format mp4Uhd = VIDEO_H264.buildUpon().setWidth(3840).setHeight(2160).build();
-    FakeSampleStream fakeSampleStream =
-        new FakeSampleStream(
-            /* mediaSourceEventDispatcher= */ null,
-            DrmSessionManager.DUMMY,
-            new DrmSessionEventListener.EventDispatcher(),
-            /* initialFormat= */ mp4Uhd,
-            ImmutableList.of(
-                oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
-                format(VIDEO_H264),
-                oneByteSample(/* timeUs= */ 50, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 100),
-                format(mp4Uhd),
-                oneByteSample(/* timeUs= */ 150, C.BUFFER_FLAG_KEY_FRAME),
-                oneByteSample(/* timeUs= */ 200),
-                oneByteSample(/* timeUs= */ 250),
-                format(VIDEO_H264),
-                oneByteSample(/* timeUs= */ 300, C.BUFFER_FLAG_KEY_FRAME),
-                FakeSampleStreamItem.END_OF_STREAM_ITEM));
-
-    mediaCodecVideoRenderer.enable(
-        RendererConfiguration.DEFAULT,
-        new Format[] {mp4Uhd},
-        fakeSampleStream,
-        /* positionUs= */ 0,
-        /* joining= */ false,
-        /* mayRenderStartOfStream= */ true,
-        /* offsetUs */ 0);
-
-    mediaCodecVideoRenderer.setCurrentStreamFinal();
-    mediaCodecVideoRenderer.start();
-
-    int positionUs = 10;
-    do {
-      mediaCodecVideoRenderer.render(positionUs, SystemClock.elapsedRealtime() * 1000);
-      positionUs += 10;
-    } while (!mediaCodecVideoRenderer.isEnded());
-    mediaCodecVideoRenderer.stop();
-    shadowOf(testMainLooper).idle();
-
-    InOrder orderVerifier = inOrder(eventListener);
-    orderVerifier.verify(eventListener).onVideoFrameProcessingOffset(anyLong(), eq(1), eq(mp4Uhd));
-    orderVerifier
-        .verify(eventListener)
-        .onVideoFrameProcessingOffset(anyLong(), eq(2), eq(VIDEO_H264));
-    orderVerifier.verify(eventListener).onVideoFrameProcessingOffset(anyLong(), eq(3), eq(mp4Uhd));
-    orderVerifier
-        .verify(eventListener)
-        .onVideoFrameProcessingOffset(anyLong(), eq(1), eq(VIDEO_H264));
-    orderVerifier.verifyNoMoreInteractions();
-  }
 }
@@ -67,9 +67,7 @@ public class FakeVideoRenderer extends FakeRenderer {
     super.onStopped();
     eventDispatcher.droppedFrames(/* droppedFrameCount= */ 0, /* elapsedMs= */ 0);
     eventDispatcher.reportVideoFrameProcessingOffset(
-        /* totalProcessingOffsetUs= */ 400000,
-        /* frameCount= */ 10,
-        Assertions.checkNotNull(format));
+        /* totalProcessingOffsetUs= */ 400000, /* frameCount= */ 10);
   }

   @Override