Rename inputFormat to firstInputFormat in multiple places

With multi-asset, the sample pipelines can process more than one
MediaItem. The renaming makes it clear that the format passed to the
SamplePipeline constructors is the one corresponding to the first
MediaItem, because the first format is the one used to configure the
SamplePipelines.

PiperOrigin-RevId: 506879260
Authored by kimvde on 2023-02-03 13:39:00 +00:00; committed by microkatz
parent ed5af63227
commit 4094aef9a5
4 changed files with 40 additions and 32 deletions
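
For context, a minimal sketch of the idea behind the rename, using
hypothetical stand-in types rather than the actual Transformer API: a
sample pipeline is configured once, from the format of the first
MediaItem in a sequence, and samples from every subsequent MediaItem
are then fed through that same pipeline.

// Hypothetical sketch; stand-in types, not the androidx.media3 API.
import java.util.List;

final class PipelineSketch {
  // Stand-in for androidx.media3.common.Format.
  record Format(String sampleMimeType, int sampleRate, int channelCount) {}

  // Stand-in for a SamplePipeline: configured once, from the first format.
  static final class SamplePipeline {
    SamplePipeline(Format firstInputFormat) {
      // Decoder, processors and encoder would be set up here, from the
      // first MediaItem's format only.
    }

    void queueInput(Format itemFormat) {
      // Samples from all MediaItems go through the same pipeline, even if
      // itemFormat differs from firstInputFormat.
    }
  }

  public static void main(String[] args) {
    List<Format> formats =
        List.of(
            new Format("audio/mp4a-latm", 44100, 2),
            new Format("audio/mp4a-latm", 48000, 2));
    SamplePipeline pipeline = new SamplePipeline(formats.get(0)); // first format
    for (Format format : formats) {
      pipeline.queueInput(format);
    }
  }
}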

AudioSamplePipeline.java

@@ -61,7 +61,7 @@ import org.checkerframework.dataflow.qual.Pure;
   // TODO(b/260618558): Move silent audio generation upstream of this component.
   public AudioSamplePipeline(
-      Format inputFormat,
+      Format firstInputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,
       TransformationRequest transformationRequest,
@@ -72,10 +72,11 @@ import org.checkerframework.dataflow.qual.Pure;
       MuxerWrapper muxerWrapper,
       FallbackListener fallbackListener)
       throws TransformationException {
-    super(inputFormat, streamStartPositionUs, muxerWrapper);
+    super(firstInputFormat, streamStartPositionUs, muxerWrapper);
     if (generateSilentAudioDurationUs != C.TIME_UNSET) {
-      silentAudioGenerator = new SilentAudioGenerator(inputFormat, generateSilentAudioDurationUs);
+      silentAudioGenerator =
+          new SilentAudioGenerator(firstInputFormat, generateSilentAudioDurationUs);
     } else {
       silentAudioGenerator = null;
     }
@@ -95,7 +96,7 @@ import org.checkerframework.dataflow.qual.Pure;
     if (flattenForSlowMotion) {
       audioProcessors =
           new ImmutableList.Builder<AudioProcessor>()
-              .add(new SpeedChangingAudioProcessor(new SegmentSpeedProvider(inputFormat)))
+              .add(new SpeedChangingAudioProcessor(new SegmentSpeedProvider(firstInputFormat)))
              .addAll(audioProcessors)
              .build();
     }
@@ -103,8 +104,8 @@ import org.checkerframework.dataflow.qual.Pure;
     audioProcessingPipeline = new AudioProcessingPipeline(audioProcessors);
     AudioFormat pipelineInputAudioFormat =
         new AudioFormat(
-            inputFormat.sampleRate,
-            inputFormat.channelCount,
+            firstInputFormat.sampleRate,
+            firstInputFormat.channelCount,
             // The decoder uses ENCODING_PCM_16BIT by default.
             // https://developer.android.com/reference/android/media/MediaCodec#raw-audio-buffers
             C.ENCODING_PCM_16BIT);
@@ -121,7 +122,7 @@ import org.checkerframework.dataflow.qual.Pure;
     String requestedMimeType =
         transformationRequest.audioMimeType != null
             ? transformationRequest.audioMimeType
-            : checkNotNull(inputFormat.sampleMimeType);
+            : checkNotNull(firstInputFormat.sampleMimeType);
     Format requestedOutputFormat =
         new Format.Builder()
             .setSampleMimeType(requestedMimeType)
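
As the hunks above show, the audio pipeline's processing chain is
configured entirely from the first format: its sample rate and channel
count seed the pipeline's input AudioFormat, while the encoding is
fixed to 16-bit PCM because that is the MediaCodec decoder default. A
stand-alone sketch of that derivation, with stand-in records rather
than the real media3 classes:

// Stand-in sketch mirroring the AudioFormat derivation above; these records
// are hypothetical, not the real media3 classes.
final class AudioFormatSketch {
  static final int ENCODING_PCM_16BIT = 2; // value matches android.media.AudioFormat

  record Format(int sampleRate, int channelCount) {}

  record AudioFormat(int sampleRate, int channelCount, int encoding) {}

  static AudioFormat pipelineInputAudioFormat(Format firstInputFormat) {
    // Only sampleRate and channelCount come from the first input format; the
    // decoder outputs ENCODING_PCM_16BIT by default.
    return new AudioFormat(
        firstInputFormat.sampleRate(), firstInputFormat.channelCount(), ENCODING_PCM_16BIT);
  }

  public static void main(String[] args) {
    // Prints: AudioFormat[sampleRate=44100, channelCount=2, encoding=2]
    System.out.println(pipelineInputAudioFormat(new Format(44100, 2)));
  }
}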

SamplePipeline.java

@@ -37,10 +37,11 @@ import androidx.media3.decoder.DecoderInputBuffer;
   private boolean muxerWrapperTrackAdded;
-  public SamplePipeline(Format inputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
+  public SamplePipeline(
+      Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
     this.streamStartPositionUs = streamStartPositionUs;
     this.muxerWrapper = muxerWrapper;
-    trackType = MimeTypes.getTrackType(inputFormat.sampleMimeType);
+    trackType = MimeTypes.getTrackType(firstInputFormat.sampleMimeType);
   }
   protected static TransformationException createNoSupportedMimeTypeException(
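
The base class keeps only the track type derived from the first
format's sample MIME type. A tiny stand-in sketch of that mapping (the
constants mirror androidx.media3.common.C, and the real logic lives in
MimeTypes.getTrackType, which this only approximates):

// Stand-in sketch of deriving a track type from the first format's MIME type.
final class TrackTypeSketch {
  static final int TRACK_TYPE_UNKNOWN = -1;
  static final int TRACK_TYPE_AUDIO = 1;
  static final int TRACK_TYPE_VIDEO = 2;

  static int getTrackType(String sampleMimeType) {
    if (sampleMimeType == null) {
      return TRACK_TYPE_UNKNOWN;
    } else if (sampleMimeType.startsWith("audio/")) {
      return TRACK_TYPE_AUDIO;
    } else if (sampleMimeType.startsWith("video/")) {
      return TRACK_TYPE_VIDEO;
    }
    return TRACK_TYPE_UNKNOWN;
  }

  public static void main(String[] args) {
    System.out.println(getTrackType("audio/mp4a-latm")); // 1 (audio)
  }
}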

TransformerInternal.java

@@ -361,12 +361,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     @Override
     public SampleConsumer onTrackAdded(
-        Format format,
+        Format firstInputFormat,
         @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
         long streamStartPositionUs,
         long streamOffsetUs)
         throws TransformationException {
-      int trackType = MimeTypes.getTrackType(format.sampleMimeType);
+      int trackType = MimeTypes.getTrackType(firstInputFormat.sampleMimeType);
       if (!trackAdded) {
         if (generateSilentAudio) {
           if (trackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
@@ -384,7 +384,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       }
       SamplePipeline samplePipeline =
-          getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
+          getSamplePipeline(
+              firstInputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
       compositeAssetLoader.addOnMediaItemChangedListener(samplePipeline, trackType);
       internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
@@ -452,17 +453,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     // Private methods.
     private SamplePipeline getSamplePipeline(
-        Format inputFormat,
+        Format firstInputFormat,
         @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
         long streamStartPositionUs,
         long streamOffsetUs)
         throws TransformationException {
       checkState(supportedOutputTypes != 0);
-      boolean isAudio = MimeTypes.isAudio(inputFormat.sampleMimeType);
+      boolean isAudio = MimeTypes.isAudio(firstInputFormat.sampleMimeType);
       boolean shouldTranscode =
           isAudio
-              ? shouldTranscodeAudio(inputFormat)
-              : shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
+              ? shouldTranscodeAudio(firstInputFormat)
+              : shouldTranscodeVideo(firstInputFormat, streamStartPositionUs, streamOffsetUs);
       boolean assetLoaderNeverDecodes = (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) == 0;
       checkState(!shouldTranscode || !assetLoaderNeverDecodes);
       boolean assetLoaderAlwaysDecodes =
@@ -470,7 +471,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
       if (isAudio && shouldUseTranscodingPipeline) {
         return new AudioSamplePipeline(
-            inputFormat,
+            firstInputFormat,
             streamStartPositionUs,
             streamOffsetUs,
             transformationRequest,
@@ -483,7 +484,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       } else if (shouldUseTranscodingPipeline) {
         return new VideoSamplePipeline(
             context,
-            inputFormat,
+            firstInputFormat,
             streamStartPositionUs,
             streamOffsetUs,
             transformationRequest,
@@ -496,7 +497,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             debugViewProvider);
       } else {
         return new EncodedSamplePipeline(
-            inputFormat,
+            firstInputFormat,
             streamStartPositionUs,
             transformationRequest,
             muxerWrapper,
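
getSamplePipeline above chooses among three pipelines based on the
first format: an AudioSamplePipeline or VideoSamplePipeline when
transcoding is needed (or the asset loader always decodes), and a
pass-through EncodedSamplePipeline otherwise. A condensed stand-in
sketch of that branching (hypothetical helper; the real decision also
depends on the shouldTranscode* checks shown in the diff):

// Stand-in sketch of the pipeline selection above, not the real method.
final class PipelineSelectionSketch {
  enum Pipeline { AUDIO_TRANSCODING, VIDEO_TRANSCODING, ENCODED_PASSTHROUGH }

  static Pipeline select(
      boolean isAudio, boolean shouldTranscode, boolean assetLoaderAlwaysDecodes) {
    boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
    if (isAudio && shouldUseTranscodingPipeline) {
      return Pipeline.AUDIO_TRANSCODING;
    } else if (shouldUseTranscodingPipeline) {
      return Pipeline.VIDEO_TRANSCODING;
    } else {
      return Pipeline.ENCODED_PASSTHROUGH;
    }
  }

  public static void main(String[] args) {
    // An audio track that needs no transcoding is passed through encoded.
    System.out.println(
        select(/* isAudio= */ true, /* shouldTranscode= */ false,
            /* assetLoaderAlwaysDecodes= */ false)); // ENCODED_PASSTHROUGH
  }
}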

VideoSamplePipeline.java

@@ -69,7 +69,7 @@ import org.checkerframework.dataflow.qual.Pure;
   public VideoSamplePipeline(
       Context context,
-      Format inputFormat,
+      Format firstInputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,
       TransformationRequest transformationRequest,
@@ -81,21 +81,22 @@ import org.checkerframework.dataflow.qual.Pure;
       FallbackListener fallbackListener,
       DebugViewProvider debugViewProvider)
       throws TransformationException {
-    super(inputFormat, streamStartPositionUs, muxerWrapper);
+    super(firstInputFormat, streamStartPositionUs, muxerWrapper);
     boolean isGlToneMapping = false;
-    if (ColorInfo.isTransferHdr(inputFormat.colorInfo)) {
+    if (ColorInfo.isTransferHdr(firstInputFormat.colorInfo)) {
       if (transformationRequest.hdrMode == HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR) {
         if (SDK_INT < 29) {
           throw TransformationException.createForCodec(
               new IllegalArgumentException("Interpreting HDR video as SDR is not supported."),
               /* isVideo= */ true,
               /* isDecoder= */ true,
-              inputFormat,
+              firstInputFormat,
               /* mediaCodecName= */ null,
               TransformationException.ERROR_CODE_HDR_DECODING_UNSUPPORTED);
         }
-        inputFormat = inputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
+        firstInputFormat =
+            firstInputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
       } else if (transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL) {
         if (SDK_INT < 29) {
           throw TransformationException.createForCodec(
@@ -103,7 +104,7 @@ import org.checkerframework.dataflow.qual.Pure;
                   "OpenGL-based HDR to SDR tone mapping is not supported."),
               /* isVideo= */ true,
               /* isDecoder= */ true,
-              inputFormat,
+              firstInputFormat,
               /* mediaCodecName= */ null,
               TransformationException.ERROR_CODE_HDR_DECODING_UNSUPPORTED);
         }
@@ -113,7 +114,7 @@ import org.checkerframework.dataflow.qual.Pure;
               new IllegalArgumentException("HDR editing and tone mapping is not supported."),
               /* isVideo= */ true,
               /* isDecoder= */ false,
-              inputFormat,
+              firstInputFormat,
               /* mediaCodecName= */ null,
               TransformationException.ERROR_CODE_HDR_ENCODING_UNSUPPORTED);
         }
@@ -127,7 +128,7 @@ import org.checkerframework.dataflow.qual.Pure;
     encoderWrapper =
         new EncoderWrapper(
             encoderFactory,
-            inputFormat,
+            firstInputFormat,
             muxerWrapper.getSupportedSampleMimeTypes(C.TRACK_TYPE_VIDEO),
             transformationRequest,
             fallbackListener);
@@ -136,7 +137,7 @@ import org.checkerframework.dataflow.qual.Pure;
     // If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
     // possibly by tone mapping.
     frameProcessorInputColor =
-        isGlToneMapping ? checkNotNull(inputFormat.colorInfo) : encoderInputColor;
+        isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
     // For consistency with the Android platform, OpenGL tone mapping outputs colors with
     // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
     // C.COLOR_TRANSFER_SDR to the encoder.
@@ -199,14 +200,18 @@ import org.checkerframework.dataflow.qual.Pure;
       throw TransformationException.createForFrameProcessingException(
           e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED);
     }
-    // The decoder rotates encoded frames for display by inputFormat.rotationDegrees.
+    // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
     int decodedWidth =
-        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
+        (firstInputFormat.rotationDegrees % 180 == 0)
+            ? firstInputFormat.width
+            : firstInputFormat.height;
     int decodedHeight =
-        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
+        (firstInputFormat.rotationDegrees % 180 == 0)
+            ? firstInputFormat.height
+            : firstInputFormat.width;
     firstFrameInfo =
         new FrameInfo.Builder(decodedWidth, decodedHeight)
-            .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
+            .setPixelWidthHeightRatio(firstInputFormat.pixelWidthHeightRatio)
             .setStreamOffsetUs(streamOffsetUs)
             .build();
   }
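
The decoded-size computation in the last hunk accounts for the decoder
rotating frames for display: for 90- and 270-degree rotations, width
and height swap. A stand-alone worked version of that arithmetic:

// Stand-alone version of the decoded-size computation above: the decoder
// rotates frames by rotationDegrees, so width/height swap unless the
// rotation is a multiple of 180 degrees.
final class DecodedSizeSketch {
  static int[] decodedSize(int width, int height, int rotationDegrees) {
    int decodedWidth = (rotationDegrees % 180 == 0) ? width : height;
    int decodedHeight = (rotationDegrees % 180 == 0) ? height : width;
    return new int[] {decodedWidth, decodedHeight};
  }

  public static void main(String[] args) {
    // A 1920x1080 stream with rotationDegrees=90 decodes to 1080x1920.
    int[] size = decodedSize(1920, 1080, 90);
    System.out.println(size[0] + "x" + size[1]); // 1080x1920
  }
}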