Rename inputFormat to firstInputFormat in multiple places
With multi-asset, the sample pipelines can process more than one MediaItem. The renaming makes it clear that the format passed to the SamplePipeline constructors is the one corresponding to the first MediaItem. Indeed, the first format is the one used to configure the SamplePipelines.

PiperOrigin-RevId: 506879260
commit 4094aef9a5
parent ed5af63227
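To illustrate the reasoning in the commit message, here is a minimal, self-contained sketch of the pattern the rename documents. The types below are simplified stand-ins, not the real androidx.media3 API: a pipeline is configured once from the first MediaItem's format, and later items are announced through a media-item-changed callback (compare `addOnMediaItemChangedListener` in the diff below).

// Simplified stand-ins for illustration only; not the real androidx.media3 types.
final class Format {
  final String sampleMimeType;
  final int sampleRate;

  Format(String sampleMimeType, int sampleRate) {
    this.sampleMimeType = sampleMimeType;
    this.sampleRate = sampleRate;
  }
}

interface OnMediaItemChangedListener {
  // Called when the pipeline starts receiving samples for the next MediaItem.
  void onMediaItemChanged(Format trackFormat);
}

final class SamplePipelineSketch implements OnMediaItemChangedListener {
  private final String configuredMimeType;

  // The constructor only ever sees the FIRST MediaItem's format, hence the
  // firstInputFormat name: codecs and processors are configured from it once.
  SamplePipelineSketch(Format firstInputFormat) {
    this.configuredMimeType = firstInputFormat.sampleMimeType;
  }

  @Override
  public void onMediaItemChanged(Format trackFormat) {
    // Subsequent items flow through the already-configured pipeline; their
    // formats arrive via this callback rather than the constructor.
    System.out.println(
        "Now processing " + trackFormat.sampleMimeType
            + " through pipeline configured for " + configuredMimeType);
  }
}

Under this reading, keeping the name `inputFormat` would wrongly suggest the constructor argument tracks the current item; `firstInputFormat` pins down that only the first item's format drives configuration.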
@@ -61,7 +61,7 @@ import org.checkerframework.dataflow.qual.Pure;
 
   // TODO(b/260618558): Move silent audio generation upstream of this component.
   public AudioSamplePipeline(
-      Format inputFormat,
+      Format firstInputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,
       TransformationRequest transformationRequest,
@@ -72,10 +72,11 @@ import org.checkerframework.dataflow.qual.Pure;
       MuxerWrapper muxerWrapper,
       FallbackListener fallbackListener)
       throws TransformationException {
-    super(inputFormat, streamStartPositionUs, muxerWrapper);
+    super(firstInputFormat, streamStartPositionUs, muxerWrapper);
 
     if (generateSilentAudioDurationUs != C.TIME_UNSET) {
-      silentAudioGenerator = new SilentAudioGenerator(inputFormat, generateSilentAudioDurationUs);
+      silentAudioGenerator =
+          new SilentAudioGenerator(firstInputFormat, generateSilentAudioDurationUs);
     } else {
       silentAudioGenerator = null;
     }
@@ -95,7 +96,7 @@ import org.checkerframework.dataflow.qual.Pure;
     if (flattenForSlowMotion) {
       audioProcessors =
           new ImmutableList.Builder<AudioProcessor>()
-              .add(new SpeedChangingAudioProcessor(new SegmentSpeedProvider(inputFormat)))
+              .add(new SpeedChangingAudioProcessor(new SegmentSpeedProvider(firstInputFormat)))
               .addAll(audioProcessors)
               .build();
     }
@@ -103,8 +104,8 @@ import org.checkerframework.dataflow.qual.Pure;
     audioProcessingPipeline = new AudioProcessingPipeline(audioProcessors);
     AudioFormat pipelineInputAudioFormat =
         new AudioFormat(
-            inputFormat.sampleRate,
-            inputFormat.channelCount,
+            firstInputFormat.sampleRate,
+            firstInputFormat.channelCount,
             // The decoder uses ENCODING_PCM_16BIT by default.
             // https://developer.android.com/reference/android/media/MediaCodec#raw-audio-buffers
             C.ENCODING_PCM_16BIT);
@@ -121,7 +122,7 @@ import org.checkerframework.dataflow.qual.Pure;
     String requestedMimeType =
         transformationRequest.audioMimeType != null
             ? transformationRequest.audioMimeType
-            : checkNotNull(inputFormat.sampleMimeType);
+            : checkNotNull(firstInputFormat.sampleMimeType);
     Format requestedOutputFormat =
         new Format.Builder()
             .setSampleMimeType(requestedMimeType)
@@ -37,10 +37,11 @@ import androidx.media3.decoder.DecoderInputBuffer;
 
   private boolean muxerWrapperTrackAdded;
 
-  public SamplePipeline(Format inputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
+  public SamplePipeline(
+      Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
     this.streamStartPositionUs = streamStartPositionUs;
     this.muxerWrapper = muxerWrapper;
-    trackType = MimeTypes.getTrackType(inputFormat.sampleMimeType);
+    trackType = MimeTypes.getTrackType(firstInputFormat.sampleMimeType);
   }
 
   protected static TransformationException createNoSupportedMimeTypeException(
@@ -361,12 +361,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
   @Override
   public SampleConsumer onTrackAdded(
-      Format format,
+      Format firstInputFormat,
       @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
       long streamStartPositionUs,
       long streamOffsetUs)
       throws TransformationException {
-    int trackType = MimeTypes.getTrackType(format.sampleMimeType);
+    int trackType = MimeTypes.getTrackType(firstInputFormat.sampleMimeType);
     if (!trackAdded) {
       if (generateSilentAudio) {
         if (trackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
@@ -384,7 +384,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
 
     SamplePipeline samplePipeline =
-        getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
+        getSamplePipeline(
+            firstInputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
     compositeAssetLoader.addOnMediaItemChangedListener(samplePipeline, trackType);
     internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
 
@@ -452,17 +453,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   // Private methods.
 
   private SamplePipeline getSamplePipeline(
-      Format inputFormat,
+      Format firstInputFormat,
       @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
       long streamStartPositionUs,
       long streamOffsetUs)
       throws TransformationException {
     checkState(supportedOutputTypes != 0);
-    boolean isAudio = MimeTypes.isAudio(inputFormat.sampleMimeType);
+    boolean isAudio = MimeTypes.isAudio(firstInputFormat.sampleMimeType);
     boolean shouldTranscode =
         isAudio
-            ? shouldTranscodeAudio(inputFormat)
-            : shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
+            ? shouldTranscodeAudio(firstInputFormat)
+            : shouldTranscodeVideo(firstInputFormat, streamStartPositionUs, streamOffsetUs);
     boolean assetLoaderNeverDecodes = (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) == 0;
     checkState(!shouldTranscode || !assetLoaderNeverDecodes);
     boolean assetLoaderAlwaysDecodes =
@@ -470,7 +471,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
     if (isAudio && shouldUseTranscodingPipeline) {
       return new AudioSamplePipeline(
-          inputFormat,
+          firstInputFormat,
           streamStartPositionUs,
           streamOffsetUs,
           transformationRequest,
@@ -483,7 +484,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     } else if (shouldUseTranscodingPipeline) {
       return new VideoSamplePipeline(
           context,
-          inputFormat,
+          firstInputFormat,
           streamStartPositionUs,
           streamOffsetUs,
           transformationRequest,
@@ -496,7 +497,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
           debugViewProvider);
     } else {
       return new EncodedSamplePipeline(
-          inputFormat,
+          firstInputFormat,
           streamStartPositionUs,
           transformationRequest,
           muxerWrapper,
@@ -69,7 +69,7 @@ import org.checkerframework.dataflow.qual.Pure;
 
   public VideoSamplePipeline(
       Context context,
-      Format inputFormat,
+      Format firstInputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,
       TransformationRequest transformationRequest,
@@ -81,21 +81,22 @@ import org.checkerframework.dataflow.qual.Pure;
       FallbackListener fallbackListener,
       DebugViewProvider debugViewProvider)
       throws TransformationException {
-    super(inputFormat, streamStartPositionUs, muxerWrapper);
+    super(firstInputFormat, streamStartPositionUs, muxerWrapper);
 
     boolean isGlToneMapping = false;
-    if (ColorInfo.isTransferHdr(inputFormat.colorInfo)) {
+    if (ColorInfo.isTransferHdr(firstInputFormat.colorInfo)) {
       if (transformationRequest.hdrMode == HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR) {
         if (SDK_INT < 29) {
           throw TransformationException.createForCodec(
               new IllegalArgumentException("Interpreting HDR video as SDR is not supported."),
               /* isVideo= */ true,
               /* isDecoder= */ true,
-              inputFormat,
+              firstInputFormat,
               /* mediaCodecName= */ null,
               TransformationException.ERROR_CODE_HDR_DECODING_UNSUPPORTED);
         }
-        inputFormat = inputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
+        firstInputFormat =
+            firstInputFormat.buildUpon().setColorInfo(ColorInfo.SDR_BT709_LIMITED).build();
       } else if (transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL) {
         if (SDK_INT < 29) {
           throw TransformationException.createForCodec(
@@ -103,7 +104,7 @@ import org.checkerframework.dataflow.qual.Pure;
                   "OpenGL-based HDR to SDR tone mapping is not supported."),
               /* isVideo= */ true,
               /* isDecoder= */ true,
-              inputFormat,
+              firstInputFormat,
               /* mediaCodecName= */ null,
               TransformationException.ERROR_CODE_HDR_DECODING_UNSUPPORTED);
         }
@@ -113,7 +114,7 @@ import org.checkerframework.dataflow.qual.Pure;
             new IllegalArgumentException("HDR editing and tone mapping is not supported."),
             /* isVideo= */ true,
             /* isDecoder= */ false,
-            inputFormat,
+            firstInputFormat,
            /* mediaCodecName= */ null,
            TransformationException.ERROR_CODE_HDR_ENCODING_UNSUPPORTED);
       }
@@ -127,7 +128,7 @@ import org.checkerframework.dataflow.qual.Pure;
     encoderWrapper =
         new EncoderWrapper(
             encoderFactory,
-            inputFormat,
+            firstInputFormat,
             muxerWrapper.getSupportedSampleMimeTypes(C.TRACK_TYPE_VIDEO),
             transformationRequest,
             fallbackListener);
@@ -136,7 +137,7 @@ import org.checkerframework.dataflow.qual.Pure;
     // If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
     // possibly by tone mapping.
     frameProcessorInputColor =
-        isGlToneMapping ? checkNotNull(inputFormat.colorInfo) : encoderInputColor;
+        isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
     // For consistency with the Android platform, OpenGL tone mapping outputs colors with
     // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
     // C.COLOR_TRANSFER_SDR to the encoder.
@@ -199,14 +200,18 @@ import org.checkerframework.dataflow.qual.Pure;
       throw TransformationException.createForFrameProcessingException(
           e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED);
     }
-    // The decoder rotates encoded frames for display by inputFormat.rotationDegrees.
+    // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
     int decodedWidth =
-        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
+        (firstInputFormat.rotationDegrees % 180 == 0)
+            ? firstInputFormat.width
+            : firstInputFormat.height;
     int decodedHeight =
-        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
+        (firstInputFormat.rotationDegrees % 180 == 0)
+            ? firstInputFormat.height
+            : firstInputFormat.width;
     firstFrameInfo =
         new FrameInfo.Builder(decodedWidth, decodedHeight)
-            .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
+            .setPixelWidthHeightRatio(firstInputFormat.pixelWidthHeightRatio)
             .setStreamOffsetUs(streamOffsetUs)
             .build();
   }