Provide TransformerInternal with the AssetLoader output format.

TransformerInternal can use this Format when creating the SamplePipelines.

PiperOrigin-RevId: 513777622
samrobinson 2023-03-03 10:54:48 +00:00 committed by Rohit Singh
parent f23d6c1dc1
commit 07ba24ab1a
15 changed files with 425 additions and 262 deletions
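
For orientation before the per-file diffs, here is a minimal sketch of the new two-step Listener contract introduced by this change, written from the point of view of a hypothetical custom asset loader. It is not part of the commit: the class and helper names (ListenerContractSketch, reportSingleTrack, decodedFormat) are placeholders, and only the Listener calls mirror the real API.

import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.transformer.AssetLoader;
import androidx.media3.transformer.ExportException;
import androidx.media3.transformer.SampleConsumer;

final class ListenerContractSketch {

  /** Declares one track and obtains its SampleConsumer under the new contract. */
  static SampleConsumer reportSingleTrack(AssetLoader.Listener listener, Format extractedFormat)
      throws ExportException {
    listener.onTrackCount(1);

    // Step 1: onTrackAdded() no longer returns the SampleConsumer. It now returns whether
    // the asset loader must output decoded samples for this track.
    boolean shouldDecode =
        listener.onTrackAdded(
            extractedFormat,
            AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED | AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED,
            /* streamStartPositionUs= */ 0,
            /* streamOffsetUs= */ 0);

    // Step 2: onOutputFormat() is called with the format of the samples that will actually be
    // fed downstream (the decoder output format when decoding), and returns the SampleConsumer.
    Format outputFormat = shouldDecode ? decodedFormat(extractedFormat) : extractedFormat;
    return listener.onOutputFormat(outputFormat);
  }

  // Placeholder: a real loader would take this from its decoder's output format.
  private static Format decodedFormat(Format format) {
    return format
        .buildUpon()
        .setSampleMimeType(MimeTypes.AUDIO_RAW)
        .setPcmEncoding(C.ENCODING_PCM_16BIT)
        .build();
  }

  private ListenerContractSketch() {}
}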

View File

@ -77,7 +77,7 @@ public interface AssetLoader {
void onDurationUs(long durationUs);
/**
* Called when the number of tracks output by the asset loader is known.
* Called when the number of tracks being output is known.
*
* <p>Can be called from any thread.
*/
@ -91,28 +91,41 @@ public interface AssetLoader {
*
* <p>Must be called once per {@linkplain #onTrackCount(int) declared} track.
*
* <p>Must be called from the thread that will be used to call the returned {@link
* SampleConsumer}'s methods. This thread must be the same for all the tracks added, and is
* generally different from the one used to access the {@link AssetLoader} methods.
* <p>Must be called from the thread that will be used to call {@link #onOutputFormat(Format)}.
*
* @param format The {@link Format} of the input media (prior to video slow motion flattening or
* to decoding).
* @param inputFormat The {@link Format} of samples that will be input to the {@link
* AssetLoader} (prior to video slow motion flattening or to decoding).
* @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
* this asset loader for the track added. At least one output type must be supported.
* this {@link AssetLoader} for the track added. At least one output type must be supported.
* @param streamStartPositionUs The start position of the stream (offset by {@code
* streamOffsetUs}), in microseconds.
* @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
* non-negative, in microseconds.
* @return The {@link SampleConsumer} describing the type of sample data expected, and to which
* to pass this data.
* @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
* @return Whether the {@link AssetLoader} needs to provide decoded data to the {@link
* SampleConsumer}.
*/
SampleConsumer onTrackAdded(
Format format,
boolean onTrackAdded(
Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws ExportException;
long streamOffsetUs);
/**
* Called when the {@link Format} of samples that will be output by the {@link AssetLoader} is
* known.
*
* <p>Must be called once per {@linkplain #onTrackCount declared} track, and only after that
* track has been {@link #onTrackAdded added}.
*
* <p>Must be called from the thread that will be used to call the returned {@link
* SampleConsumer}'s methods. This thread must be the same for all formats output, and is
* generally different from the one used to access the {@link AssetLoader} methods.
*
* @param format The {@link Format} of samples that will be output.
* @return The {@link SampleConsumer} of samples of the given {@link Format}.
* @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
*/
SampleConsumer onOutputFormat(Format format) throws ExportException;
/**
* Called if an error occurs in the asset loader. In this case, the asset loader will be

View File

@ -60,7 +60,8 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/260618558): Move silent audio generation upstream of this component.
public AudioSamplePipeline(
Format firstInputFormat,
Format firstAssetLoaderInputFormat,
Format firstPipelineInputFormat,
long streamOffsetUs,
TransformationRequest transformationRequest,
boolean flattenForSlowMotion,
@ -69,9 +70,9 @@ import org.checkerframework.dataflow.qual.Pure;
MuxerWrapper muxerWrapper,
FallbackListener fallbackListener)
throws ExportException {
super(firstInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
super(firstPipelineInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
silentAudioGenerator = new SilentAudioGenerator(firstInputFormat);
silentAudioGenerator = new SilentAudioGenerator(firstPipelineInputFormat);
availableInputBuffers = new ConcurrentLinkedDeque<>();
ByteBuffer emptyBuffer = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
for (int i = 0; i < MAX_INPUT_BUFFER_COUNT; i++) {
@ -84,12 +85,12 @@ import org.checkerframework.dataflow.qual.Pure;
encoderInputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
encoderOutputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
if (flattenForSlowMotion && firstInputFormat.metadata != null) {
if (flattenForSlowMotion && firstAssetLoaderInputFormat.metadata != null) {
audioProcessors =
new ImmutableList.Builder<AudioProcessor>()
.add(
new SpeedChangingAudioProcessor(
new SegmentSpeedProvider(firstInputFormat.metadata)))
new SegmentSpeedProvider(firstAssetLoaderInputFormat.metadata)))
.addAll(audioProcessors)
.build();
}
@ -98,10 +99,10 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/267301878): Once decoder format propagated, remove setting default PCM encoding.
AudioFormat pipelineInputAudioFormat =
new AudioFormat(
firstInputFormat.sampleRate,
firstInputFormat.channelCount,
firstInputFormat.pcmEncoding != Format.NO_VALUE
? firstInputFormat.pcmEncoding
firstPipelineInputFormat.sampleRate,
firstPipelineInputFormat.channelCount,
firstPipelineInputFormat.pcmEncoding != Format.NO_VALUE
? firstPipelineInputFormat.pcmEncoding
: DEFAULT_PCM_ENCODING);
try {
@ -118,7 +119,7 @@ import org.checkerframework.dataflow.qual.Pure;
.setSampleMimeType(
transformationRequest.audioMimeType != null
? transformationRequest.audioMimeType
: checkNotNull(firstInputFormat.sampleMimeType))
: checkNotNull(firstAssetLoaderInputFormat.sampleMimeType))
.setSampleRate(encoderInputAudioFormat.sampleRate)
.setChannelCount(encoderInputAudioFormat.channelCount)
.setPcmEncoding(encoderInputAudioFormat.encoding)
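
To make the two new constructor parameters above concrete, here is an illustrative pair of formats for one audio track. The field values are made up; the sketch only shows which fields each parameter is expected to carry.

import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes;

final class AudioPipelineFormatsSketch {

  // firstAssetLoaderInputFormat: the extracted (encoded) track format. The pipeline reads its
  // metadata (for slow motion flattening) and its sample MIME type (as the encoder fallback).
  static Format assetLoaderInputFormat(Metadata sefSlowMotionMetadata) {
    return new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_AAC)
        .setSampleRate(48000)
        .setChannelCount(2)
        .setMetadata(sefSlowMotionMetadata)
        .build();
  }

  // firstPipelineInputFormat: what the samples fed into the pipeline look like (decoded here).
  // The pipeline reads its sample rate, channel count and PCM encoding to configure the
  // AudioProcessor chain.
  static Format pipelineInputFormat() {
    return new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_RAW)
        .setSampleRate(48000)
        .setChannelCount(2)
        .setPcmEncoding(C.ENCODING_PCM_16BIT)
        .build();
  }

  private AudioPipelineFormatsSketch() {}
}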

View File

@ -49,6 +49,13 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
/* package */ final class CompositeAssetLoader implements AssetLoader, AssetLoader.Listener {
private static final Format FORCE_AUDIO_TRACK_FORMAT =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
.setSampleRate(44100)
.setChannelCount(2)
.build();
private final List<EditedMediaItem> editedMediaItems;
private final AtomicInteger currentMediaItemIndex;
private final boolean forceAudioTrack;
@ -76,6 +83,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private AssetLoader currentAssetLoader;
private boolean trackCountReported;
private int processedInputsSize;
private boolean decodeAudio;
private boolean decodeVideo;
private volatile long currentDurationUs;
@ -186,42 +195,65 @@ import java.util.concurrent.atomic.AtomicInteger;
}
@Override
public SampleConsumer onTrackAdded(
Format format,
public boolean onTrackAdded(
Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws ExportException {
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer;
if (currentMediaItemIndex.get() == 0) {
boolean addForcedAudioTrack =
forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO;
long streamOffsetUs) {
boolean isAudio = getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO;
if (currentMediaItemIndex.get() != 0) {
return isAudio ? decodeAudio : decodeVideo;
}
boolean addForcedAudioTrack = forceAudioTrack && nonEndedTracks.get() == 1 && !isAudio;
if (!trackCountReported) {
int trackCount = nonEndedTracks.get() + (addForcedAudioTrack ? 1 : 0);
compositeAssetLoaderListener.onTrackCount(trackCount);
trackCountReported = true;
}
sampleConsumer =
new SampleConsumerWrapper(
boolean decodeOutput =
compositeAssetLoaderListener.onTrackAdded(
format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs));
sampleConsumersByTrackType.put(trackType, sampleConsumer);
inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
if (isAudio) {
decodeAudio = decodeOutput;
} else {
decodeVideo = decodeOutput;
}
if (addForcedAudioTrack) {
Format firstAudioFormat =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
.setSampleRate(44100)
.setChannelCount(2)
.build();
SampleConsumer audioSampleConsumer =
new SampleConsumerWrapper(
compositeAssetLoaderListener.onTrackAdded(
firstAudioFormat,
FORCE_AUDIO_TRACK_FORMAT,
SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs,
streamOffsetUs));
sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer);
streamOffsetUs);
}
return decodeOutput;
}
@Override
public SampleConsumer onOutputFormat(Format format) throws ExportException {
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer;
if (currentMediaItemIndex.get() == 0) {
sampleConsumer =
new SampleConsumerWrapper(compositeAssetLoaderListener.onOutputFormat(format));
sampleConsumersByTrackType.put(trackType, sampleConsumer);
if (forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
sampleConsumersByTrackType.put(
C.TRACK_TYPE_AUDIO,
new SampleConsumerWrapper(
compositeAssetLoaderListener.onOutputFormat(
FORCE_AUDIO_TRACK_FORMAT
.buildUpon()
.setSampleMimeType(MimeTypes.AUDIO_RAW)
.setPcmEncoding(C.ENCODING_PCM_16BIT)
.build())));
}
} else {
// TODO(b/270533049): Remove the check below when implementing blank video frames generation.
@ -288,13 +320,6 @@ import java.util.concurrent.atomic.AtomicInteger;
this.sampleConsumer = sampleConsumer;
}
@Override
public boolean expectsDecodedData() {
// TODO(b/252537210): handle the case where the first media item doesn't need to be encoded
// but a following one does.
return sampleConsumer.expectsDecodedData();
}
@Nullable
@Override
public DecoderInputBuffer getInputBuffer() {
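
As a consolidated view of the forced silent-audio track handled in the two hunks above: the track is declared to the downstream listener as AAC, but because the generated silence is decoded data, onOutputFormat() receives its raw-PCM counterpart. A sketch with the values used in this file (the wrapper class is a placeholder, not part of the change):

import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;

final class ForcedAudioTrackFormatsSketch {

  // What CompositeAssetLoader declares via onTrackAdded() (FORCE_AUDIO_TRACK_FORMAT above).
  static final Format DECLARED_FORMAT =
      new Format.Builder()
          .setSampleMimeType(MimeTypes.AUDIO_AAC)
          .setSampleRate(44100)
          .setChannelCount(2)
          .build();

  // What it passes to onOutputFormat(), since the generated silent audio is decoded PCM.
  static final Format OUTPUT_FORMAT =
      DECLARED_FORMAT
          .buildUpon()
          .setSampleMimeType(MimeTypes.AUDIO_RAW)
          .setPcmEncoding(C.ENCODING_PCM_16BIT)
          .build();

  private ForcedAudioTrackFormatsSketch() {}
}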

View File

@ -71,11 +71,6 @@ import java.util.concurrent.atomic.AtomicLong;
nextMediaItemOffsetUs.addAndGet(durationUs);
}
@Override
public boolean expectsDecodedData() {
return false;
}
@Override
@Nullable
public DecoderInputBuffer getInputBuffer() {

View File

@ -56,14 +56,13 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @throws ExportException If an error occurs in the decoder.
*/
@Override
@RequiresNonNull("sampleConsumer")
@RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException {
@Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.getInputBuffer();
if (sampleConsumerInputBuffer == null) {
return false;
}
Codec decoder = checkNotNull(this.decoder);
if (decoder.isEnded()) {
checkNotNull(sampleConsumerInputBuffer.data).limit(0);
sampleConsumerInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);

View File

@ -17,10 +17,12 @@
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
@ -32,6 +34,7 @@ import androidx.media3.exoplayer.FormatHolder;
import androidx.media3.exoplayer.MediaClock;
import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@ -42,6 +45,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
protected @MonotonicNonNull SampleConsumer sampleConsumer;
protected @MonotonicNonNull Codec decoder;
protected boolean isEnded;
private @MonotonicNonNull Format inputFormat;
private final TransformerMediaClock mediaClock;
private final AssetLoader.Listener assetLoaderListener;
@ -92,15 +96,25 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override
public void render(long positionUs, long elapsedRealtimeUs) {
try {
if (!isRunning || isEnded() || !ensureConfigured()) {
if (!isRunning || isEnded() || !hasReadInputFormat()) {
return;
}
if (sampleConsumer.expectsDecodedData()) {
while (feedConsumerFromDecoder() || feedDecoderFromInput()) {}
if (decoder != null) {
boolean progressMade;
do {
progressMade = false;
if (ensureSampleConsumerInitialized()) {
progressMade = feedConsumerFromDecoder();
}
progressMade |= feedDecoderFromInput();
} while (progressMade);
} else {
if (ensureSampleConsumerInitialized()) {
while (feedConsumerFromInput()) {}
}
}
} catch (ExportException e) {
isRunning = false;
assetLoaderListener.onError(e);
@ -144,7 +158,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
protected void onInputFormatRead(Format inputFormat) {}
/** Initializes {@link #decoder} with an appropriate {@linkplain Codec decoder}. */
@RequiresNonNull("sampleConsumer")
@EnsuresNonNull("decoder")
protected abstract void initDecoder(Format inputFormat) throws ExportException;
/**
@ -166,12 +180,22 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @return Whether it may be possible to read more data immediately by calling this method again.
* @throws ExportException If an error occurs in the decoder.
*/
@RequiresNonNull("sampleConsumer")
@RequiresNonNull({"sampleConsumer", "decoder"})
protected abstract boolean feedConsumerFromDecoder() throws ExportException;
@EnsuresNonNullIf(expression = "sampleConsumer", result = true)
private boolean ensureConfigured() throws ExportException {
if (sampleConsumer != null) {
/**
* Attempts to read the input {@link Format} from the source, if not read.
*
* <p>After reading the format, {@link AssetLoader.Listener#onTrackAdded} is notified, and, if
* needed, the decoder is {@linkplain #initDecoder(Format) initialized}.
*
* @return Whether the input {@link Format} is available.
* @throws ExportException If an error occurs {@linkplain #initDecoder initializing} the
* {@linkplain Codec decoder}.
*/
@EnsuresNonNullIf(expression = "inputFormat", result = true)
private boolean hasReadInputFormat() throws ExportException {
if (inputFormat != null) {
return true;
}
@ -181,16 +205,58 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (result != C.RESULT_FORMAT_READ) {
return false;
}
Format inputFormat = overrideFormat(checkNotNull(formatHolder.format));
@AssetLoader.SupportedOutputTypes
int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
sampleConsumer =
assetLoaderListener.onTrackAdded(
inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
inputFormat = overrideFormat(checkNotNull(formatHolder.format));
onInputFormatRead(inputFormat);
if (sampleConsumer.expectsDecodedData()) {
boolean decodeOutput =
assetLoaderListener.onTrackAdded(
inputFormat,
SUPPORTED_OUTPUT_TYPE_DECODED | SUPPORTED_OUTPUT_TYPE_ENCODED,
streamStartPositionUs,
streamOffsetUs);
if (decodeOutput) {
if (getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
initDecoder(inputFormat);
} else {
// TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and
// get decoder output Format before init sampleConsumer.
checkState(ensureSampleConsumerInitialized());
initDecoder(inputFormat);
}
}
return true;
}
/**
* Attempts to initialize the {@link SampleConsumer}, if not initialized.
*
* @return Whether the {@link SampleConsumer} is initialized.
* @throws ExportException If the {@linkplain Codec decoder} errors getting its {@linkplain
* Codec#getOutputFormat() output format}.
* @throws ExportException If the {@link AssetLoader.Listener} errors providing a {@link
* SampleConsumer}.
*/
@RequiresNonNull("inputFormat")
@EnsuresNonNullIf(expression = "sampleConsumer", result = true)
private boolean ensureSampleConsumerInitialized() throws ExportException {
if (sampleConsumer != null) {
return true;
}
if (decoder != null
&& getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
@Nullable Format decoderOutputFormat = decoder.getOutputFormat();
if (decoderOutputFormat == null) {
return false;
}
sampleConsumer = assetLoaderListener.onOutputFormat(decoderOutputFormat);
} else {
// TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and get
// decoder output Format before init sampleConsumer.
sampleConsumer = assetLoaderListener.onOutputFormat(inputFormat);
}
return true;
}
@ -200,8 +266,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @return Whether it may be possible to read more data immediately by calling this method again.
* @throws ExportException If an error occurs in the decoder.
*/
@RequiresNonNull("decoder")
private boolean feedDecoderFromInput() throws ExportException {
Codec decoder = checkNotNull(this.decoder);
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false;
}
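
Putting the render() and helper hunks above together, the renderer's per-call control flow now looks roughly like the sketch below. The abstract methods stand in for the real implementations; this is a condensed restatement, not the actual class.

import androidx.media3.transformer.ExportException;

abstract class RenderLoopSketch {

  // Stands in for "decoder != null" in the real renderer.
  protected boolean decoderInitialized;

  final void renderOnce() throws ExportException {
    if (!hasReadInputFormat()) {
      // Reading the format also reports the track via onTrackAdded() and, if decoding is
      // needed, initializes the decoder.
      return;
    }
    if (decoderInitialized) {
      // Decoding path. The SampleConsumer may only become available later: for audio, once
      // the decoder output format is known; for video, once the consumer has been created.
      boolean progressMade;
      do {
        progressMade = ensureSampleConsumerInitialized() && feedConsumerFromDecoder();
        progressMade |= feedDecoderFromInput();
      } while (progressMade);
    } else if (ensureSampleConsumerInitialized()) {
      // Passthrough path: encoded samples are fed directly to the consumer.
      while (feedConsumerFromInput()) {}
    }
  }

  abstract boolean hasReadInputFormat() throws ExportException;

  abstract boolean ensureSampleConsumerInitialized() throws ExportException;

  abstract boolean feedConsumerFromDecoder() throws ExportException;

  abstract boolean feedDecoderFromInput() throws ExportException;

  abstract boolean feedConsumerFromInput() throws ExportException;
}

The audio/video asymmetry in hasReadInputFormat() above comes from surface ownership: for audio the decoder is initialized first and the consumer is created from the decoder output format, while for video the consumer must be created first (from the input format) because it currently provides the decoder's output surface (TODO b/237674316).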

View File

@ -16,6 +16,7 @@
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.media.MediaCodec;
import androidx.annotation.Nullable;
@ -75,8 +76,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
}
@Override
@RequiresNonNull("sampleConsumer")
protected void initDecoder(Format inputFormat) throws ExportException {
// TODO(b/237674316): Move surface creation out of sampleConsumer. Init decoder before
// sampleConsumer.
checkStateNotNull(sampleConsumer);
boolean isDecoderToneMappingRequired =
ColorInfo.isTransferHdr(inputFormat.colorInfo)
&& !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo());
@ -116,9 +119,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
}
@Override
@RequiresNonNull("sampleConsumer")
@RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException {
Codec decoder = checkNotNull(this.decoder);
if (decoder.isEnded()) {
sampleConsumer.signalEndOfVideoInput();
isEnded = true;

View File

@ -88,12 +88,13 @@ public final class ImageAssetLoader implements AssetLoader {
.setWidth(bitmap.getWidth())
.setSampleMimeType(MIME_TYPE_IMAGE_ALL)
.build();
SampleConsumer sampleConsumer =
listener.onTrackAdded(
format,
SUPPORTED_OUTPUT_TYPE_DECODED,
/* streamStartPositionUs= */ 0,
/* streamOffsetUs= */ 0);
SampleConsumer sampleConsumer = listener.onOutputFormat(format);
checkState(editedMediaItem.durationUs != C.TIME_UNSET);
checkState(editedMediaItem.frameRate != C.RATE_UNSET_INT);
// TODO(b/262693274): consider using listener.onDurationUs() or the MediaItem change

View File

@ -28,8 +28,9 @@ import androidx.media3.common.MediaItem;
*
* @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
* @param durationUs The duration of the {@link MediaItem}, in microseconds.
* @param trackFormat The {@link Format} of the {@link MediaItem} track corresponding to the
* {@link SamplePipeline}, or {@code null} if no such track was extracted.
* @param trackFormat The {@link Format} extracted (and possibly decoded) from the {@link
* MediaItem} track, which represents the samples input to the {@link SamplePipeline}. {@code
* null} if no such track was extracted.
* @param isLast Whether the {@link MediaItem} is the last one passed to the {@link
* SamplePipeline}.
*/

View File

@ -26,14 +26,6 @@ import androidx.media3.decoder.DecoderInputBuffer;
@UnstableApi
public interface SampleConsumer {
/**
* Returns whether the consumer should be fed with decoded sample data. If false, encoded sample
* data should be fed.
*/
boolean expectsDecodedData();
// Methods to pass compressed input or raw audio input.
/**
* Returns a buffer if the consumer is ready to accept input, and {@code null} otherwise.
*

View File

@ -56,12 +56,7 @@ import java.util.List;
Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
this.streamStartPositionUs = streamStartPositionUs;
this.muxerWrapper = muxerWrapper;
this.outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
}
@Override
public boolean expectsDecodedData() {
return true;
outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
}
/**

View File

@ -18,12 +18,15 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.ExportException.ERROR_CODE_FAILED_RUNTIME_CHECK;
import static androidx.media3.transformer.ExportException.ERROR_CODE_MUXING_FAILED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static androidx.media3.transformer.TransformerUtil.areVideoEffectsAllNoOp;
import static androidx.media3.transformer.TransformerUtil.containsSlowMotionData;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import static java.lang.annotation.ElementType.TYPE_USE;
@ -37,21 +40,20 @@ import androidx.media3.common.C;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.effect.GlEffect;
import androidx.media3.effect.Presentation;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import com.google.common.collect.ImmutableList;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -404,6 +406,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final TransformationRequest transformationRequest;
private final FallbackListener fallbackListener;
private final DebugViewProvider debugViewProvider;
private final Map<Integer, AddedTrackInfo> addedTrackInfoByTrackType;
public CompositeAssetLoaderListener(
int sequenceIndex,
@ -417,6 +420,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.transformationRequest = transformationRequest;
this.fallbackListener = fallbackListener;
this.debugViewProvider = debugViewProvider;
addedTrackInfoByTrackType = new HashMap<>();
}
@Override
@ -440,20 +444,30 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public SampleConsumer onTrackAdded(
Format firstInputFormat,
public boolean onTrackAdded(
Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws ExportException {
SamplePipeline samplePipeline =
getSamplePipeline(
firstInputFormat,
shouldTranscode(
firstInputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs),
long streamOffsetUs) {
AddedTrackInfo trackInfo =
new AddedTrackInfo(
firstAssetLoaderInputFormat,
supportedOutputTypes,
streamStartPositionUs,
streamOffsetUs);
@C.TrackType int trackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
addedTrackInfoByTrackType.put(
getProcessedTrackType(firstAssetLoaderInputFormat.sampleMimeType), trackInfo);
return trackInfo.shouldTranscode;
}
@Override
public SampleConsumer onOutputFormat(Format assetLoaderOutputFormat) throws ExportException {
@C.TrackType int trackType = getProcessedTrackType(assetLoaderOutputFormat.sampleMimeType);
AddedTrackInfo trackInfo = checkStateNotNull(addedTrackInfoByTrackType.get(trackType));
SamplePipeline samplePipeline = getSamplePipeline(assetLoaderOutputFormat, trackInfo);
compositeAssetLoaders
.get(sequenceIndex)
.addOnMediaItemChangedListener(samplePipeline, trackType);
@ -469,17 +483,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Private methods.
private SamplePipeline getSamplePipeline(
Format firstInputFormat,
boolean shouldTranscode,
long streamStartPositionUs,
long streamOffsetUs)
throws ExportException {
if (shouldTranscode) {
Format firstAssetLoaderOutputFormat, AddedTrackInfo addedTrackInfo) throws ExportException {
if (addedTrackInfo.shouldTranscode) {
EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) {
if (MimeTypes.isAudio(firstAssetLoaderOutputFormat.sampleMimeType)) {
return new AudioSamplePipeline(
firstInputFormat,
streamOffsetUs,
addedTrackInfo.firstAssetLoaderInputFormat,
/* firstPipelineInputFormat= */ firstAssetLoaderOutputFormat,
addedTrackInfo.streamOffsetUs,
transformationRequest,
firstEditedMediaItem.flattenForSlowMotion,
firstEditedMediaItem.effects.audioProcessors,
@ -493,11 +504,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
compositionVideoEffects.isEmpty()
? null
: (Presentation) compositionVideoEffects.get(0);
// TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
return new VideoSamplePipeline(
context,
firstInputFormat,
streamStartPositionUs,
streamOffsetUs,
addedTrackInfo.firstAssetLoaderInputFormat,
addedTrackInfo.streamStartPositionUs,
addedTrackInfo.streamOffsetUs,
transformationRequest,
firstEditedMediaItem.effects.videoEffects,
compositionPresentation,
@ -509,14 +522,37 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
debugViewProvider);
}
}
return new EncodedSamplePipeline(
firstInputFormat,
streamStartPositionUs,
firstAssetLoaderOutputFormat,
addedTrackInfo.streamStartPositionUs,
transformationRequest,
muxerWrapper,
fallbackListener);
}
private final class AddedTrackInfo {
public final Format firstAssetLoaderInputFormat;
public final long streamStartPositionUs;
public final long streamOffsetUs;
public final boolean shouldTranscode;
public AddedTrackInfo(
Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
this.firstAssetLoaderInputFormat = firstAssetLoaderInputFormat;
this.streamStartPositionUs = streamStartPositionUs;
this.streamOffsetUs = streamOffsetUs;
shouldTranscode =
shouldTranscode(
firstAssetLoaderInputFormat,
supportedOutputTypes,
streamStartPositionUs,
streamOffsetUs);
}
private boolean shouldTranscode(
Format inputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
@ -536,7 +572,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else if (trackType == C.TRACK_TYPE_AUDIO) {
shouldTranscode = shouldTranscodeAudio(inputFormat);
} else if (trackType == C.TRACK_TYPE_VIDEO) {
shouldTranscode = shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
shouldTranscode =
shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
}
checkState(!shouldTranscode || assetLoaderCanOutputDecoded);
@ -560,7 +597,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return true;
}
EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
if (firstEditedMediaItem.flattenForSlowMotion && isSlowMotion(inputFormat)) {
if (firstEditedMediaItem.flattenForSlowMotion && containsSlowMotionData(inputFormat)) {
return true;
}
if (!firstEditedMediaItem.effects.audioProcessors.isEmpty()) {
@ -570,19 +607,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return false;
}
private boolean isSlowMotion(Format format) {
@Nullable Metadata metadata = format.metadata;
if (metadata == null) {
return false;
}
for (int i = 0; i < metadata.length(); i++) {
if (metadata.get(i) instanceof SlowMotionData) {
return true;
}
}
return false;
}
private boolean shouldTranscodeVideo(
Format inputFormat, long streamStartPositionUs, long streamOffsetUs) {
if (editedMediaItems.size() > 1 && !composition.transmuxVideo) {
@ -615,29 +639,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
return false;
}
/**
* Returns whether all {@code videoEffects} are {@linkplain GlEffect#isNoOp(int, int) no-ops},
* given an input {@link Format}.
*/
private boolean areVideoEffectsAllNoOp(ImmutableList<Effect> videoEffects, Format inputFormat) {
int decodedWidth =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
int decodedHeight =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
for (int i = 0; i < videoEffects.size(); i++) {
Effect videoEffect = videoEffects.get(i);
if (!(videoEffect instanceof GlEffect)) {
// We cannot confirm whether Effect instances that are not GlEffect instances are
// no-ops.
return false;
}
GlEffect glEffect = (GlEffect) videoEffect;
if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
return false;
}
}
return true;
}
}
}
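
The net effect of the listener split in this file: onTrackAdded() caches the per-track parameters in AddedTrackInfo, and the sample pipeline is only built in onOutputFormat(), once the (possibly decoded) asset loader output format is available. A condensed sketch of the resulting pipeline choice, returning a label instead of constructing the pipelines so it stays self-contained; it is not the literal implementation.

import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;

final class PipelineChoiceSketch {

  // Mirrors the decision in getSamplePipeline() above.
  static String choosePipeline(Format assetLoaderOutputFormat, boolean shouldTranscode) {
    if (!shouldTranscode) {
      // Transmux: encoded samples go straight to the muxer.
      return "EncodedSamplePipeline";
    }
    if (MimeTypes.isAudio(assetLoaderOutputFormat.sampleMimeType)) {
      // Receives both the asset loader input format (slow-motion metadata, fallback MIME type)
      // and the asset loader output format (sample rate, channel count, PCM encoding).
      return "AudioSamplePipeline";
    }
    // Still built from the asset loader *input* format until surface creation moves out of
    // the video pipeline (TODO b/267301878 above).
    return "VideoSamplePipeline";
  }

  private PipelineChoiceSketch() {}
}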

View File

@ -18,7 +18,13 @@ package androidx.media3.transformer;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes;
import androidx.media3.effect.GlEffect;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import com.google.common.collect.ImmutableList;
/** Utility methods for Transformer. */
/* package */ final class TransformerUtil {
@ -37,4 +43,45 @@ import androidx.media3.common.MimeTypes;
@C.TrackType int trackType = MimeTypes.getTrackType(mimeType);
return trackType == C.TRACK_TYPE_IMAGE ? C.TRACK_TYPE_VIDEO : trackType;
}
/**
* Returns whether all of the {@code videoEffects} are {@linkplain GlEffect#isNoOp(int, int)
* no-ops} when applied to samples of the given input {@link Format}.
*/
public static boolean areVideoEffectsAllNoOp(
ImmutableList<Effect> videoEffects, Format inputFormat) {
int decodedWidth =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
int decodedHeight =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
for (int i = 0; i < videoEffects.size(); i++) {
Effect videoEffect = videoEffects.get(i);
if (!(videoEffect instanceof GlEffect)) {
// We cannot confirm whether Effect instances that are not GlEffect instances are
// no-ops.
return false;
}
GlEffect glEffect = (GlEffect) videoEffect;
if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
return false;
}
}
return true;
}
/**
* Returns whether the {@link Format} contains {@linkplain SlowMotionData slow motion metadata}.
*/
public static boolean containsSlowMotionData(Format format) {
@Nullable Metadata metadata = format.metadata;
if (metadata == null) {
return false;
}
for (int i = 0; i < metadata.length(); i++) {
if (metadata.get(i) instanceof SlowMotionData) {
return true;
}
}
return false;
}
}
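
For reference, a hypothetical caller-side snippet showing where the two helpers above plug into the transcoding decision (the real checks, with additional conditions, live in TransformerInternal above). The wrapper class and method names are placeholders, and since TransformerUtil is package-private the sketch assumes it sits in the same package.

import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import com.google.common.collect.ImmutableList;

final class TransformerUtilUsageSketch {

  // Slow-motion flattening rewrites timestamps, so matching audio must be transcoded.
  static boolean audioNeedsTranscodeForFlattening(Format audioFormat, boolean flattenForSlowMotion) {
    return flattenForSlowMotion && TransformerUtil.containsSlowMotionData(audioFormat);
  }

  // One of the conditions checked before video can be transmuxed instead of re-encoded.
  static boolean videoEffectsAreNoOp(ImmutableList<Effect> videoEffects, Format videoFormat) {
    return TransformerUtil.areVideoEffectsAllNoOp(videoEffects, videoFormat);
  }

  private TransformerUtilUsageSketch() {}
}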

View File

@ -16,6 +16,7 @@
package androidx.media3.transformer;
import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
@ -23,6 +24,7 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Clock;
@ -46,12 +48,16 @@ public class ExoPlayerAssetLoaderTest {
assetLoaderThread.start();
Looper assetLoaderLooper = assetLoaderThread.getLooper();
AtomicReference<Exception> exceptionRef = new AtomicReference<>();
AtomicBoolean isTrackAdded = new AtomicBoolean();
AtomicBoolean isAudioOutputFormatSet = new AtomicBoolean();
AtomicBoolean isVideoOutputFormatSet = new AtomicBoolean();
AssetLoader.Listener listener =
new AssetLoader.Listener() {
private volatile boolean isDurationSet;
private volatile boolean isTrackCountSet;
private volatile boolean isAudioTrackAdded;
private volatile boolean isVideoTrackAdded;
@Override
public void onDurationUs(long durationUs) {
@ -68,8 +74,8 @@ public class ExoPlayerAssetLoaderTest {
}
@Override
public SampleConsumer onTrackAdded(
Format format,
public boolean onTrackAdded(
Format inputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
@ -80,7 +86,32 @@ public class ExoPlayerAssetLoaderTest {
exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onTrackCount()"));
}
isTrackAdded.set(true);
sleep();
@C.TrackType int trackType = getProcessedTrackType(inputFormat.sampleMimeType);
if (trackType == C.TRACK_TYPE_AUDIO) {
isAudioTrackAdded = true;
} else if (trackType == C.TRACK_TYPE_VIDEO) {
isVideoTrackAdded = true;
}
return false;
}
@Override
public SampleConsumer onOutputFormat(Format format) {
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
boolean isAudio = trackType == C.TRACK_TYPE_AUDIO;
boolean isVideo = trackType == C.TRACK_TYPE_VIDEO;
boolean isTrackAdded = (isAudio && isAudioTrackAdded) || (isVideo && isVideoTrackAdded);
if (!isTrackAdded) {
exceptionRef.set(
new IllegalStateException("onOutputFormat() called before onTrackAdded()"));
}
if (isAudio) {
isAudioOutputFormatSet.set(true);
} else if (isVideo) {
isVideoOutputFormatSet.set(true);
}
return new FakeSampleConsumer();
}
@ -107,7 +138,8 @@ public class ExoPlayerAssetLoaderTest {
Looper.myLooper(),
() -> {
ShadowSystemClock.advanceBy(Duration.ofMillis(10));
return isTrackAdded.get() || exceptionRef.get() != null;
return (isAudioOutputFormatSet.get() && isVideoOutputFormatSet.get())
|| exceptionRef.get() != null;
});
assertThat(exceptionRef.get()).isNull();
@ -126,11 +158,6 @@ public class ExoPlayerAssetLoaderTest {
private static final class FakeSampleConsumer implements SampleConsumer {
@Override
public boolean expectsDecodedData() {
return false;
}
@Nullable
@Override
public DecoderInputBuffer getInputBuffer() {

View File

@ -1000,7 +1000,7 @@ public final class TransformerEndToEndTest {
transformer.start(mediaItem, outputPath);
runLooperUntil(transformer.getApplicationLooper(), () -> sampleConsumerRef.get() != null);
assertThat(sampleConsumerRef.get().expectsDecodedData()).isTrue();
assertThat(sampleConsumerRef.get()).isNotInstanceOf(EncodedSamplePipeline.class);
}
@Test
@ -1488,12 +1488,10 @@ public final class TransformerEndToEndTest {
.setChannelCount(2)
.build();
try {
SampleConsumer sampleConsumer =
listener.onTrackAdded(
format,
supportedOutputTypes,
/* streamStartPositionUs= */ 0,
/* streamOffsetUs= */ 0);
format, supportedOutputTypes, /* streamStartPositionUs= */ 0, /* streamOffsetUs= */ 0);
SampleConsumer sampleConsumer = listener.onOutputFormat(format);
if (sampleConsumerRef != null) {
sampleConsumerRef.set(sampleConsumer);
}