Provide TransformerInternal with the AssetLoader output format.

TI can use this Format as part of creating the SamplePipelines.

PiperOrigin-RevId: 513777622
This commit is contained in:
samrobinson 2023-03-03 10:54:48 +00:00 committed by Rohit Singh
parent f23d6c1dc1
commit 07ba24ab1a
15 changed files with 425 additions and 262 deletions

View File

@@ -77,7 +77,7 @@ public interface AssetLoader {
void onDurationUs(long durationUs); void onDurationUs(long durationUs);
/** /**
* Called when the number of tracks output by the asset loader is known. * Called when the number of tracks being output is known.
* *
* <p>Can be called from any thread. * <p>Can be called from any thread.
*/ */
@@ -91,28 +91,41 @@ public interface AssetLoader {
* *
* <p>Must be called once per {@linkplain #onTrackCount(int) declared} track. * <p>Must be called once per {@linkplain #onTrackCount(int) declared} track.
* *
* <p>Must be called from the thread that will be used to call the returned {@link * <p>Must be called from the thread that will be used to call {@link #onOutputFormat(Format)}.
* SampleConsumer}'s methods. This thread must be the same for all the tracks added, and is
* generally different from the one used to access the {@link AssetLoader} methods.
* *
* @param format The {@link Format} of the input media (prior to video slow motion flattening or * @param inputFormat The {@link Format} of samples that will be input to the {@link
* to decoding). * AssetLoader} (prior to video slow motion flattening or to decoding).
* @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by * @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
* this asset loader for the track added. At least one output type must be supported. * this {@link AssetLoader} for the track added. At least one output type must be supported.
* @param streamStartPositionUs The start position of the stream (offset by {@code * @param streamStartPositionUs The start position of the stream (offset by {@code
* streamOffsetUs}), in microseconds. * streamOffsetUs}), in microseconds.
* @param streamOffsetUs The offset that will be added to the timestamps to make sure they are * @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
* non-negative, in microseconds. * non-negative, in microseconds.
* @return The {@link SampleConsumer} describing the type of sample data expected, and to which * @return Whether the {@link AssetLoader} needs to provide decoded data to the {@link
* to pass this data. * SampleConsumer}.
* @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
*/ */
SampleConsumer onTrackAdded( boolean onTrackAdded(
Format format, Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes, @SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs, long streamStartPositionUs,
long streamOffsetUs) long streamOffsetUs);
throws ExportException;
/**
* Called when the {@link Format} of samples that will be output by the {@link AssetLoader} is
* known.
*
* <p>Must be called once per {@linkplain #onTrackCount declared} track, and only after that
* track has been {@link #onTrackAdded added}.
*
* <p>Must be called from the thread that will be used to call the returned {@link
* SampleConsumer}'s methods. This thread must be the same for all formats output, and is
* generally different from the one used to access the {@link AssetLoader} methods.
*
* @param format The {@link Format} of samples that will be output.
* @return The {@link SampleConsumer} of samples of the given {@link Format}.
* @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
*/
SampleConsumer onOutputFormat(Format format) throws ExportException;
/** /**
* Called if an error occurs in the asset loader. In this case, the asset loader will be * Called if an error occurs in the asset loader. In this case, the asset loader will be

View File

@@ -60,7 +60,8 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/260618558): Move silent audio generation upstream of this component. // TODO(b/260618558): Move silent audio generation upstream of this component.
public AudioSamplePipeline( public AudioSamplePipeline(
Format firstInputFormat, Format firstAssetLoaderInputFormat,
Format firstPipelineInputFormat,
long streamOffsetUs, long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
boolean flattenForSlowMotion, boolean flattenForSlowMotion,
@@ -69,9 +70,9 @@ import org.checkerframework.dataflow.qual.Pure;
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
FallbackListener fallbackListener) FallbackListener fallbackListener)
throws ExportException { throws ExportException {
super(firstInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper); super(firstPipelineInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
silentAudioGenerator = new SilentAudioGenerator(firstInputFormat); silentAudioGenerator = new SilentAudioGenerator(firstPipelineInputFormat);
availableInputBuffers = new ConcurrentLinkedDeque<>(); availableInputBuffers = new ConcurrentLinkedDeque<>();
ByteBuffer emptyBuffer = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder()); ByteBuffer emptyBuffer = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
for (int i = 0; i < MAX_INPUT_BUFFER_COUNT; i++) { for (int i = 0; i < MAX_INPUT_BUFFER_COUNT; i++) {
@@ -84,12 +85,12 @@ import org.checkerframework.dataflow.qual.Pure;
encoderInputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED); encoderInputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
encoderOutputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED); encoderOutputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
if (flattenForSlowMotion && firstInputFormat.metadata != null) { if (flattenForSlowMotion && firstAssetLoaderInputFormat.metadata != null) {
audioProcessors = audioProcessors =
new ImmutableList.Builder<AudioProcessor>() new ImmutableList.Builder<AudioProcessor>()
.add( .add(
new SpeedChangingAudioProcessor( new SpeedChangingAudioProcessor(
new SegmentSpeedProvider(firstInputFormat.metadata))) new SegmentSpeedProvider(firstAssetLoaderInputFormat.metadata)))
.addAll(audioProcessors) .addAll(audioProcessors)
.build(); .build();
} }
@@ -98,10 +99,10 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/267301878): Once decoder format propagated, remove setting default PCM encoding. // TODO(b/267301878): Once decoder format propagated, remove setting default PCM encoding.
AudioFormat pipelineInputAudioFormat = AudioFormat pipelineInputAudioFormat =
new AudioFormat( new AudioFormat(
firstInputFormat.sampleRate, firstPipelineInputFormat.sampleRate,
firstInputFormat.channelCount, firstPipelineInputFormat.channelCount,
firstInputFormat.pcmEncoding != Format.NO_VALUE firstPipelineInputFormat.pcmEncoding != Format.NO_VALUE
? firstInputFormat.pcmEncoding ? firstPipelineInputFormat.pcmEncoding
: DEFAULT_PCM_ENCODING); : DEFAULT_PCM_ENCODING);
try { try {
@@ -118,7 +119,7 @@ import org.checkerframework.dataflow.qual.Pure;
.setSampleMimeType( .setSampleMimeType(
transformationRequest.audioMimeType != null transformationRequest.audioMimeType != null
? transformationRequest.audioMimeType ? transformationRequest.audioMimeType
: checkNotNull(firstInputFormat.sampleMimeType)) : checkNotNull(firstAssetLoaderInputFormat.sampleMimeType))
.setSampleRate(encoderInputAudioFormat.sampleRate) .setSampleRate(encoderInputAudioFormat.sampleRate)
.setChannelCount(encoderInputAudioFormat.channelCount) .setChannelCount(encoderInputAudioFormat.channelCount)
.setPcmEncoding(encoderInputAudioFormat.encoding) .setPcmEncoding(encoderInputAudioFormat.encoding)

View File

@@ -49,6 +49,13 @@ import java.util.concurrent.atomic.AtomicInteger;
*/ */
/* package */ final class CompositeAssetLoader implements AssetLoader, AssetLoader.Listener { /* package */ final class CompositeAssetLoader implements AssetLoader, AssetLoader.Listener {
private static final Format FORCE_AUDIO_TRACK_FORMAT =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
.setSampleRate(44100)
.setChannelCount(2)
.build();
private final List<EditedMediaItem> editedMediaItems; private final List<EditedMediaItem> editedMediaItems;
private final AtomicInteger currentMediaItemIndex; private final AtomicInteger currentMediaItemIndex;
private final boolean forceAudioTrack; private final boolean forceAudioTrack;
@@ -76,6 +83,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private AssetLoader currentAssetLoader; private AssetLoader currentAssetLoader;
private boolean trackCountReported; private boolean trackCountReported;
private int processedInputsSize; private int processedInputsSize;
private boolean decodeAudio;
private boolean decodeVideo;
private volatile long currentDurationUs; private volatile long currentDurationUs;
@@ -186,42 +195,65 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
@Override @Override
public SampleConsumer onTrackAdded( public boolean onTrackAdded(
Format format, Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes, @SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs, long streamStartPositionUs,
long streamOffsetUs) long streamOffsetUs) {
throws ExportException { boolean isAudio = getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO;
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer; if (currentMediaItemIndex.get() != 0) {
if (currentMediaItemIndex.get() == 0) { return isAudio ? decodeAudio : decodeVideo;
boolean addForcedAudioTrack = }
forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO;
boolean addForcedAudioTrack = forceAudioTrack && nonEndedTracks.get() == 1 && !isAudio;
if (!trackCountReported) { if (!trackCountReported) {
int trackCount = nonEndedTracks.get() + (addForcedAudioTrack ? 1 : 0); int trackCount = nonEndedTracks.get() + (addForcedAudioTrack ? 1 : 0);
compositeAssetLoaderListener.onTrackCount(trackCount); compositeAssetLoaderListener.onTrackCount(trackCount);
trackCountReported = true; trackCountReported = true;
} }
sampleConsumer =
new SampleConsumerWrapper( boolean decodeOutput =
compositeAssetLoaderListener.onTrackAdded( compositeAssetLoaderListener.onTrackAdded(
format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs)); inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
sampleConsumersByTrackType.put(trackType, sampleConsumer);
if (isAudio) {
decodeAudio = decodeOutput;
} else {
decodeVideo = decodeOutput;
}
if (addForcedAudioTrack) { if (addForcedAudioTrack) {
Format firstAudioFormat =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
.setSampleRate(44100)
.setChannelCount(2)
.build();
SampleConsumer audioSampleConsumer =
new SampleConsumerWrapper(
compositeAssetLoaderListener.onTrackAdded( compositeAssetLoaderListener.onTrackAdded(
firstAudioFormat, FORCE_AUDIO_TRACK_FORMAT,
SUPPORTED_OUTPUT_TYPE_DECODED, SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs, streamStartPositionUs,
streamOffsetUs)); streamOffsetUs);
sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer); }
return decodeOutput;
}
@Override
public SampleConsumer onOutputFormat(Format format) throws ExportException {
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer;
if (currentMediaItemIndex.get() == 0) {
sampleConsumer =
new SampleConsumerWrapper(compositeAssetLoaderListener.onOutputFormat(format));
sampleConsumersByTrackType.put(trackType, sampleConsumer);
if (forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
sampleConsumersByTrackType.put(
C.TRACK_TYPE_AUDIO,
new SampleConsumerWrapper(
compositeAssetLoaderListener.onOutputFormat(
FORCE_AUDIO_TRACK_FORMAT
.buildUpon()
.setSampleMimeType(MimeTypes.AUDIO_RAW)
.setPcmEncoding(C.ENCODING_PCM_16BIT)
.build())));
} }
} else { } else {
// TODO(b/270533049): Remove the check below when implementing blank video frames generation. // TODO(b/270533049): Remove the check below when implementing blank video frames generation.
@@ -288,13 +320,6 @@ import java.util.concurrent.atomic.AtomicInteger;
this.sampleConsumer = sampleConsumer; this.sampleConsumer = sampleConsumer;
} }
@Override
public boolean expectsDecodedData() {
// TODO(b/252537210): handle the case where the first media item doesn't need to be encoded
// but a following one does.
return sampleConsumer.expectsDecodedData();
}
@Nullable @Nullable
@Override @Override
public DecoderInputBuffer getInputBuffer() { public DecoderInputBuffer getInputBuffer() {

View File

@@ -71,11 +71,6 @@ import java.util.concurrent.atomic.AtomicLong;
nextMediaItemOffsetUs.addAndGet(durationUs); nextMediaItemOffsetUs.addAndGet(durationUs);
} }
@Override
public boolean expectsDecodedData() {
return false;
}
@Override @Override
@Nullable @Nullable
public DecoderInputBuffer getInputBuffer() { public DecoderInputBuffer getInputBuffer() {

View File

@@ -56,14 +56,13 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @throws ExportException If an error occurs in the decoder. * @throws ExportException If an error occurs in the decoder.
*/ */
@Override @Override
@RequiresNonNull("sampleConsumer") @RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException { protected boolean feedConsumerFromDecoder() throws ExportException {
@Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.getInputBuffer(); @Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.getInputBuffer();
if (sampleConsumerInputBuffer == null) { if (sampleConsumerInputBuffer == null) {
return false; return false;
} }
Codec decoder = checkNotNull(this.decoder);
if (decoder.isEnded()) { if (decoder.isEnded()) {
checkNotNull(sampleConsumerInputBuffer.data).limit(0); checkNotNull(sampleConsumerInputBuffer.data).limit(0);
sampleConsumerInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); sampleConsumerInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);

View File

@@ -17,10 +17,12 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED; import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT; import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED; import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED; import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
@@ -32,6 +34,7 @@ import androidx.media3.exoplayer.FormatHolder;
import androidx.media3.exoplayer.MediaClock; import androidx.media3.exoplayer.MediaClock;
import androidx.media3.exoplayer.RendererCapabilities; import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.source.SampleStream.ReadDataResult; import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@@ -42,6 +45,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
protected @MonotonicNonNull SampleConsumer sampleConsumer; protected @MonotonicNonNull SampleConsumer sampleConsumer;
protected @MonotonicNonNull Codec decoder; protected @MonotonicNonNull Codec decoder;
protected boolean isEnded; protected boolean isEnded;
private @MonotonicNonNull Format inputFormat;
private final TransformerMediaClock mediaClock; private final TransformerMediaClock mediaClock;
private final AssetLoader.Listener assetLoaderListener; private final AssetLoader.Listener assetLoaderListener;
@@ -92,15 +96,25 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override @Override
public void render(long positionUs, long elapsedRealtimeUs) { public void render(long positionUs, long elapsedRealtimeUs) {
try { try {
if (!isRunning || isEnded() || !ensureConfigured()) { if (!isRunning || isEnded() || !hasReadInputFormat()) {
return; return;
} }
if (sampleConsumer.expectsDecodedData()) { if (decoder != null) {
while (feedConsumerFromDecoder() || feedDecoderFromInput()) {} boolean progressMade;
do {
progressMade = false;
if (ensureSampleConsumerInitialized()) {
progressMade = feedConsumerFromDecoder();
}
progressMade |= feedDecoderFromInput();
} while (progressMade);
} else { } else {
if (ensureSampleConsumerInitialized()) {
while (feedConsumerFromInput()) {} while (feedConsumerFromInput()) {}
} }
}
} catch (ExportException e) { } catch (ExportException e) {
isRunning = false; isRunning = false;
assetLoaderListener.onError(e); assetLoaderListener.onError(e);
@@ -144,7 +158,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
protected void onInputFormatRead(Format inputFormat) {} protected void onInputFormatRead(Format inputFormat) {}
/** Initializes {@link #decoder} with an appropriate {@linkplain Codec decoder}. */ /** Initializes {@link #decoder} with an appropriate {@linkplain Codec decoder}. */
@RequiresNonNull("sampleConsumer") @EnsuresNonNull("decoder")
protected abstract void initDecoder(Format inputFormat) throws ExportException; protected abstract void initDecoder(Format inputFormat) throws ExportException;
/** /**
@@ -166,12 +180,22 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @return Whether it may be possible to read more data immediately by calling this method again. * @return Whether it may be possible to read more data immediately by calling this method again.
* @throws ExportException If an error occurs in the decoder. * @throws ExportException If an error occurs in the decoder.
*/ */
@RequiresNonNull("sampleConsumer") @RequiresNonNull({"sampleConsumer", "decoder"})
protected abstract boolean feedConsumerFromDecoder() throws ExportException; protected abstract boolean feedConsumerFromDecoder() throws ExportException;
@EnsuresNonNullIf(expression = "sampleConsumer", result = true) /**
private boolean ensureConfigured() throws ExportException { * Attempts to read the input {@link Format} from the source, if not read.
if (sampleConsumer != null) { *
* <p>After reading the format, {@link AssetLoader.Listener#onTrackAdded} is notified, and, if
* needed, the decoder is {@linkplain #initDecoder(Format) initialized}.
*
* @return Whether the input {@link Format} is available.
* @throws ExportException If an error occurs {@linkplain #initDecoder initializing} the
* {@linkplain Codec decoder}.
*/
@EnsuresNonNullIf(expression = "inputFormat", result = true)
private boolean hasReadInputFormat() throws ExportException {
if (inputFormat != null) {
return true; return true;
} }
@@ -181,16 +205,58 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (result != C.RESULT_FORMAT_READ) { if (result != C.RESULT_FORMAT_READ) {
return false; return false;
} }
Format inputFormat = overrideFormat(checkNotNull(formatHolder.format)); inputFormat = overrideFormat(checkNotNull(formatHolder.format));
@AssetLoader.SupportedOutputTypes
int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
sampleConsumer =
assetLoaderListener.onTrackAdded(
inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
onInputFormatRead(inputFormat); onInputFormatRead(inputFormat);
if (sampleConsumer.expectsDecodedData()) {
boolean decodeOutput =
assetLoaderListener.onTrackAdded(
inputFormat,
SUPPORTED_OUTPUT_TYPE_DECODED | SUPPORTED_OUTPUT_TYPE_ENCODED,
streamStartPositionUs,
streamOffsetUs);
if (decodeOutput) {
if (getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
initDecoder(inputFormat);
} else {
// TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and
// get decoder output Format before init sampleConsumer.
checkState(ensureSampleConsumerInitialized());
initDecoder(inputFormat); initDecoder(inputFormat);
} }
}
return true;
}
/**
* Attempts to initialize the {@link SampleConsumer}, if not initialized.
*
* @return Whether the {@link SampleConsumer} is initialized.
* @throws ExportException If the {@linkplain Codec decoder} errors getting its {@linkplain
* Codec#getOutputFormat() output format}.
* @throws ExportException If the {@link AssetLoader.Listener} errors providing a {@link
* SampleConsumer}.
*/
@RequiresNonNull("inputFormat")
@EnsuresNonNullIf(expression = "sampleConsumer", result = true)
private boolean ensureSampleConsumerInitialized() throws ExportException {
if (sampleConsumer != null) {
return true;
}
if (decoder != null
&& getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
@Nullable Format decoderOutputFormat = decoder.getOutputFormat();
if (decoderOutputFormat == null) {
return false;
}
sampleConsumer = assetLoaderListener.onOutputFormat(decoderOutputFormat);
} else {
// TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and get
// decoderOutput Format before init sampleConsumer.
sampleConsumer = assetLoaderListener.onOutputFormat(inputFormat);
}
return true; return true;
} }
@@ -200,8 +266,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @return Whether it may be possible to read more data immediately by calling this method again. * @return Whether it may be possible to read more data immediately by calling this method again.
* @throws ExportException If an error occurs in the decoder. * @throws ExportException If an error occurs in the decoder.
*/ */
@RequiresNonNull("decoder")
private boolean feedDecoderFromInput() throws ExportException { private boolean feedDecoderFromInput() throws ExportException {
Codec decoder = checkNotNull(this.decoder);
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) { if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false; return false;
} }

View File

@@ -16,6 +16,7 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.media.MediaCodec; import android.media.MediaCodec;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@@ -75,8 +76,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
@Override @Override
@RequiresNonNull("sampleConsumer")
protected void initDecoder(Format inputFormat) throws ExportException { protected void initDecoder(Format inputFormat) throws ExportException {
// TODO(b/237674316): Move surface creation out of sampleConsumer. Init decoder before
// sampleConsumer.
checkStateNotNull(sampleConsumer);
boolean isDecoderToneMappingRequired = boolean isDecoderToneMappingRequired =
ColorInfo.isTransferHdr(inputFormat.colorInfo) ColorInfo.isTransferHdr(inputFormat.colorInfo)
&& !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo()); && !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo());
@@ -116,9 +119,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
@Override @Override
@RequiresNonNull("sampleConsumer") @RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException { protected boolean feedConsumerFromDecoder() throws ExportException {
Codec decoder = checkNotNull(this.decoder);
if (decoder.isEnded()) { if (decoder.isEnded()) {
sampleConsumer.signalEndOfVideoInput(); sampleConsumer.signalEndOfVideoInput();
isEnded = true; isEnded = true;

View File

@@ -88,12 +88,13 @@ public final class ImageAssetLoader implements AssetLoader {
.setWidth(bitmap.getWidth()) .setWidth(bitmap.getWidth())
.setSampleMimeType(MIME_TYPE_IMAGE_ALL) .setSampleMimeType(MIME_TYPE_IMAGE_ALL)
.build(); .build();
SampleConsumer sampleConsumer =
listener.onTrackAdded( listener.onTrackAdded(
format, format,
SUPPORTED_OUTPUT_TYPE_DECODED, SUPPORTED_OUTPUT_TYPE_DECODED,
/* streamStartPositionUs= */ 0, /* streamStartPositionUs= */ 0,
/* streamOffsetUs= */ 0); /* streamOffsetUs= */ 0);
SampleConsumer sampleConsumer = listener.onOutputFormat(format);
checkState(editedMediaItem.durationUs != C.TIME_UNSET); checkState(editedMediaItem.durationUs != C.TIME_UNSET);
checkState(editedMediaItem.frameRate != C.RATE_UNSET_INT); checkState(editedMediaItem.frameRate != C.RATE_UNSET_INT);
// TODO(b/262693274): consider using listener.onDurationUs() or the MediaItem change // TODO(b/262693274): consider using listener.onDurationUs() or the MediaItem change

View File

@@ -28,8 +28,9 @@ import androidx.media3.common.MediaItem;
* *
* @param editedMediaItem The {@link MediaItem} with the transformations to apply to it. * @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
* @param durationUs The duration of the {@link MediaItem}, in microseconds. * @param durationUs The duration of the {@link MediaItem}, in microseconds.
* @param trackFormat The {@link Format} of the {@link MediaItem} track corresponding to the * @param trackFormat The {@link Format} extracted (and possibly decoded) from the {@link
* {@link SamplePipeline}, or {@code null} if no such track was extracted. * MediaItem} track, which represents the samples input to the {@link SamplePipeline}. {@code
* null} if no such track was extracted.
* @param isLast Whether the {@link MediaItem} is the last one passed to the {@link * @param isLast Whether the {@link MediaItem} is the last one passed to the {@link
* SamplePipeline}. * SamplePipeline}.
*/ */

View File

@@ -26,14 +26,6 @@ import androidx.media3.decoder.DecoderInputBuffer;
@UnstableApi @UnstableApi
public interface SampleConsumer { public interface SampleConsumer {
/**
* Returns whether the consumer should be fed with decoded sample data. If false, encoded sample
* data should be fed.
*/
boolean expectsDecodedData();
// Methods to pass compressed input or raw audio input.
/** /**
* Returns a buffer if the consumer is ready to accept input, and {@code null} otherwise. * Returns a buffer if the consumer is ready to accept input, and {@code null} otherwise.
* *

View File

@@ -56,12 +56,7 @@ import java.util.List;
Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) { Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
this.streamStartPositionUs = streamStartPositionUs; this.streamStartPositionUs = streamStartPositionUs;
this.muxerWrapper = muxerWrapper; this.muxerWrapper = muxerWrapper;
this.outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType); outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
}
@Override
public boolean expectsDecodedData() {
return true;
} }
/** /**

View File

@@ -18,12 +18,15 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED; import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED; import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.ExportException.ERROR_CODE_FAILED_RUNTIME_CHECK; import static androidx.media3.transformer.ExportException.ERROR_CODE_FAILED_RUNTIME_CHECK;
import static androidx.media3.transformer.ExportException.ERROR_CODE_MUXING_FAILED; import static androidx.media3.transformer.ExportException.ERROR_CODE_MUXING_FAILED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE; import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED; import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static androidx.media3.transformer.TransformerUtil.areVideoEffectsAllNoOp;
import static androidx.media3.transformer.TransformerUtil.containsSlowMotionData;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType; import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import static java.lang.annotation.ElementType.TYPE_USE; import static java.lang.annotation.ElementType.TYPE_USE;
@@ -37,21 +40,20 @@ import androidx.media3.common.C;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable; import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper; import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.effect.GlEffect;
import androidx.media3.effect.Presentation; import androidx.media3.effect.Presentation;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -404,6 +406,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final TransformationRequest transformationRequest; private final TransformationRequest transformationRequest;
private final FallbackListener fallbackListener; private final FallbackListener fallbackListener;
private final DebugViewProvider debugViewProvider; private final DebugViewProvider debugViewProvider;
private final Map<Integer, AddedTrackInfo> addedTrackInfoByTrackType;
public CompositeAssetLoaderListener( public CompositeAssetLoaderListener(
int sequenceIndex, int sequenceIndex,
@ -417,6 +420,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.transformationRequest = transformationRequest; this.transformationRequest = transformationRequest;
this.fallbackListener = fallbackListener; this.fallbackListener = fallbackListener;
this.debugViewProvider = debugViewProvider; this.debugViewProvider = debugViewProvider;
addedTrackInfoByTrackType = new HashMap<>();
} }
@Override @Override
@ -440,20 +444,30 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
public SampleConsumer onTrackAdded( public boolean onTrackAdded(
Format firstInputFormat, Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes, @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs, long streamStartPositionUs,
long streamOffsetUs) long streamOffsetUs) {
throws ExportException { AddedTrackInfo trackInfo =
SamplePipeline samplePipeline = new AddedTrackInfo(
getSamplePipeline( firstAssetLoaderInputFormat,
firstInputFormat, supportedOutputTypes,
shouldTranscode(
firstInputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs),
streamStartPositionUs, streamStartPositionUs,
streamOffsetUs); streamOffsetUs);
@C.TrackType int trackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
addedTrackInfoByTrackType.put(
getProcessedTrackType(firstAssetLoaderInputFormat.sampleMimeType), trackInfo);
return trackInfo.shouldTranscode;
}
@Override
public SampleConsumer onOutputFormat(Format assetLoaderOutputFormat) throws ExportException {
@C.TrackType int trackType = getProcessedTrackType(assetLoaderOutputFormat.sampleMimeType);
AddedTrackInfo trackInfo = checkStateNotNull(addedTrackInfoByTrackType.get(trackType));
SamplePipeline samplePipeline = getSamplePipeline(assetLoaderOutputFormat, trackInfo);
compositeAssetLoaders compositeAssetLoaders
.get(sequenceIndex) .get(sequenceIndex)
.addOnMediaItemChangedListener(samplePipeline, trackType); .addOnMediaItemChangedListener(samplePipeline, trackType);
@ -469,17 +483,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Private methods. // Private methods.
private SamplePipeline getSamplePipeline( private SamplePipeline getSamplePipeline(
Format firstInputFormat, Format firstAssetLoaderOutputFormat, AddedTrackInfo addedTrackInfo) throws ExportException {
boolean shouldTranscode, if (addedTrackInfo.shouldTranscode) {
long streamStartPositionUs,
long streamOffsetUs)
throws ExportException {
if (shouldTranscode) {
EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0); EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) { if (MimeTypes.isAudio(firstAssetLoaderOutputFormat.sampleMimeType)) {
return new AudioSamplePipeline( return new AudioSamplePipeline(
firstInputFormat, addedTrackInfo.firstAssetLoaderInputFormat,
streamOffsetUs, /* firstPipelineInputFormat= */ firstAssetLoaderOutputFormat,
addedTrackInfo.streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.flattenForSlowMotion, firstEditedMediaItem.flattenForSlowMotion,
firstEditedMediaItem.effects.audioProcessors, firstEditedMediaItem.effects.audioProcessors,
@ -493,11 +504,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
compositionVideoEffects.isEmpty() compositionVideoEffects.isEmpty()
? null ? null
: (Presentation) compositionVideoEffects.get(0); : (Presentation) compositionVideoEffects.get(0);
// TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
return new VideoSamplePipeline( return new VideoSamplePipeline(
context, context,
firstInputFormat, addedTrackInfo.firstAssetLoaderInputFormat,
streamStartPositionUs, addedTrackInfo.streamStartPositionUs,
streamOffsetUs, addedTrackInfo.streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.effects.videoEffects, firstEditedMediaItem.effects.videoEffects,
compositionPresentation, compositionPresentation,
@ -509,14 +522,37 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
debugViewProvider); debugViewProvider);
} }
} }
return new EncodedSamplePipeline( return new EncodedSamplePipeline(
firstInputFormat, firstAssetLoaderOutputFormat,
streamStartPositionUs, addedTrackInfo.streamStartPositionUs,
transformationRequest, transformationRequest,
muxerWrapper, muxerWrapper,
fallbackListener); fallbackListener);
} }
private final class AddedTrackInfo {
public final Format firstAssetLoaderInputFormat;
public final long streamStartPositionUs;
public final long streamOffsetUs;
public final boolean shouldTranscode;
public AddedTrackInfo(
Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
this.firstAssetLoaderInputFormat = firstAssetLoaderInputFormat;
this.streamStartPositionUs = streamStartPositionUs;
this.streamOffsetUs = streamOffsetUs;
shouldTranscode =
shouldTranscode(
firstAssetLoaderInputFormat,
supportedOutputTypes,
streamStartPositionUs,
streamOffsetUs);
}
private boolean shouldTranscode( private boolean shouldTranscode(
Format inputFormat, Format inputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes, @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
@ -536,7 +572,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else if (trackType == C.TRACK_TYPE_AUDIO) { } else if (trackType == C.TRACK_TYPE_AUDIO) {
shouldTranscode = shouldTranscodeAudio(inputFormat); shouldTranscode = shouldTranscodeAudio(inputFormat);
} else if (trackType == C.TRACK_TYPE_VIDEO) { } else if (trackType == C.TRACK_TYPE_VIDEO) {
shouldTranscode = shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs); shouldTranscode =
shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
} }
checkState(!shouldTranscode || assetLoaderCanOutputDecoded); checkState(!shouldTranscode || assetLoaderCanOutputDecoded);
@ -560,7 +597,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return true; return true;
} }
EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0); EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
if (firstEditedMediaItem.flattenForSlowMotion && isSlowMotion(inputFormat)) { if (firstEditedMediaItem.flattenForSlowMotion && containsSlowMotionData(inputFormat)) {
return true; return true;
} }
if (!firstEditedMediaItem.effects.audioProcessors.isEmpty()) { if (!firstEditedMediaItem.effects.audioProcessors.isEmpty()) {
@ -570,19 +607,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return false; return false;
} }
private boolean isSlowMotion(Format format) {
@Nullable Metadata metadata = format.metadata;
if (metadata == null) {
return false;
}
for (int i = 0; i < metadata.length(); i++) {
if (metadata.get(i) instanceof SlowMotionData) {
return true;
}
}
return false;
}
private boolean shouldTranscodeVideo( private boolean shouldTranscodeVideo(
Format inputFormat, long streamStartPositionUs, long streamOffsetUs) { Format inputFormat, long streamStartPositionUs, long streamOffsetUs) {
if (editedMediaItems.size() > 1 && !composition.transmuxVideo) { if (editedMediaItems.size() > 1 && !composition.transmuxVideo) {
@ -615,29 +639,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
return false; return false;
} }
/**
* Returns whether all {@code videoEffects} are {@linkplain GlEffect#isNoOp(int, int) no-ops},
* given an input {@link Format}.
*/
private boolean areVideoEffectsAllNoOp(ImmutableList<Effect> videoEffects, Format inputFormat) {
int decodedWidth =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
int decodedHeight =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
for (int i = 0; i < videoEffects.size(); i++) {
Effect videoEffect = videoEffects.get(i);
if (!(videoEffect instanceof GlEffect)) {
// We cannot confirm whether Effect instances that are not GlEffect instances are
// no-ops.
return false;
}
GlEffect glEffect = (GlEffect) videoEffect;
if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
return false;
}
}
return true;
} }
} }
} }

View File

@ -18,7 +18,13 @@ package androidx.media3.transformer;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.effect.GlEffect;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import com.google.common.collect.ImmutableList;
/** Utility methods for Transformer. */ /** Utility methods for Transformer. */
/* package */ final class TransformerUtil { /* package */ final class TransformerUtil {
@ -37,4 +43,45 @@ import androidx.media3.common.MimeTypes;
@C.TrackType int trackType = MimeTypes.getTrackType(mimeType); @C.TrackType int trackType = MimeTypes.getTrackType(mimeType);
return trackType == C.TRACK_TYPE_IMAGE ? C.TRACK_TYPE_VIDEO : trackType; return trackType == C.TRACK_TYPE_IMAGE ? C.TRACK_TYPE_VIDEO : trackType;
} }
/**
* Returns whether the collection of {@code videoEffects} would be a {@linkplain
* GlEffect#isNoOp(int, int) no-op}, if queued samples of this {@link Format}.
*/
public static boolean areVideoEffectsAllNoOp(
ImmutableList<Effect> videoEffects, Format inputFormat) {
int decodedWidth =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
int decodedHeight =
(inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
for (int i = 0; i < videoEffects.size(); i++) {
Effect videoEffect = videoEffects.get(i);
if (!(videoEffect instanceof GlEffect)) {
// We cannot confirm whether Effect instances that are not GlEffect instances are
// no-ops.
return false;
}
GlEffect glEffect = (GlEffect) videoEffect;
if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
return false;
}
}
return true;
}
/**
* Returns whether the {@link Format} contains {@linkplain SlowMotionData slow motion metadata}.
*/
public static boolean containsSlowMotionData(Format format) {
@Nullable Metadata metadata = format.metadata;
if (metadata == null) {
return false;
}
for (int i = 0; i < metadata.length(); i++) {
if (metadata.get(i) instanceof SlowMotionData) {
return true;
}
}
return false;
}
} }

View File

@ -16,6 +16,7 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil; import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil;
import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import android.content.Context; import android.content.Context;
@ -23,6 +24,7 @@ import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
@ -46,12 +48,16 @@ public class ExoPlayerAssetLoaderTest {
assetLoaderThread.start(); assetLoaderThread.start();
Looper assetLoaderLooper = assetLoaderThread.getLooper(); Looper assetLoaderLooper = assetLoaderThread.getLooper();
AtomicReference<Exception> exceptionRef = new AtomicReference<>(); AtomicReference<Exception> exceptionRef = new AtomicReference<>();
AtomicBoolean isTrackAdded = new AtomicBoolean(); AtomicBoolean isAudioOutputFormatSet = new AtomicBoolean();
AtomicBoolean isVideoOutputFormatSet = new AtomicBoolean();
AssetLoader.Listener listener = AssetLoader.Listener listener =
new AssetLoader.Listener() { new AssetLoader.Listener() {
private volatile boolean isDurationSet; private volatile boolean isDurationSet;
private volatile boolean isTrackCountSet; private volatile boolean isTrackCountSet;
private volatile boolean isAudioTrackAdded;
private volatile boolean isVideoTrackAdded;
@Override @Override
public void onDurationUs(long durationUs) { public void onDurationUs(long durationUs) {
@ -68,8 +74,8 @@ public class ExoPlayerAssetLoaderTest {
} }
@Override @Override
public SampleConsumer onTrackAdded( public boolean onTrackAdded(
Format format, Format inputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes, @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs, long streamStartPositionUs,
long streamOffsetUs) { long streamOffsetUs) {
@ -80,7 +86,32 @@ public class ExoPlayerAssetLoaderTest {
exceptionRef.set( exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onTrackCount()")); new IllegalStateException("onTrackAdded() called before onTrackCount()"));
} }
isTrackAdded.set(true); sleep();
@C.TrackType int trackType = getProcessedTrackType(inputFormat.sampleMimeType);
if (trackType == C.TRACK_TYPE_AUDIO) {
isAudioTrackAdded = true;
} else if (trackType == C.TRACK_TYPE_VIDEO) {
isVideoTrackAdded = true;
}
return false;
}
@Override
public SampleConsumer onOutputFormat(Format format) {
@C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
boolean isAudio = trackType == C.TRACK_TYPE_AUDIO;
boolean isVideo = trackType == C.TRACK_TYPE_VIDEO;
boolean isTrackAdded = (isAudio && isAudioTrackAdded) || (isVideo && isVideoTrackAdded);
if (!isTrackAdded) {
exceptionRef.set(
new IllegalStateException("onOutputFormat() called before onTrackAdded()"));
}
if (isAudio) {
isAudioOutputFormatSet.set(true);
} else if (isVideo) {
isVideoOutputFormatSet.set(true);
}
return new FakeSampleConsumer(); return new FakeSampleConsumer();
} }
@ -107,7 +138,8 @@ public class ExoPlayerAssetLoaderTest {
Looper.myLooper(), Looper.myLooper(),
() -> { () -> {
ShadowSystemClock.advanceBy(Duration.ofMillis(10)); ShadowSystemClock.advanceBy(Duration.ofMillis(10));
return isTrackAdded.get() || exceptionRef.get() != null; return (isAudioOutputFormatSet.get() && isVideoOutputFormatSet.get())
|| exceptionRef.get() != null;
}); });
assertThat(exceptionRef.get()).isNull(); assertThat(exceptionRef.get()).isNull();
@ -126,11 +158,6 @@ public class ExoPlayerAssetLoaderTest {
private static final class FakeSampleConsumer implements SampleConsumer { private static final class FakeSampleConsumer implements SampleConsumer {
@Override
public boolean expectsDecodedData() {
return false;
}
@Nullable @Nullable
@Override @Override
public DecoderInputBuffer getInputBuffer() { public DecoderInputBuffer getInputBuffer() {

View File

@ -1000,7 +1000,7 @@ public final class TransformerEndToEndTest {
transformer.start(mediaItem, outputPath); transformer.start(mediaItem, outputPath);
runLooperUntil(transformer.getApplicationLooper(), () -> sampleConsumerRef.get() != null); runLooperUntil(transformer.getApplicationLooper(), () -> sampleConsumerRef.get() != null);
assertThat(sampleConsumerRef.get().expectsDecodedData()).isTrue(); assertThat(sampleConsumerRef.get()).isNotInstanceOf(EncodedSamplePipeline.class);
} }
@Test @Test
@ -1488,12 +1488,10 @@ public final class TransformerEndToEndTest {
.setChannelCount(2) .setChannelCount(2)
.build(); .build();
try { try {
SampleConsumer sampleConsumer =
listener.onTrackAdded( listener.onTrackAdded(
format, format, supportedOutputTypes, /* streamStartPositionUs= */ 0, /* streamOffsetUs= */ 0);
supportedOutputTypes,
/* streamStartPositionUs= */ 0, SampleConsumer sampleConsumer = listener.onOutputFormat(format);
/* streamOffsetUs= */ 0);
if (sampleConsumerRef != null) { if (sampleConsumerRef != null) {
sampleConsumerRef.set(sampleConsumer); sampleConsumerRef.set(sampleConsumer);
} }