Provide TransformerInternal with the AssetLoader output format.
TransformerInternal can use this Format as part of creating the SamplePipelines.

PiperOrigin-RevId: 513777622
commit 07ba24ab1a (parent f23d6c1dc1)
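For orientation, the sketch below shows how an AssetLoader.Listener interacts with the split contract introduced by this change: onTrackAdded() now only reports the track and returns whether decoded samples are wanted, and the SampleConsumer is obtained later from onOutputFormat() once the output Format is known. The class name, constructor and the idea of returning a pre-built SampleConsumer are illustrative only and are not part of the commit; the method signatures follow the interface as modified in the diff below.

package androidx.media3.transformer;

import androidx.media3.common.Format;

/** Hypothetical listener sketching the call order after this change (not part of the commit). */
final class SketchAssetLoaderListener implements AssetLoader.Listener {

  private final SampleConsumer sampleConsumer;

  SketchAssetLoaderListener(SampleConsumer sampleConsumer) {
    // Illustrative: a real implementation (like TransformerInternal's) builds the consumer
    // from the format reported in onOutputFormat().
    this.sampleConsumer = sampleConsumer;
  }

  @Override
  public void onDurationUs(long durationUs) {}

  @Override
  public void onTrackCount(int trackCount) {}

  @Override
  public boolean onTrackAdded(
      Format inputFormat,
      @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
      long streamStartPositionUs,
      long streamOffsetUs) {
    // Step 1: the input format is reported. The boolean return value now tells the asset
    // loader whether it should output decoded samples (true) or encoded samples (false).
    return (supportedOutputTypes & AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED) != 0;
  }

  @Override
  public SampleConsumer onOutputFormat(Format assetLoaderOutputFormat) throws ExportException {
    // Step 2: once the (possibly decoded) output format is known, the SampleConsumer is
    // returned. TransformerInternal uses this call to create the SamplePipelines.
    return sampleConsumer;
  }

  @Override
  public void onError(ExportException exportException) {}
}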
@@ -77,7 +77,7 @@ public interface AssetLoader {
     void onDurationUs(long durationUs);
 
     /**
-     * Called when the number of tracks output by the asset loader is known.
+     * Called when the number of tracks being output is known.
      *
      * <p>Can be called from any thread.
      */
@@ -91,28 +91,41 @@ public interface AssetLoader {
     *
     * <p>Must be called once per {@linkplain #onTrackCount(int) declared} track.
     *
-    * <p>Must be called from the thread that will be used to call the returned {@link
-    * SampleConsumer}'s methods. This thread must be the same for all the tracks added, and is
-    * generally different from the one used to access the {@link AssetLoader} methods.
+    * <p>Must be called from the thread that will be used to call {@link #onOutputFormat(Format)}.
     *
-    * @param format The {@link Format} of the input media (prior to video slow motion flattening or
-    *     to decoding).
+    * @param inputFormat The {@link Format} of samples that will be input to the {@link
+    *     AssetLoader} (prior to video slow motion flattening or to decoding).
     * @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
-    *     this asset loader for the track added. At least one output type must be supported.
+    *     this {@link AssetLoader} for the track added. At least one output type must be supported.
     * @param streamStartPositionUs The start position of the stream (offset by {@code
     *     streamOffsetUs}), in microseconds.
     * @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
     *     non-negative, in microseconds.
-    * @return The {@link SampleConsumer} describing the type of sample data expected, and to which
-    *     to pass this data.
-    * @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
+    * @return Whether the {@link AssetLoader} needs to provide decoded data to the {@link
+    *     SampleConsumer}.
     */
-    SampleConsumer onTrackAdded(
-        Format format,
+    boolean onTrackAdded(
+        Format inputFormat,
         @SupportedOutputTypes int supportedOutputTypes,
         long streamStartPositionUs,
-        long streamOffsetUs)
-        throws ExportException;
+        long streamOffsetUs);
+
+    /**
+     * Called when the {@link Format} of samples that will be output by the {@link AssetLoader} is
+     * known.
+     *
+     * <p>Must be called once per {@linkplain #onTrackCount declared} track, and only after that
+     * track has been {@link #onTrackAdded added}.
+     *
+     * <p>Must be called from the thread that will be used to call the returned {@link
+     * SampleConsumer}'s methods. This thread must be the same for all formats output, and is
+     * generally different from the one used to access the {@link AssetLoader} methods.
+     *
+     * @param format The {@link Format} of samples that will be output.
+     * @return The {@link SampleConsumer} of samples of the given {@link Format}.
+     * @throws ExportException If an error occurs configuring the {@link SampleConsumer}.
+     */
+    SampleConsumer onOutputFormat(Format format) throws ExportException;
 
     /**
      * Called if an error occurs in the asset loader. In this case, the asset loader will be
@@ -60,7 +60,8 @@ import org.checkerframework.dataflow.qual.Pure;
 
   // TODO(b/260618558): Move silent audio generation upstream of this component.
   public AudioSamplePipeline(
-      Format firstInputFormat,
+      Format firstAssetLoaderInputFormat,
+      Format firstPipelineInputFormat,
       long streamOffsetUs,
       TransformationRequest transformationRequest,
       boolean flattenForSlowMotion,
@@ -69,9 +70,9 @@ import org.checkerframework.dataflow.qual.Pure;
       MuxerWrapper muxerWrapper,
       FallbackListener fallbackListener)
       throws ExportException {
-    super(firstInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
+    super(firstPipelineInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
 
-    silentAudioGenerator = new SilentAudioGenerator(firstInputFormat);
+    silentAudioGenerator = new SilentAudioGenerator(firstPipelineInputFormat);
     availableInputBuffers = new ConcurrentLinkedDeque<>();
     ByteBuffer emptyBuffer = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
     for (int i = 0; i < MAX_INPUT_BUFFER_COUNT; i++) {
@@ -84,12 +85,12 @@ import org.checkerframework.dataflow.qual.Pure;
     encoderInputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
     encoderOutputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
 
-    if (flattenForSlowMotion && firstInputFormat.metadata != null) {
+    if (flattenForSlowMotion && firstAssetLoaderInputFormat.metadata != null) {
       audioProcessors =
           new ImmutableList.Builder<AudioProcessor>()
               .add(
                   new SpeedChangingAudioProcessor(
-                      new SegmentSpeedProvider(firstInputFormat.metadata)))
+                      new SegmentSpeedProvider(firstAssetLoaderInputFormat.metadata)))
               .addAll(audioProcessors)
               .build();
     }
@@ -98,10 +99,10 @@ import org.checkerframework.dataflow.qual.Pure;
     // TODO(b/267301878): Once decoder format propagated, remove setting default PCM encoding.
     AudioFormat pipelineInputAudioFormat =
         new AudioFormat(
-            firstInputFormat.sampleRate,
-            firstInputFormat.channelCount,
-            firstInputFormat.pcmEncoding != Format.NO_VALUE
-                ? firstInputFormat.pcmEncoding
+            firstPipelineInputFormat.sampleRate,
+            firstPipelineInputFormat.channelCount,
+            firstPipelineInputFormat.pcmEncoding != Format.NO_VALUE
+                ? firstPipelineInputFormat.pcmEncoding
                 : DEFAULT_PCM_ENCODING);
 
     try {
@@ -118,7 +119,7 @@ import org.checkerframework.dataflow.qual.Pure;
               .setSampleMimeType(
                   transformationRequest.audioMimeType != null
                       ? transformationRequest.audioMimeType
-                      : checkNotNull(firstInputFormat.sampleMimeType))
+                      : checkNotNull(firstAssetLoaderInputFormat.sampleMimeType))
               .setSampleRate(encoderInputAudioFormat.sampleRate)
               .setChannelCount(encoderInputAudioFormat.channelCount)
               .setPcmEncoding(encoderInputAudioFormat.encoding)
@@ -49,6 +49,13 @@ import java.util.concurrent.atomic.AtomicInteger;
  */
 /* package */ final class CompositeAssetLoader implements AssetLoader, AssetLoader.Listener {
 
+  private static final Format FORCE_AUDIO_TRACK_FORMAT =
+      new Format.Builder()
+          .setSampleMimeType(MimeTypes.AUDIO_AAC)
+          .setSampleRate(44100)
+          .setChannelCount(2)
+          .build();
+
   private final List<EditedMediaItem> editedMediaItems;
   private final AtomicInteger currentMediaItemIndex;
   private final boolean forceAudioTrack;
@@ -76,6 +83,8 @@ import java.util.concurrent.atomic.AtomicInteger;
   private AssetLoader currentAssetLoader;
   private boolean trackCountReported;
   private int processedInputsSize;
+  private boolean decodeAudio;
+  private boolean decodeVideo;
 
   private volatile long currentDurationUs;
 
@@ -186,42 +195,65 @@ import java.util.concurrent.atomic.AtomicInteger;
   }
 
   @Override
-  public SampleConsumer onTrackAdded(
-      Format format,
+  public boolean onTrackAdded(
+      Format inputFormat,
       @SupportedOutputTypes int supportedOutputTypes,
       long streamStartPositionUs,
-      long streamOffsetUs)
-      throws ExportException {
+      long streamOffsetUs) {
+    boolean isAudio = getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO;
+
+    if (currentMediaItemIndex.get() != 0) {
+      return isAudio ? decodeAudio : decodeVideo;
+    }
+
+    boolean addForcedAudioTrack = forceAudioTrack && nonEndedTracks.get() == 1 && !isAudio;
+
+    if (!trackCountReported) {
+      int trackCount = nonEndedTracks.get() + (addForcedAudioTrack ? 1 : 0);
+      compositeAssetLoaderListener.onTrackCount(trackCount);
+      trackCountReported = true;
+    }
+
+    boolean decodeOutput =
+        compositeAssetLoaderListener.onTrackAdded(
+            inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
+
+    if (isAudio) {
+      decodeAudio = decodeOutput;
+    } else {
+      decodeVideo = decodeOutput;
+    }
+
+    if (addForcedAudioTrack) {
+      compositeAssetLoaderListener.onTrackAdded(
+          FORCE_AUDIO_TRACK_FORMAT,
+          SUPPORTED_OUTPUT_TYPE_DECODED,
+          streamStartPositionUs,
+          streamOffsetUs);
+    }
+
+    return decodeOutput;
+  }
+
+  @Override
+  public SampleConsumer onOutputFormat(Format format) throws ExportException {
     @C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
     SampleConsumer sampleConsumer;
     if (currentMediaItemIndex.get() == 0) {
-      boolean addForcedAudioTrack =
-          forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO;
-      if (!trackCountReported) {
-        int trackCount = nonEndedTracks.get() + (addForcedAudioTrack ? 1 : 0);
-        compositeAssetLoaderListener.onTrackCount(trackCount);
-        trackCountReported = true;
-      }
       sampleConsumer =
-          new SampleConsumerWrapper(
-              compositeAssetLoaderListener.onTrackAdded(
-                  format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs));
+          new SampleConsumerWrapper(compositeAssetLoaderListener.onOutputFormat(format));
       sampleConsumersByTrackType.put(trackType, sampleConsumer);
-      if (addForcedAudioTrack) {
-        Format firstAudioFormat =
-            new Format.Builder()
-                .setSampleMimeType(MimeTypes.AUDIO_AAC)
-                .setSampleRate(44100)
-                .setChannelCount(2)
-                .build();
-        SampleConsumer audioSampleConsumer =
-            new SampleConsumerWrapper(
-                compositeAssetLoaderListener.onTrackAdded(
-                    firstAudioFormat,
-                    SUPPORTED_OUTPUT_TYPE_DECODED,
-                    streamStartPositionUs,
-                    streamOffsetUs));
-        sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer);
+      if (forceAudioTrack && nonEndedTracks.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
+        sampleConsumersByTrackType.put(
+            C.TRACK_TYPE_AUDIO,
+            new SampleConsumerWrapper(
+                compositeAssetLoaderListener.onOutputFormat(
+                    FORCE_AUDIO_TRACK_FORMAT
+                        .buildUpon()
+                        .setSampleMimeType(MimeTypes.AUDIO_RAW)
+                        .setPcmEncoding(C.ENCODING_PCM_16BIT)
+                        .build())));
       }
     } else {
       // TODO(b/270533049): Remove the check below when implementing blank video frames generation.
@@ -288,13 +320,6 @@ import java.util.concurrent.atomic.AtomicInteger;
       this.sampleConsumer = sampleConsumer;
     }
 
-    @Override
-    public boolean expectsDecodedData() {
-      // TODO(b/252537210): handle the case where the first media item doesn't need to be encoded
-      // but a following one does.
-      return sampleConsumer.expectsDecodedData();
-    }
-
     @Nullable
     @Override
     public DecoderInputBuffer getInputBuffer() {
@@ -71,11 +71,6 @@ import java.util.concurrent.atomic.AtomicLong;
     nextMediaItemOffsetUs.addAndGet(durationUs);
   }
 
-  @Override
-  public boolean expectsDecodedData() {
-    return false;
-  }
-
   @Override
   @Nullable
   public DecoderInputBuffer getInputBuffer() {
@@ -56,14 +56,13 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    * @throws ExportException If an error occurs in the decoder.
    */
   @Override
-  @RequiresNonNull("sampleConsumer")
+  @RequiresNonNull({"sampleConsumer", "decoder"})
   protected boolean feedConsumerFromDecoder() throws ExportException {
     @Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.getInputBuffer();
     if (sampleConsumerInputBuffer == null) {
       return false;
     }
 
-    Codec decoder = checkNotNull(this.decoder);
     if (decoder.isEnded()) {
       checkNotNull(sampleConsumerInputBuffer.data).limit(0);
       sampleConsumerInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
@@ -17,10 +17,12 @@
 package androidx.media3.transformer;
 
 import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED;
 import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
 import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
 import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
+import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
 
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -32,6 +34,7 @@ import androidx.media3.exoplayer.FormatHolder;
 import androidx.media3.exoplayer.MediaClock;
 import androidx.media3.exoplayer.RendererCapabilities;
 import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
+import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@@ -42,6 +45,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   protected @MonotonicNonNull SampleConsumer sampleConsumer;
   protected @MonotonicNonNull Codec decoder;
   protected boolean isEnded;
+  private @MonotonicNonNull Format inputFormat;
 
   private final TransformerMediaClock mediaClock;
   private final AssetLoader.Listener assetLoaderListener;
@@ -92,15 +96,25 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   @Override
   public void render(long positionUs, long elapsedRealtimeUs) {
     try {
-      if (!isRunning || isEnded() || !ensureConfigured()) {
+      if (!isRunning || isEnded() || !hasReadInputFormat()) {
         return;
       }
 
-      if (sampleConsumer.expectsDecodedData()) {
-        while (feedConsumerFromDecoder() || feedDecoderFromInput()) {}
+      if (decoder != null) {
+        boolean progressMade;
+        do {
+          progressMade = false;
+          if (ensureSampleConsumerInitialized()) {
+            progressMade = feedConsumerFromDecoder();
+          }
+          progressMade |= feedDecoderFromInput();
+        } while (progressMade);
       } else {
-        while (feedConsumerFromInput()) {}
+        if (ensureSampleConsumerInitialized()) {
+          while (feedConsumerFromInput()) {}
+        }
       }
 
     } catch (ExportException e) {
       isRunning = false;
       assetLoaderListener.onError(e);
@@ -144,7 +158,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   protected void onInputFormatRead(Format inputFormat) {}
 
   /** Initializes {@link #decoder} with an appropriate {@linkplain Codec decoder}. */
-  @RequiresNonNull("sampleConsumer")
+  @EnsuresNonNull("decoder")
   protected abstract void initDecoder(Format inputFormat) throws ExportException;
 
   /**
@@ -166,12 +180,22 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    * @return Whether it may be possible to read more data immediately by calling this method again.
    * @throws ExportException If an error occurs in the decoder.
    */
-  @RequiresNonNull("sampleConsumer")
+  @RequiresNonNull({"sampleConsumer", "decoder"})
   protected abstract boolean feedConsumerFromDecoder() throws ExportException;
 
-  @EnsuresNonNullIf(expression = "sampleConsumer", result = true)
-  private boolean ensureConfigured() throws ExportException {
-    if (sampleConsumer != null) {
+  /**
+   * Attempts to read the input {@link Format} from the source, if not read.
+   *
+   * <p>After reading the format, {@link AssetLoader.Listener#onTrackAdded} is notified, and, if
+   * needed, the decoder is {@linkplain #initDecoder(Format) initialized}.
+   *
+   * @return Whether the input {@link Format} is available.
+   * @throws ExportException If an error occurs {@linkplain #initDecoder initializing} the
+   *     {@linkplain Codec decoder}.
+   */
+  @EnsuresNonNullIf(expression = "inputFormat", result = true)
+  private boolean hasReadInputFormat() throws ExportException {
+    if (inputFormat != null) {
       return true;
     }
 
@@ -181,16 +205,58 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     if (result != C.RESULT_FORMAT_READ) {
       return false;
     }
-    Format inputFormat = overrideFormat(checkNotNull(formatHolder.format));
-    @AssetLoader.SupportedOutputTypes
-    int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
-    sampleConsumer =
-        assetLoaderListener.onTrackAdded(
-            inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
+    inputFormat = overrideFormat(checkNotNull(formatHolder.format));
     onInputFormatRead(inputFormat);
-    if (sampleConsumer.expectsDecodedData()) {
-      initDecoder(inputFormat);
+    boolean decodeOutput =
+        assetLoaderListener.onTrackAdded(
+            inputFormat,
+            SUPPORTED_OUTPUT_TYPE_DECODED | SUPPORTED_OUTPUT_TYPE_ENCODED,
+            streamStartPositionUs,
+            streamOffsetUs);
+    if (decodeOutput) {
+      if (getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
+        initDecoder(inputFormat);
+      } else {
+        // TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and
+        // get decoder output Format before init sampleConsumer.
+        checkState(ensureSampleConsumerInitialized());
+        initDecoder(inputFormat);
+      }
     }
+
+    return true;
+  }
+
+  /**
+   * Attempts to initialize the {@link SampleConsumer}, if not initialized.
+   *
+   * @return Whether the {@link SampleConsumer} is initialized.
+   * @throws ExportException If the {@linkplain Codec decoder} errors getting it's {@linkplain
+   *     Codec#getOutputFormat() output format}.
+   * @throws ExportException If the {@link AssetLoader.Listener} errors providing a {@link
+   *     SampleConsumer}.
+   */
+  @RequiresNonNull("inputFormat")
+  @EnsuresNonNullIf(expression = "sampleConsumer", result = true)
+  private boolean ensureSampleConsumerInitialized() throws ExportException {
+    if (sampleConsumer != null) {
+      return true;
+    }
+
+    if (decoder != null
+        && getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO) {
+      @Nullable Format decoderOutputFormat = decoder.getOutputFormat();
+      if (decoderOutputFormat == null) {
+        return false;
+      }
+      sampleConsumer = assetLoaderListener.onOutputFormat(decoderOutputFormat);
+    } else {
+      // TODO(b/237674316): Move surface creation out of video sampleConsumer. Init decoder and get
+      // decoderOutput Format before init sampleConsumer.
+      sampleConsumer = assetLoaderListener.onOutputFormat(inputFormat);
+    }
+
     return true;
   }
 
@@ -200,8 +266,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    * @return Whether it may be possible to read more data immediately by calling this method again.
    * @throws ExportException If an error occurs in the decoder.
    */
+  @RequiresNonNull("decoder")
   private boolean feedDecoderFromInput() throws ExportException {
-    Codec decoder = checkNotNull(this.decoder);
     if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
       return false;
     }
@@ -16,6 +16,7 @@
 package androidx.media3.transformer;
 
 import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkStateNotNull;
 
 import android.media.MediaCodec;
 import androidx.annotation.Nullable;
@@ -75,8 +76,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   }
 
   @Override
-  @RequiresNonNull("sampleConsumer")
   protected void initDecoder(Format inputFormat) throws ExportException {
+    // TODO(b/237674316): Move surface creation out of sampleConsumer. Init decoder before
+    // sampleConsumer.
+    checkStateNotNull(sampleConsumer);
     boolean isDecoderToneMappingRequired =
         ColorInfo.isTransferHdr(inputFormat.colorInfo)
             && !ColorInfo.isTransferHdr(sampleConsumer.getExpectedInputColorInfo());
@@ -116,9 +119,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   }
 
   @Override
-  @RequiresNonNull("sampleConsumer")
+  @RequiresNonNull({"sampleConsumer", "decoder"})
   protected boolean feedConsumerFromDecoder() throws ExportException {
-    Codec decoder = checkNotNull(this.decoder);
     if (decoder.isEnded()) {
       sampleConsumer.signalEndOfVideoInput();
       isEnded = true;
@@ -88,12 +88,13 @@ public final class ImageAssetLoader implements AssetLoader {
             .setWidth(bitmap.getWidth())
             .setSampleMimeType(MIME_TYPE_IMAGE_ALL)
             .build();
-    SampleConsumer sampleConsumer =
-        listener.onTrackAdded(
-            format,
-            SUPPORTED_OUTPUT_TYPE_DECODED,
-            /* streamStartPositionUs= */ 0,
-            /* streamOffsetUs= */ 0);
+    listener.onTrackAdded(
+        format,
+        SUPPORTED_OUTPUT_TYPE_DECODED,
+        /* streamStartPositionUs= */ 0,
+        /* streamOffsetUs= */ 0);
+    SampleConsumer sampleConsumer = listener.onOutputFormat(format);
 
     checkState(editedMediaItem.durationUs != C.TIME_UNSET);
     checkState(editedMediaItem.frameRate != C.RATE_UNSET_INT);
     // TODO(b/262693274): consider using listener.onDurationUs() or the MediaItem change
@@ -28,8 +28,9 @@ import androidx.media3.common.MediaItem;
    *
    * @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
    * @param durationUs The duration of the {@link MediaItem}, in microseconds.
-   * @param trackFormat The {@link Format} of the {@link MediaItem} track corresponding to the
-   *     {@link SamplePipeline}, or {@code null} if no such track was extracted.
+   * @param trackFormat The {@link Format} extracted (and possibly decoded) from the {@link
+   *     MediaItem} track, which represents the samples input to the {@link SamplePipeline}. {@code
+   *     null} if no such track was extracted.
    * @param isLast Whether the {@link MediaItem} is the last one passed to the {@link
    *     SamplePipeline}.
    */
@@ -26,14 +26,6 @@ import androidx.media3.decoder.DecoderInputBuffer;
 @UnstableApi
 public interface SampleConsumer {
 
-  /**
-   * Returns whether the consumer should be fed with decoded sample data. If false, encoded sample
-   * data should be fed.
-   */
-  boolean expectsDecodedData();
-
-  // Methods to pass compressed input or raw audio input.
-
   /**
    * Returns a buffer if the consumer is ready to accept input, and {@code null} otherwise.
    *
@@ -56,12 +56,7 @@ import java.util.List;
       Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
     this.streamStartPositionUs = streamStartPositionUs;
     this.muxerWrapper = muxerWrapper;
-    this.outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
-  }
-
-  @Override
-  public boolean expectsDecodedData() {
-    return true;
+    outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
   }
 
   /**
@@ -18,12 +18,15 @@ package androidx.media3.transformer;
 
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
+import static androidx.media3.common.util.Assertions.checkStateNotNull;
 import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
 import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
 import static androidx.media3.transformer.ExportException.ERROR_CODE_FAILED_RUNTIME_CHECK;
 import static androidx.media3.transformer.ExportException.ERROR_CODE_MUXING_FAILED;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
 import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
+import static androidx.media3.transformer.TransformerUtil.areVideoEffectsAllNoOp;
+import static androidx.media3.transformer.TransformerUtil.containsSlowMotionData;
 import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
 import static java.lang.annotation.ElementType.TYPE_USE;
 
@@ -37,21 +40,20 @@ import androidx.media3.common.C;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
 import androidx.media3.common.Format;
-import androidx.media3.common.Metadata;
 import androidx.media3.common.MimeTypes;
 import androidx.media3.common.util.Clock;
 import androidx.media3.common.util.ConditionVariable;
 import androidx.media3.common.util.HandlerWrapper;
-import androidx.media3.effect.GlEffect;
 import androidx.media3.effect.Presentation;
-import androidx.media3.extractor.metadata.mp4.SlowMotionData;
 import com.google.common.collect.ImmutableList;
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -404,6 +406,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     private final TransformationRequest transformationRequest;
     private final FallbackListener fallbackListener;
     private final DebugViewProvider debugViewProvider;
+    private final Map<Integer, AddedTrackInfo> addedTrackInfoByTrackType;
 
     public CompositeAssetLoaderListener(
         int sequenceIndex,
@@ -417,6 +420,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       this.transformationRequest = transformationRequest;
       this.fallbackListener = fallbackListener;
       this.debugViewProvider = debugViewProvider;
+      addedTrackInfoByTrackType = new HashMap<>();
     }
 
     @Override
@@ -440,20 +444,30 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
 
     @Override
-    public SampleConsumer onTrackAdded(
-        Format firstInputFormat,
+    public boolean onTrackAdded(
+        Format firstAssetLoaderInputFormat,
         @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
         long streamStartPositionUs,
-        long streamOffsetUs)
-        throws ExportException {
-      SamplePipeline samplePipeline =
-          getSamplePipeline(
-              firstInputFormat,
-              shouldTranscode(
-                  firstInputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs),
+        long streamOffsetUs) {
+      AddedTrackInfo trackInfo =
+          new AddedTrackInfo(
+              firstAssetLoaderInputFormat,
+              supportedOutputTypes,
               streamStartPositionUs,
               streamOffsetUs);
-      @C.TrackType int trackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
+      addedTrackInfoByTrackType.put(
+          getProcessedTrackType(firstAssetLoaderInputFormat.sampleMimeType), trackInfo);
+
+      return trackInfo.shouldTranscode;
+    }
+
+    @Override
+    public SampleConsumer onOutputFormat(Format assetLoaderOutputFormat) throws ExportException {
+      @C.TrackType int trackType = getProcessedTrackType(assetLoaderOutputFormat.sampleMimeType);
+      AddedTrackInfo trackInfo = checkStateNotNull(addedTrackInfoByTrackType.get(trackType));
+      SamplePipeline samplePipeline = getSamplePipeline(assetLoaderOutputFormat, trackInfo);
+
       compositeAssetLoaders
           .get(sequenceIndex)
           .addOnMediaItemChangedListener(samplePipeline, trackType);
@@ -469,17 +483,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     // Private methods.
 
     private SamplePipeline getSamplePipeline(
-        Format firstInputFormat,
-        boolean shouldTranscode,
-        long streamStartPositionUs,
-        long streamOffsetUs)
-        throws ExportException {
-      if (shouldTranscode) {
+        Format firstAssetLoaderOutputFormat, AddedTrackInfo addedTrackInfo) throws ExportException {
+      if (addedTrackInfo.shouldTranscode) {
         EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
-        if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) {
+        if (MimeTypes.isAudio(firstAssetLoaderOutputFormat.sampleMimeType)) {
           return new AudioSamplePipeline(
-              firstInputFormat,
-              streamOffsetUs,
+              addedTrackInfo.firstAssetLoaderInputFormat,
+              /* firstPipelineInputFormat= */ firstAssetLoaderOutputFormat,
+              addedTrackInfo.streamOffsetUs,
               transformationRequest,
               firstEditedMediaItem.flattenForSlowMotion,
               firstEditedMediaItem.effects.audioProcessors,
@@ -493,11 +504,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
               compositionVideoEffects.isEmpty()
                   ? null
                   : (Presentation) compositionVideoEffects.get(0);
 
+          // TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
           return new VideoSamplePipeline(
               context,
-              firstInputFormat,
-              streamStartPositionUs,
-              streamOffsetUs,
+              addedTrackInfo.firstAssetLoaderInputFormat,
+              addedTrackInfo.streamStartPositionUs,
+              addedTrackInfo.streamOffsetUs,
               transformationRequest,
               firstEditedMediaItem.effects.videoEffects,
               compositionPresentation,
@@ -509,135 +522,123 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
               debugViewProvider);
         }
       }
 
       return new EncodedSamplePipeline(
-          firstInputFormat,
-          streamStartPositionUs,
+          firstAssetLoaderOutputFormat,
+          addedTrackInfo.streamStartPositionUs,
           transformationRequest,
          muxerWrapper,
          fallbackListener);
     }
 
-    private boolean shouldTranscode(
-        Format inputFormat,
-        @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
-        long streamStartPositionUs,
-        long streamOffsetUs) {
-      boolean assetLoaderCanOutputDecoded =
-          (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) != 0;
-      boolean assetLoaderCanOutputEncoded =
-          (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) != 0;
-      checkArgument(assetLoaderCanOutputDecoded || assetLoaderCanOutputEncoded);
-
-      @C.TrackType int trackType = getProcessedTrackType(inputFormat.sampleMimeType);
-
-      boolean shouldTranscode = false;
-      if (!assetLoaderCanOutputEncoded) {
-        shouldTranscode = true;
-      } else if (trackType == C.TRACK_TYPE_AUDIO) {
-        shouldTranscode = shouldTranscodeAudio(inputFormat);
-      } else if (trackType == C.TRACK_TYPE_VIDEO) {
-        shouldTranscode = shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
-      }
-
-      checkState(!shouldTranscode || assetLoaderCanOutputDecoded);
-
-      return shouldTranscode;
-    }
-
-    private boolean shouldTranscodeAudio(Format inputFormat) {
-      if (editedMediaItems.size() > 1 && !composition.transmuxAudio) {
-        return true;
-      }
-      if (encoderFactory.audioNeedsEncoding()) {
-        return true;
-      }
-      if (transformationRequest.audioMimeType != null
-          && !transformationRequest.audioMimeType.equals(inputFormat.sampleMimeType)) {
-        return true;
-      }
-      if (transformationRequest.audioMimeType == null
-          && !muxerWrapper.supportsSampleMimeType(inputFormat.sampleMimeType)) {
-        return true;
-      }
-      EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
-      if (firstEditedMediaItem.flattenForSlowMotion && isSlowMotion(inputFormat)) {
-        return true;
-      }
-      if (!firstEditedMediaItem.effects.audioProcessors.isEmpty()) {
-        return true;
-      }
-
-      return false;
-    }
-
-    private boolean isSlowMotion(Format format) {
-      @Nullable Metadata metadata = format.metadata;
-      if (metadata == null) {
-        return false;
-      }
-      for (int i = 0; i < metadata.length(); i++) {
-        if (metadata.get(i) instanceof SlowMotionData) {
-          return true;
-        }
-      }
-      return false;
-    }
-
-    private boolean shouldTranscodeVideo(
-        Format inputFormat, long streamStartPositionUs, long streamOffsetUs) {
-      if (editedMediaItems.size() > 1 && !composition.transmuxVideo) {
-        return true;
-      }
-      EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
-      if ((streamStartPositionUs - streamOffsetUs) != 0
-          && !firstEditedMediaItem.mediaItem.clippingConfiguration.startsAtKeyFrame) {
-        return true;
-      }
-      if (encoderFactory.videoNeedsEncoding()) {
-        return true;
-      }
-      if (transformationRequest.hdrMode != TransformationRequest.HDR_MODE_KEEP_HDR) {
-        return true;
-      }
-      if (transformationRequest.videoMimeType != null
-          && !transformationRequest.videoMimeType.equals(inputFormat.sampleMimeType)) {
-        return true;
-      }
-      if (transformationRequest.videoMimeType == null
-          && !muxerWrapper.supportsSampleMimeType(inputFormat.sampleMimeType)) {
-        return true;
-      }
-      if (inputFormat.pixelWidthHeightRatio != 1f) {
-        return true;
-      }
-      if (!areVideoEffectsAllNoOp(firstEditedMediaItem.effects.videoEffects, inputFormat)) {
-        return true;
-      }
-      return false;
-    }
-
-    /**
-     * Returns whether all {@code videoEffects} are {@linkplain GlEffect#isNoOp(int, int) no-ops},
-     * given an input {@link Format}.
-     */
-    private boolean areVideoEffectsAllNoOp(ImmutableList<Effect> videoEffects, Format inputFormat) {
-      int decodedWidth =
-          (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
-      int decodedHeight =
-          (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
-      for (int i = 0; i < videoEffects.size(); i++) {
-        Effect videoEffect = videoEffects.get(i);
-        if (!(videoEffect instanceof GlEffect)) {
-          // We cannot confirm whether Effect instances that are not GlEffect instances are
-          // no-ops.
-          return false;
-        }
-        GlEffect glEffect = (GlEffect) videoEffect;
-        if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
-          return false;
-        }
-      }
-      return true;
-    }
+    private final class AddedTrackInfo {
+      public final Format firstAssetLoaderInputFormat;
+      public final long streamStartPositionUs;
+      public final long streamOffsetUs;
+      public final boolean shouldTranscode;
+
+      public AddedTrackInfo(
+          Format firstAssetLoaderInputFormat,
+          @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
+          long streamStartPositionUs,
+          long streamOffsetUs) {
+        this.firstAssetLoaderInputFormat = firstAssetLoaderInputFormat;
+        this.streamStartPositionUs = streamStartPositionUs;
+        this.streamOffsetUs = streamOffsetUs;
+        shouldTranscode =
+            shouldTranscode(
+                firstAssetLoaderInputFormat,
+                supportedOutputTypes,
+                streamStartPositionUs,
+                streamOffsetUs);
+      }
+
+      private boolean shouldTranscode(
+          Format inputFormat,
+          @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
+          long streamStartPositionUs,
+          long streamOffsetUs) {
+        boolean assetLoaderCanOutputDecoded =
+            (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) != 0;
+        boolean assetLoaderCanOutputEncoded =
+            (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) != 0;
+        checkArgument(assetLoaderCanOutputDecoded || assetLoaderCanOutputEncoded);
+
+        @C.TrackType int trackType = getProcessedTrackType(inputFormat.sampleMimeType);
+
+        boolean shouldTranscode = false;
+        if (!assetLoaderCanOutputEncoded) {
+          shouldTranscode = true;
+        } else if (trackType == C.TRACK_TYPE_AUDIO) {
+          shouldTranscode = shouldTranscodeAudio(inputFormat);
+        } else if (trackType == C.TRACK_TYPE_VIDEO) {
+          shouldTranscode =
+              shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
+        }
+
+        checkState(!shouldTranscode || assetLoaderCanOutputDecoded);
+
+        return shouldTranscode;
+      }
+
+      private boolean shouldTranscodeAudio(Format inputFormat) {
+        if (editedMediaItems.size() > 1 && !composition.transmuxAudio) {
+          return true;
+        }
+        if (encoderFactory.audioNeedsEncoding()) {
+          return true;
+        }
+        if (transformationRequest.audioMimeType != null
+            && !transformationRequest.audioMimeType.equals(inputFormat.sampleMimeType)) {
+          return true;
+        }
+        if (transformationRequest.audioMimeType == null
+            && !muxerWrapper.supportsSampleMimeType(inputFormat.sampleMimeType)) {
+          return true;
+        }
+        EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
+        if (firstEditedMediaItem.flattenForSlowMotion && containsSlowMotionData(inputFormat)) {
+          return true;
+        }
+        if (!firstEditedMediaItem.effects.audioProcessors.isEmpty()) {
+          return true;
+        }
+
+        return false;
+      }
+
+      private boolean shouldTranscodeVideo(
+          Format inputFormat, long streamStartPositionUs, long streamOffsetUs) {
+        if (editedMediaItems.size() > 1 && !composition.transmuxVideo) {
+          return true;
+        }
+        EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
+        if ((streamStartPositionUs - streamOffsetUs) != 0
+            && !firstEditedMediaItem.mediaItem.clippingConfiguration.startsAtKeyFrame) {
+          return true;
+        }
+        if (encoderFactory.videoNeedsEncoding()) {
+          return true;
+        }
+        if (transformationRequest.hdrMode != TransformationRequest.HDR_MODE_KEEP_HDR) {
+          return true;
+        }
+        if (transformationRequest.videoMimeType != null
+            && !transformationRequest.videoMimeType.equals(inputFormat.sampleMimeType)) {
+          return true;
+        }
+        if (transformationRequest.videoMimeType == null
+            && !muxerWrapper.supportsSampleMimeType(inputFormat.sampleMimeType)) {
+          return true;
+        }
+        if (inputFormat.pixelWidthHeightRatio != 1f) {
+          return true;
+        }
+        if (!areVideoEffectsAllNoOp(firstEditedMediaItem.effects.videoEffects, inputFormat)) {
+          return true;
+        }
+        return false;
+      }
+    }
   }
 }
@@ -18,7 +18,13 @@ package androidx.media3.transformer;
 
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
+import androidx.media3.common.Effect;
+import androidx.media3.common.Format;
+import androidx.media3.common.Metadata;
 import androidx.media3.common.MimeTypes;
+import androidx.media3.effect.GlEffect;
+import androidx.media3.extractor.metadata.mp4.SlowMotionData;
+import com.google.common.collect.ImmutableList;
 
 /** Utility methods for Transformer. */
 /* package */ final class TransformerUtil {
@@ -37,4 +43,45 @@ import androidx.media3.common.MimeTypes;
     @C.TrackType int trackType = MimeTypes.getTrackType(mimeType);
     return trackType == C.TRACK_TYPE_IMAGE ? C.TRACK_TYPE_VIDEO : trackType;
   }
+
+  /**
+   * Returns whether the collection of {@code videoEffects} would be a {@linkplain
+   * GlEffect#isNoOp(int, int) no-op}, if queued samples of this {@link Format}.
+   */
+  public static boolean areVideoEffectsAllNoOp(
+      ImmutableList<Effect> videoEffects, Format inputFormat) {
+    int decodedWidth =
+        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.width : inputFormat.height;
+    int decodedHeight =
+        (inputFormat.rotationDegrees % 180 == 0) ? inputFormat.height : inputFormat.width;
+    for (int i = 0; i < videoEffects.size(); i++) {
+      Effect videoEffect = videoEffects.get(i);
+      if (!(videoEffect instanceof GlEffect)) {
+        // We cannot confirm whether Effect instances that are not GlEffect instances are
+        // no-ops.
+        return false;
+      }
+      GlEffect glEffect = (GlEffect) videoEffect;
+      if (!glEffect.isNoOp(decodedWidth, decodedHeight)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * Returns whether the {@link Format} contains {@linkplain SlowMotionData slow motion metadata}.
+   */
+  public static boolean containsSlowMotionData(Format format) {
+    @Nullable Metadata metadata = format.metadata;
+    if (metadata == null) {
+      return false;
+    }
+    for (int i = 0; i < metadata.length(); i++) {
+      if (metadata.get(i) instanceof SlowMotionData) {
+        return true;
+      }
+    }
+    return false;
+  }
 }
@@ -16,6 +16,7 @@
 package androidx.media3.transformer;
 
 import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil;
+import static androidx.media3.transformer.TransformerUtil.getProcessedTrackType;
 import static com.google.common.truth.Truth.assertThat;
 
 import android.content.Context;
@@ -23,6 +24,7 @@ import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.Looper;
 import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.common.Format;
 import androidx.media3.common.MediaItem;
 import androidx.media3.common.util.Clock;
@@ -46,12 +48,16 @@ public class ExoPlayerAssetLoaderTest {
     assetLoaderThread.start();
     Looper assetLoaderLooper = assetLoaderThread.getLooper();
     AtomicReference<Exception> exceptionRef = new AtomicReference<>();
-    AtomicBoolean isTrackAdded = new AtomicBoolean();
+    AtomicBoolean isAudioOutputFormatSet = new AtomicBoolean();
+    AtomicBoolean isVideoOutputFormatSet = new AtomicBoolean();
 
     AssetLoader.Listener listener =
         new AssetLoader.Listener() {
 
           private volatile boolean isDurationSet;
           private volatile boolean isTrackCountSet;
+          private volatile boolean isAudioTrackAdded;
+          private volatile boolean isVideoTrackAdded;
 
           @Override
           public void onDurationUs(long durationUs) {
@@ -68,8 +74,8 @@ public class ExoPlayerAssetLoaderTest {
           }
 
           @Override
-          public SampleConsumer onTrackAdded(
-              Format format,
+          public boolean onTrackAdded(
+              Format inputFormat,
               @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
               long streamStartPositionUs,
               long streamOffsetUs) {
@@ -80,7 +86,32 @@ public class ExoPlayerAssetLoaderTest {
               exceptionRef.set(
                   new IllegalStateException("onTrackAdded() called before onTrackCount()"));
             }
-            isTrackAdded.set(true);
+            sleep();
+            @C.TrackType int trackType = getProcessedTrackType(inputFormat.sampleMimeType);
+            if (trackType == C.TRACK_TYPE_AUDIO) {
+              isAudioTrackAdded = true;
+            } else if (trackType == C.TRACK_TYPE_VIDEO) {
+              isVideoTrackAdded = true;
+            }
+            return false;
+          }
+
+          @Override
+          public SampleConsumer onOutputFormat(Format format) {
+            @C.TrackType int trackType = getProcessedTrackType(format.sampleMimeType);
+            boolean isAudio = trackType == C.TRACK_TYPE_AUDIO;
+            boolean isVideo = trackType == C.TRACK_TYPE_VIDEO;
+
+            boolean isTrackAdded = (isAudio && isAudioTrackAdded) || (isVideo && isVideoTrackAdded);
+            if (!isTrackAdded) {
+              exceptionRef.set(
+                  new IllegalStateException("onOutputFormat() called before onTrackAdded()"));
+            }
+            if (isAudio) {
+              isAudioOutputFormatSet.set(true);
+            } else if (isVideo) {
+              isVideoOutputFormatSet.set(true);
+            }
             return new FakeSampleConsumer();
           }
 
@@ -107,7 +138,8 @@ public class ExoPlayerAssetLoaderTest {
         Looper.myLooper(),
         () -> {
           ShadowSystemClock.advanceBy(Duration.ofMillis(10));
-          return isTrackAdded.get() || exceptionRef.get() != null;
+          return (isAudioOutputFormatSet.get() && isVideoOutputFormatSet.get())
+              || exceptionRef.get() != null;
         });
 
     assertThat(exceptionRef.get()).isNull();
@@ -126,11 +158,6 @@ public class ExoPlayerAssetLoaderTest {
 
   private static final class FakeSampleConsumer implements SampleConsumer {
 
-    @Override
-    public boolean expectsDecodedData() {
-      return false;
-    }
-
    @Nullable
    @Override
    public DecoderInputBuffer getInputBuffer() {
@@ -1000,7 +1000,7 @@ public final class TransformerEndToEndTest {
     transformer.start(mediaItem, outputPath);
     runLooperUntil(transformer.getApplicationLooper(), () -> sampleConsumerRef.get() != null);
 
-    assertThat(sampleConsumerRef.get().expectsDecodedData()).isTrue();
+    assertThat(sampleConsumerRef.get()).isNotInstanceOf(EncodedSamplePipeline.class);
   }
 
   @Test
@@ -1488,12 +1488,10 @@ public final class TransformerEndToEndTest {
             .setChannelCount(2)
             .build();
     try {
-      SampleConsumer sampleConsumer =
-          listener.onTrackAdded(
-              format,
-              supportedOutputTypes,
-              /* streamStartPositionUs= */ 0,
-              /* streamOffsetUs= */ 0);
+      listener.onTrackAdded(
+          format, supportedOutputTypes, /* streamStartPositionUs= */ 0, /* streamOffsetUs= */ 0);
+      SampleConsumer sampleConsumer = listener.onOutputFormat(format);
       if (sampleConsumerRef != null) {
         sampleConsumerRef.set(sampleConsumer);
       }