Specify the types of samples output by AssetLoader

This is useful if an app passes a custom AssetLoader that can only output encoded or decoded samples.

PiperOrigin-RevId: 497097492

parent f2eac2df71
commit 4dddcb00ff
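
For context, here is a minimal sketch of how an app-supplied AssetLoader could use the new parameter to declare that it only outputs encoded samples when it registers a track. Only the onTrackAdded signature, the listener callbacks and the SUPPORTED_OUTPUT_TYPE_ENCODED flag come from this change; the AAC format, the field names and the timing values are illustrative assumptions.

  // Inside a hypothetical custom AssetLoader's start() method, where 'listener' is the
  // AssetLoader.Listener received through Factory.setListener().
  Format trackFormat =
      new Format.Builder().setSampleMimeType(MimeTypes.AUDIO_AAC).build(); // assumed format
  try {
    SamplePipeline.Input input =
        listener.onTrackAdded(
            trackFormat,
            /* supportedOutputTypes= */ AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED,
            /* streamStartPositionUs= */ 0,
            /* streamOffsetUs= */ 0);
    // If this call succeeds, the selected pipeline accepts encoded samples; if decoding had
    // been required, the transformation would instead complete with an error.
  } catch (TransformationException e) {
    listener.onError(e);
  }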
@@ -16,13 +16,20 @@
package com.google.android.exoplayer2.transformer;

import static java.lang.annotation.ElementType.TYPE_USE;

import android.content.Context;
import android.os.Looper;
import androidx.annotation.IntDef;
import androidx.annotation.IntRange;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.util.Clock;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Provides media data to a {@linkplain Transformer}.

@@ -119,7 +126,7 @@ public interface AssetLoader {
 * <ul>
 *   <li>{@linkplain #onDurationUs(long) Report} the duration of the input media.
 *   <li>{@linkplain #onTrackCount(int) Report} the number of output tracks.
 *   <li>{@linkplain #onTrackAdded(Format, long, long) Add} the information for each track.
 *   <li>{@linkplain #onTrackAdded(Format, int, long, long) Add} the information for each track.
 * </ul>
 *
 * <p>This listener can be called from any thread.

@@ -142,6 +149,8 @@
 *
 * @param format The {@link Format} of the input media (prior to video slow motion flattening or
 *     to decoding).
 * @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
 *     this asset loader for the track added. At least one output type must be supported.
 * @param streamStartPositionUs The start position of the stream (offset by {@code
 *     streamOffsetUs}), in microseconds.
 * @param streamOffsetUs The offset that will be added to the timestamps to make sure they are

@@ -152,7 +161,10 @@
 * SamplePipeline.Input}.
 */
SamplePipeline.Input onTrackAdded(
    Format format, long streamStartPositionUs, long streamOffsetUs)
    Format format,
    @SupportedOutputTypes int supportedOutputTypes,
    long streamStartPositionUs,
    long streamOffsetUs)
    throws TransformationException;

/**

@@ -162,6 +174,25 @@
void onError(Exception e);
}

/**
 * Supported output types of an asset loader. Possible flag values are {@link
 * #SUPPORTED_OUTPUT_TYPE_ENCODED} and {@link #SUPPORTED_OUTPUT_TYPE_DECODED}.
 */
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef(
    flag = true,
    value = {
      SUPPORTED_OUTPUT_TYPE_ENCODED,
      SUPPORTED_OUTPUT_TYPE_DECODED,
    })
@interface SupportedOutputTypes {}
/** Indicates that the asset loader can output encoded samples. */
int SUPPORTED_OUTPUT_TYPE_ENCODED = 1;
/** Indicates that the asset loader can output decoded samples. */
int SUPPORTED_OUTPUT_TYPE_DECODED = 1 << 1;

/** Starts the asset loader. */
void start();
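
Because SupportedOutputTypes is an IntDef with flag = true, the two constants are bit flags: an asset loader can declare either one or both, and consumers test membership with bitwise masks. An illustrative snippet (the variable names are not from the change):

  @AssetLoader.SupportedOutputTypes
  int supportedOutputTypes =
      AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED | AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
  boolean canOutputEncoded =
      (supportedOutputTypes & AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED) != 0; // true
  boolean canOutputDecoded =
      (supportedOutputTypes & AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED) != 0; // true

This mirrors how the flags are combined and checked in the hunks below.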
@@ -18,6 +18,8 @@ package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED;
import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;

import android.media.MediaCodec;

@@ -162,8 +164,11 @@
      return false;
    }
    Format inputFormat = checkNotNull(formatHolder.format);
    @AssetLoader.SupportedOutputTypes
    int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
    samplePipelineInput =
        assetLoaderListener.onTrackAdded(inputFormat, streamStartPositionUs, streamOffsetUs);
        assetLoaderListener.onTrackAdded(
            inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
    if (getTrackType() == C.TRACK_TYPE_VIDEO && flattenForSlowMotion) {
      sefVideoSlowMotionFlattener = new SefSlowMotionFlattener(inputFormat);
    }
@@ -16,8 +16,11 @@

package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static com.google.android.exoplayer2.transformer.TransformationException.ERROR_CODE_MUXING_FAILED;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.annotation.ElementType.TYPE_USE;

import android.content.Context;

@@ -413,7 +416,10 @@
  @Override
  public SamplePipeline.Input onTrackAdded(
      Format format, long streamStartPositionUs, long streamOffsetUs)
      Format format,
      @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
      long streamStartPositionUs,
      long streamOffsetUs)
      throws TransformationException {
    if (tracksAddedCount == 0) {
      // Call setTrackCount() methods here so that they are called from the same thread as the

@@ -423,7 +429,7 @@
    }

    SamplePipeline samplePipeline =
        getSamplePipeline(format, streamStartPositionUs, streamOffsetUs);
        getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
    internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();

    int samplePipelineIndex = tracksAddedCount;

@@ -437,7 +443,11 @@
            .setChannelCount(2)
            .build();
    SamplePipeline audioSamplePipeline =
        getSamplePipeline(silentAudioFormat, streamStartPositionUs, streamOffsetUs);
        getSamplePipeline(
            silentAudioFormat,
            SUPPORTED_OUTPUT_TYPE_DECODED,
            streamStartPositionUs,
            streamOffsetUs);
    internalHandler
        .obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, audioSamplePipeline)
        .sendToTarget();

@@ -469,9 +479,23 @@
  }

  private SamplePipeline getSamplePipeline(
      Format inputFormat, long streamStartPositionUs, long streamOffsetUs)
      Format inputFormat,
      @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
      long streamStartPositionUs,
      long streamOffsetUs)
      throws TransformationException {
    if (MimeTypes.isAudio(inputFormat.sampleMimeType) && shouldTranscodeAudio(inputFormat)) {
    checkState(supportedOutputTypes != 0);
    boolean isAudio = MimeTypes.isAudio(inputFormat.sampleMimeType);
    boolean shouldTranscode =
        isAudio
            ? shouldTranscodeAudio(inputFormat)
            : shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
    boolean assetLoaderNeverDecodes = (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) == 0;
    checkState(!shouldTranscode || !assetLoaderNeverDecodes);
    boolean assetLoaderAlwaysDecodes =
        (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) == 0;
    boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
    if (isAudio && shouldUseTranscodingPipeline) {
      return new AudioTranscodingSamplePipeline(
          inputFormat,
          streamStartPositionUs,

@@ -482,8 +506,7 @@
          encoderFactory,
          muxerWrapper,
          fallbackListener);
    } else if (MimeTypes.isVideo(inputFormat.sampleMimeType)
        && shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs)) {
    } else if (shouldUseTranscodingPipeline) {
      return new VideoTranscodingSamplePipeline(
          context,
          inputFormat,
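
To make the new pipeline-selection logic concrete, here is a worked case written against the booleans introduced above; the input values are assumptions for illustration, not part of the change:

  // Assume an audio track whose asset loader only supports encoded output, while the app has
  // configured audio processing, so shouldTranscodeAudio(inputFormat) returns true.
  @AssetLoader.SupportedOutputTypes
  int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED;
  boolean shouldTranscode = true; // assumed: transcoding is needed for this track
  boolean assetLoaderNeverDecodes =
      (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) == 0; // true
  // checkState(!shouldTranscode || !assetLoaderNeverDecodes) fails, so the transformation
  // ends with an error rather than feeding encoded samples to a pipeline that expects
  // decoded ones; the new end-to-end test below covers exactly this situation.

Conversely, if the loader only supports decoded output (assetLoaderAlwaysDecodes), a transcoding pipeline is used even when no edit requires it, since pass-through of encoded samples is impossible.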
@@ -16,10 +16,14 @@

package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.robolectric.RobolectricUtil.runLooperUntil;
import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static com.google.android.exoplayer2.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_WAITING_FOR_AVAILABILITY;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;

@@ -42,6 +46,7 @@
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;

@@ -54,6 +59,7 @@
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.testutil.DumpFileAsserts;
import com.google.android.exoplayer2.testutil.FakeClock;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;

@@ -230,7 +236,7 @@ public final class TransformerEndToEndTest {
  }

  @Test
  public void startTransformation_concurrentTransformations_throwsError() throws Exception {
  public void startTransformation_concurrentTransformations_throwsError() {
    Transformer transformer = createTransformerBuilder(/* enableFallback= */ false).build();
    MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_VIDEO_ONLY);

@@ -633,6 +639,42 @@
    assertThat(illegalStateException.get()).isNotNull();
  }

  @Test
  public void startTransformation_withAssetLoaderAlwaysDecoding_pipelineExpectsDecoded()
      throws Exception {
    AtomicReference<SamplePipeline.Input> samplePipelineInputRef = new AtomicReference<>();
    Transformer transformer =
        createTransformerBuilder(/* enableFallback= */ false)
            .setAssetLoaderFactory(
                new FakeAssetLoader.Factory(SUPPORTED_OUTPUT_TYPE_DECODED, samplePipelineInputRef))
            .build();
    MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);

    transformer.startTransformation(mediaItem, outputPath);
    runLooperUntil(transformer.getApplicationLooper(), () -> samplePipelineInputRef.get() != null);

    assertThat(samplePipelineInputRef.get().expectsDecodedData()).isTrue();
  }

  @Test
  public void startTransformation_withAssetLoaderNotDecodingAndDecodingNeeded_completesWithError()
      throws Exception {
    Transformer transformer =
        createTransformerBuilder(/* enableFallback= */ false)
            .setAudioProcessors(ImmutableList.of(new SonicAudioProcessor()))
            .setAssetLoaderFactory(
                new FakeAssetLoader.Factory(
                    SUPPORTED_OUTPUT_TYPE_ENCODED, /* samplePipelineInputRef= */ null))
            .build();
    MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);

    transformer.startTransformation(mediaItem, outputPath);
    TransformationException transformationException =
        TransformerTestRunner.runUntilError(transformer);

    assertThat(transformationException).hasCauseThat().isInstanceOf(IllegalStateException.class);
  }

  @Test
  public void getProgress_knownDuration_returnsConsistentStates() throws Exception {
    Transformer transformer = createTransformerBuilder(/* enableFallback= */ false).build();

@@ -1038,4 +1080,120 @@
      }
    }
  }

  private static final class FakeAssetLoader implements AssetLoader {

    public static final class Factory implements AssetLoader.Factory {

      private final @SupportedOutputTypes int supportedOutputTypes;
      @Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;

      @Nullable private AssetLoader.Listener listener;

      public Factory(
          @SupportedOutputTypes int supportedOutputTypes,
          @Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
        this.supportedOutputTypes = supportedOutputTypes;
        this.samplePipelineInputRef = samplePipelineInputRef;
      }

      @Override
      public AssetLoader.Factory setContext(Context context) {
        return this;
      }

      @Override
      public AssetLoader.Factory setMediaItem(MediaItem mediaItem) {
        return this;
      }

      @Override
      public AssetLoader.Factory setRemoveAudio(boolean removeAudio) {
        return this;
      }

      @Override
      public AssetLoader.Factory setRemoveVideo(boolean removeVideo) {
        return this;
      }

      @Override
      public AssetLoader.Factory setFlattenVideoForSlowMotion(boolean flattenVideoForSlowMotion) {
        return this;
      }

      @Override
      public AssetLoader.Factory setDecoderFactory(Codec.DecoderFactory decoderFactory) {
        return this;
      }

      @Override
      public AssetLoader.Factory setLooper(Looper looper) {
        return this;
      }

      @Override
      public AssetLoader.Factory setListener(Listener listener) {
        this.listener = listener;
        return this;
      }

      @Override
      public AssetLoader.Factory setClock(Clock clock) {
        return this;
      }

      @Override
      public AssetLoader createAssetLoader() {
        return new FakeAssetLoader(
            checkNotNull(listener), supportedOutputTypes, samplePipelineInputRef);
      }
    }

    private final AssetLoader.Listener listener;
    private final @SupportedOutputTypes int supportedOutputTypes;
    @Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;

    public FakeAssetLoader(
        Listener listener,
        @SupportedOutputTypes int supportedOutputTypes,
        @Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
      this.listener = listener;
      this.supportedOutputTypes = supportedOutputTypes;
      this.samplePipelineInputRef = samplePipelineInputRef;
    }

    @Override
    public void start() {
      listener.onDurationUs(10_000_000);
      listener.onTrackCount(1);
      Format format =
          new Format.Builder()
              .setSampleMimeType(MimeTypes.AUDIO_AAC)
              .setSampleRate(44100)
              .setChannelCount(2)
              .build();
      try {
        SamplePipeline.Input samplePipelineInput =
            listener.onTrackAdded(
                format,
                supportedOutputTypes,
                /* streamStartPositionUs= */ 0,
                /* streamOffsetUs= */ 0);
        if (samplePipelineInputRef != null) {
          samplePipelineInputRef.set(samplePipelineInput);
        }
      } catch (TransformationException e) {
        throw new IllegalStateException(e);
      }
    }

    @Override
    public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
      return 0;
    }

    @Override
    public void release() {}
  }
}
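
As the new tests show, a custom loader is wired in through the Transformer builder's setAssetLoaderFactory method. A minimal sketch, assuming a hypothetical MyEncodedOnlyAssetLoader whose Factory is implemented along the lines of FakeAssetLoader.Factory above, and assuming setAssetLoaderFactory is available to the app the same way the tests use it:

  Transformer transformer =
      new Transformer.Builder(context)
          .setAssetLoaderFactory(new MyEncodedOnlyAssetLoader.Factory()) // hypothetical loader
          .build();
  transformer.startTransformation(inputMediaItem, outputPath);
  // Because the loader declares that it only outputs encoded samples, the transformation
  // reports an error if the requested edits would require decoding.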
|
Loading…
x
Reference in New Issue
Block a user