Mirror of https://github.com/androidx/media.git, synced 2025-04-30 06:46:50 +08:00.
Create an AssetInfo class for AndroidTestUtil test asset information.
This is an internal refactor with no logic changed. Test asset information is currently duplicated and used inconsistently across the transformer androidTest sources. This change defines each asset as its own AssetInfo, with the other relevant pieces of information stored alongside the uri.

Once this is in place, we can consider other useful functionality, such as boolean flags for which tracks an asset has, helper methods for whether an asset is local or remote, and more. This reduces the manual overhead of using more assets in tests and, in particular, leads towards easily using new and existing assets in parameterized tests.

PiperOrigin-RevId: 644040595
This commit is contained in: parent d0815d3f7b, commit 2b55a5bc2d.
File diff suppressed because it is too large.
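The AndroidTestUtil diff that actually introduces the class is the suppressed one above, so its exact implementation is not visible on this page. From the way the new constants are used in the hunks below, each asset exposes at least a uri, a videoFormat and a videoFrameCount. A minimal sketch of such a holder, with an assumed constructor and field layout (only the three field names and the AndroidTestUtil.AssetInfo name are confirmed by this page):

import androidx.annotation.Nullable;
import androidx.media3.common.Format;

/** Sketch of a per-asset information holder; not the actual AndroidTestUtil.AssetInfo. */
public final class AssetInfo {
  /** Asset uri string passed to MediaItem.fromUri or MediaItem.Builder.setUri. */
  public final String uri;

  /** Video track format, assumed null for assets without a video track. */
  @Nullable public final Format videoFormat;

  /** Number of video frames in the asset, used in frame-count assertions. */
  public final int videoFrameCount;

  public AssetInfo(String uri, @Nullable Format videoFormat, int videoFrameCount) {
    this.uri = uri;
    this.videoFormat = videoFormat;
    this.videoFrameCount = videoFrameCount;
  }
}

With per-asset holders like this, a single constant such as MP4_ASSET replaces the MP4_ASSET_URI_STRING, MP4_ASSET_FORMAT and MP4_ASSET_FRAME_COUNT trio used throughout the tests below.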
CompositionPlayerTest.java

@@ -16,8 +16,8 @@
-import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;

The remaining hunks in this file (at lines 122, 142, 166, 188, 232, 273, 292, 320, 351, 383 and 420) make the same two per-call-site substitutions throughout the test bodies:

-new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
+new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))

-.setUri(JPG_SINGLE_PIXEL_URI_STRING)
+.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
ForceEndOfStreamTest.java

@@ -17,8 +17,7 @@
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FRAME_COUNT;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;

@@ -70,18 +69,20 @@ public class ForceEndOfStreamTest {
-context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+context,
+testId,
+/* inputFormat= */ MP4_ASSET.videoFormat,
+/* outputFormat= */ MP4_ASSET.videoFormat);
-.run(
-testId, createComposition(MediaItem.fromUri(AndroidTestUtil.MP4_ASSET_URI_STRING)));
+.run(testId, createComposition(MediaItem.fromUri(MP4_ASSET.uri)));
-.isEqualTo(MP4_ASSET_FRAME_COUNT - framesToSkip);
+.isEqualTo(MP4_ASSET.videoFrameCount - framesToSkip);

@@ -91,17 +92,19 @@ public class ForceEndOfStreamTest {
-context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+context,
+testId,
+/* inputFormat= */ MP4_ASSET.videoFormat,
+/* outputFormat= */ MP4_ASSET.videoFormat);
-.run(
-testId, createComposition(MediaItem.fromUri(AndroidTestUtil.MP4_ASSET_URI_STRING)));
+.run(testId, createComposition(MediaItem.fromUri(MP4_ASSET.uri)));
-assertThat(testResult.exportResult.videoFrameCount).isEqualTo(MP4_ASSET_FRAME_COUNT);
+assertThat(testResult.exportResult.videoFrameCount).isEqualTo(MP4_ASSET.videoFrameCount);

@@ -120,7 +123,7 @@ public class ForceEndOfStreamTest {
-new FrameDroppingDecoderFactory(context, MP4_ASSET_FRAME_COUNT, framesToSkip),
+new FrameDroppingDecoderFactory(context, MP4_ASSET.videoFrameCount, framesToSkip),
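Condensed from the ForceEndOfStreamTest hunks above, the call-site effect of the refactor looks like this; both halves are taken from the diff, nothing here is new API:

// Before: one constant per property of the test asset.
assumeFormatsSupported(
    context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
ExportTestResult testResult =
    new TransformerAndroidTestRunner.Builder(context, buildTransformer(context, framesToSkip))
        .build()
        .run(testId, createComposition(MediaItem.fromUri(AndroidTestUtil.MP4_ASSET_URI_STRING)));
assertThat(testResult.exportResult.videoFrameCount).isEqualTo(MP4_ASSET_FRAME_COUNT - framesToSkip);

// After: every property is read from the same AssetInfo constant.
assumeFormatsSupported(
    context,
    testId,
    /* inputFormat= */ MP4_ASSET.videoFormat,
    /* outputFormat= */ MP4_ASSET.videoFormat);
ExportTestResult testResult =
    new TransformerAndroidTestRunner.Builder(context, buildTransformer(context, framesToSkip))
        .build()
        .run(testId, createComposition(MediaItem.fromUri(MP4_ASSET.uri)));
assertThat(testResult.exportResult.videoFrameCount).isEqualTo(MP4_ASSET.videoFrameCount - framesToSkip);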
ParameterizedInputSequenceExportTest.java

@@ -17,13 +17,10 @@
-import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET_FRAME_COUNT;
-import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FRAME_COUNT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.getFormatForTestFile;
+import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET;

@@ -36,6 +33,7 @@ import androidx.media3.common.MimeTypes;
+import androidx.media3.transformer.AndroidTestUtil.AssetInfo;

@@ -60,13 +58,11 @@ import org.junit.runners.Parameterized.Parameters;
 private static final ImageItemConfig PNG_ITEM =
-new ImageItemConfig(PNG_ASSET_URI_STRING, /* frameCount= */ 34);
+new ImageItemConfig(PNG_ASSET.uri, /* frameCount= */ 34);
 private static final ImageItemConfig JPG_ITEM =
-new ImageItemConfig(JPG_ASSET_URI_STRING, /* frameCount= */ 41);
-private static final VideoItemConfig BT709_ITEM =
-new VideoItemConfig(MP4_ASSET_URI_STRING, MP4_ASSET_FRAME_COUNT);
-private static final VideoItemConfig BT601_ITEM =
-new VideoItemConfig(BT601_MP4_ASSET_URI_STRING, BT601_MP4_ASSET_FRAME_COUNT);
+new ImageItemConfig(JPG_ASSET.uri, /* frameCount= */ 41);
+private static final VideoItemConfig BT709_ITEM = new VideoItemConfig(MP4_ASSET);
+private static final VideoItemConfig BT601_ITEM = new VideoItemConfig(BT601_MP4_ASSET);

@@ -264,8 +260,8 @@ public class ParameterizedInputSequenceExportTest {
 private static final class VideoItemConfig extends ItemConfig {
-public VideoItemConfig(String uri, int frameCount) {
-super(uri, frameCount, getFormatForTestFile(uri), getFormatForTestFile(uri));
+public VideoItemConfig(AssetInfo asset) {
+super(asset.uri, asset.videoFrameCount, asset.videoFormat, asset.videoFormat);
 }
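The VideoItemConfig change above is where the commit message's parameterized-testing goal becomes concrete: a config now takes the whole AssetInfo instead of parallel uri and frame-count arguments. As an illustration only, not code from this change, a parameterized test could then enumerate assets directly:

import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;

import androidx.media3.common.MediaItem;
import androidx.media3.transformer.AndroidTestUtil.AssetInfo;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

/** Sketch of a test parameterized directly over AssetInfo values. */
@RunWith(Parameterized.class)
public class AssetInfoParameterizedTestSketch {
  @Parameters(name = "{0}")
  public static ImmutableList<AssetInfo> params() {
    // Any AssetInfo constant can be added here without also threading its
    // uri, format and frame count through the test by hand.
    return ImmutableList.of(MP4_ASSET, BT601_MP4_ASSET);
  }

  @Parameter public AssetInfo asset;

  @Test
  public void export_completes() throws Exception {
    MediaItem mediaItem = MediaItem.fromUri(asset.uri);
    // Run the export with mediaItem and assert against asset.videoFrameCount.
  }
}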
RawAssetLoaderAndroidTest.java

@@ -15,7 +15,7 @@
-import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET;

The three hunks in this file (at lines 108, 152 and 199) each make the same change to the bitmap-loading statement:

-Bitmap bitmap =
-new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
+Bitmap bitmap = new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET.uri)).get();
TransformerEndToEndTest.java

@@ -18,19 +18,16 @@ package androidx.media3.transformer;
-import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP3_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_SHORTER_AUDIO_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_SHORTER_AUDIO_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_180_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_270_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_URI_STRING;
-import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.MP3_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_SHORTER_AUDIO;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_180;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_270;
+import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET;

The roughly fifty hunks that follow (between lines 133 and 1761) repeat the same per-call-site substitutions throughout the test bodies:

-context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+context,
+testId,
+/* inputFormat= */ MP4_ASSET.videoFormat,
+/* outputFormat= */ MP4_ASSET.videoFormat);

-new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
+new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))

-.setUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING))
+.setUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri))

with the equivalent changes for MP3_ASSET, PNG_ASSET, JPG_ASSET, MP4_ASSET_WITH_INCREASING_TIMESTAMPS, MP4_ASSET_WITH_SHORTER_AUDIO and the MP4_TRIM_OPTIMIZATION assets, and with the *_FORMAT constants replaced by the corresponding .videoFormat field. In several hunks a statement that previously wrapped across two or three lines is also collapsed onto one line as part of the substitution, for example:

@@ -1206,16 +1223,17 @@ public class TransformerEndToEndTest {
-new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
-.setRemoveAudio(true)
-.build();
+new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri)).setRemoveAudio(true).build();

The one structurally distinctive hunk adjusts the derived AV1 output format alongside the substitution:

@@ -1500,13 +1518,14 @@ public class TransformerEndToEndTest {
-/* inputFormat= */ MP4_ASSET_FORMAT,
-/* outputFormat= */ MP4_ASSET_FORMAT
+/* inputFormat= */ MP4_ASSET.videoFormat,
+/* outputFormat= */ MP4_ASSET
+.videoFormat
 .buildUpon()
 .setSampleMimeType(MimeTypes.VIDEO_AV1)
 .setCodecs(null)
 .build());
-MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_URI_STRING));
+MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET.uri));
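The commit message leaves room for follow-ups such as track flags and a local-versus-remote helper. None of that is part of this change; a purely hypothetical shape for such helpers on the AssetInfo sketched earlier could be:

// Hypothetical additions, not present in this commit.
public boolean hasVideoTrack() {
  // Assumes videoFormat is null for assets without a video track, as in the sketch above.
  return videoFormat != null;
}

public boolean isRemote() {
  // Assumes local test assets use asset:/// or file:// uris and remote ones use http(s).
  return uri.startsWith("http://") || uri.startsWith("https://");
}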
@ -22,9 +22,8 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIX
|
||||
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
|
||||
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
|
||||
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static com.google.common.truth.Truth.assertThat;
@ -83,7 +82,10 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesEachWithOneVideoMediaItem_succeeds() throws Exception {
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);

Composition composition =
createComposition(
@ -93,15 +95,14 @@ public final class TransformerMultiSequenceCompositionTest {
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
/* firstSequenceMediaItems= */ ImmutableList.of(
editedMediaItemByClippingVideo(
MP4_ASSET_URI_STRING,
MP4_ASSET.uri,
/* effects= */ ImmutableList.of(
new AlphaScale(0.5f),
new ScaleAndRotateTransformation.Builder()
.setRotationDegrees(180)
.build()))),
/* secondSequenceMediaItems= */ ImmutableList.of(
editedMediaItemByClippingVideo(
MP4_ASSET_URI_STRING, /* effects= */ ImmutableList.of())),
editedMediaItemByClippingVideo(MP4_ASSET.uri, /* effects= */ ImmutableList.of())),
VideoCompositorSettings.DEFAULT);

ExportTestResult result =
@ -117,7 +118,10 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesOneWithVideoOneWithImage_succeeds() throws Exception {
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);

Composition composition =
createComposition(
@ -127,15 +131,14 @@ public final class TransformerMultiSequenceCompositionTest {
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
/* firstSequenceMediaItems= */ ImmutableList.of(
editedMediaItemByClippingVideo(
MP4_ASSET_URI_STRING,
MP4_ASSET.uri,
/* effects= */ ImmutableList.of(
new AlphaScale(0.5f),
new ScaleAndRotateTransformation.Builder()
.setRotationDegrees(180)
.build()))),
/* secondSequenceMediaItems= */ ImmutableList.of(
editedMediaItemOfOneFrameImage(
JPG_ASSET_URI_STRING, /* effects= */ ImmutableList.of())),
editedMediaItemOfOneFrameImage(JPG_ASSET.uri, /* effects= */ ImmutableList.of())),
VideoCompositorSettings.DEFAULT);

ExportTestResult result =
@ -151,7 +154,10 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesWithVideoCompositorSettings_succeeds() throws Exception {
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);

VideoCompositorSettings pictureInPictureVideoCompositorSettings =
new VideoCompositorSettings() {
@ -183,15 +189,14 @@ public final class TransformerMultiSequenceCompositionTest {
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
/* firstSequenceMediaItems= */ ImmutableList.of(
editedMediaItemByClippingVideo(
MP4_ASSET_URI_STRING,
MP4_ASSET.uri,
/* effects= */ ImmutableList.of(
new AlphaScale(0.5f),
new ScaleAndRotateTransformation.Builder()
.setRotationDegrees(180)
.build()))),
/* secondSequenceMediaItems= */ ImmutableList.of(
editedMediaItemByClippingVideo(
MP4_ASSET_URI_STRING, /* effects= */ ImmutableList.of())),
editedMediaItemByClippingVideo(MP4_ASSET.uri, /* effects= */ ImmutableList.of())),
pictureInPictureVideoCompositorSettings);

ExportTestResult result =

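Note: the hunks in this file replace the paired *_URI_STRING and *_FORMAT constants with a single per-asset object whose uri and videoFormat fields are read at each call site (later files in this diff also read videoDurationUs). As a rough, hypothetical sketch only — the real holder class in AndroidTestUtil, its name, and how it is constructed are not shown in these hunks — such an object could look like:

// Hypothetical sketch; only the field names mirror the accesses visible in this diff.
import androidx.media3.common.Format;

public final class TestAsset {
  public final String uri;
  public final Format videoFormat;
  public final long videoDurationUs;

  public TestAsset(String uri, Format videoFormat, long videoDurationUs) {
    this.uri = uri;
    this.videoFormat = videoFormat;
    this.videoDurationUs = videoDurationUs;
  }
}

// Example call-site shape after the migration:
// MediaItem mediaItem = MediaItem.fromUri(MP4_ASSET.uri);
// assumeFormatsSupported(context, testId, MP4_ASSET.videoFormat, MP4_ASSET.videoFormat);
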
@ -15,8 +15,7 @@
*/
package androidx.media3.transformer;

import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
@ -80,8 +79,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 0,
@ -132,8 +131,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 0,
@ -167,8 +166,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 0,
@ -222,8 +221,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 2_000L,
@ -274,8 +273,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 0,
@ -329,8 +328,8 @@ public class TransformerPauseResumeTest {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
Composition composition =
buildSingleSequenceComposition(
/* clippingStartPositionMs= */ 0,
@ -385,7 +384,7 @@ public class TransformerPauseResumeTest {
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(
new MediaItem.Builder()
.setUri(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING)
.setUri(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(clippingStartPositionMs)

@ -17,7 +17,7 @@ package androidx.media3.transformer;

import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.isRunningOnEmulator;
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
@ -115,7 +115,7 @@ public class TransformerProgressTest {
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(
MediaItem.fromUri(AndroidTestUtil.MP4_ASSET_URI_STRING))
MediaItem.fromUri(AndroidTestUtil.MP4_ASSET.uri))
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
@ -173,7 +173,7 @@ public class TransformerProgressTest {
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(MP4_TRIM_OPTIMIZATION_URI_STRING)
.setUri(MP4_TRIM_OPTIMIZATION.uri)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(500)
@ -253,7 +253,7 @@ public class TransformerProgressTest {
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(MP4_TRIM_OPTIMIZATION_URI_STRING)
.setUri(MP4_TRIM_OPTIMIZATION.uri)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(500)

@ -21,21 +21,16 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||
import static androidx.media3.common.util.Util.SDK_INT;
|
||||
import static androidx.media3.effect.DebugTraceUtil.EVENT_SURFACE_TEXTURE_TRANSFORM_FIX;
|
||||
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.BT601_MOV_ASSET_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.BT601_MOV_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD_VIDEO_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_LINES_1080P_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.BT601_MOV_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD_VIDEO;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_LINES_1080P;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
|
||||
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
|
||||
@ -112,26 +107,29 @@ public final class TransformerSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withNoCompositionPresentationAndWithPerMediaItemEffects() throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
OverlayEffect overlayEffect = createOverlayEffect();
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
MP4_ASSET_URI_STRING,
|
||||
MP4_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
Presentation.createForWidthAndHeight(
|
||||
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT)),
|
||||
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
oneFrameFromImage(
|
||||
JPG_ASSET_URI_STRING,
|
||||
JPG_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(72).build(),
|
||||
overlayEffect)),
|
||||
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
|
||||
oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT),
|
||||
// Transition to a different aspect ratio.
|
||||
oneFrameFromImage(
|
||||
JPG_ASSET_URI_STRING,
|
||||
JPG_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
Presentation.createForWidthAndHeight(
|
||||
EXPORT_WIDTH / 2, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT),
|
||||
@ -151,17 +149,19 @@ public final class TransformerSequenceEffectTest {
|
||||
@Test
|
||||
public void export1080x720_withAllAvailableDecoders_doesNotStretchOutputOnAny() throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_ASSET_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_ASSET.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
MP4_ASSET_URI_STRING, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
clippedVideo(MP4_ASSET.uri, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
boolean atLeastOneDecoderSucceeds = false;
|
||||
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
|
||||
@ -188,20 +188,18 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_PORTRAIT_ASSET_FORMAT,
|
||||
/* outputFormat= */ MP4_PORTRAIT_ASSET_FORMAT);
|
||||
/* inputFormat= */ MP4_PORTRAIT_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_PORTRAIT_ASSET.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_PORTRAIT_ASSET_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_PORTRAIT_ASSET.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING,
|
||||
NO_EFFECT,
|
||||
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
MP4_PORTRAIT_ASSET.uri, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
boolean atLeastOneDecoderSucceeds = false;
|
||||
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
|
||||
@ -228,20 +226,18 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ BT601_MOV_ASSET_FORMAT,
|
||||
/* outputFormat= */ BT601_MOV_ASSET_FORMAT);
|
||||
/* inputFormat= */ BT601_MOV_ASSET.videoFormat,
|
||||
/* outputFormat= */ BT601_MOV_ASSET.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(BT601_MOV_ASSET_FORMAT.sampleMimeType),
|
||||
checkNotNull(BT601_MOV_ASSET.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
BT601_MOV_ASSET_URI_STRING,
|
||||
NO_EFFECT,
|
||||
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
BT601_MOV_ASSET.uri, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
boolean atLeastOneDecoderSucceeds = false;
|
||||
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
|
||||
@ -269,20 +265,18 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_AV1_VIDEO.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_AV1_VIDEO.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_ASSET_AV1_VIDEO_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_ASSET_AV1_VIDEO.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
MP4_ASSET_AV1_VIDEO_URI_STRING,
|
||||
NO_EFFECT,
|
||||
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
MP4_ASSET_AV1_VIDEO.uri, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
boolean atLeastOneDecoderSucceeds = false;
|
||||
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
|
||||
@ -309,11 +303,11 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_CHECKERBOARD_VIDEO.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_ASSET_CHECKERBOARD_VIDEO.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
@ -321,7 +315,7 @@ public final class TransformerSequenceEffectTest {
|
||||
Presentation.createForWidthAndHeight(
|
||||
/* width= */ 320, /* height= */ 240, Presentation.LAYOUT_SCALE_TO_FIT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_CHECKERBOARD_VIDEO_URI_STRING,
|
||||
MP4_ASSET_CHECKERBOARD_VIDEO.uri,
|
||||
NO_EFFECT,
|
||||
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
DebugTraceUtil.enableTracing = true;
|
||||
@ -354,12 +348,12 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(PNG_ASSET_LINES_1080P_URI_STRING))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(PNG_ASSET_LINES_1080P.uri))
|
||||
.setFrameRate(30)
|
||||
.setDurationUs(C.MICROS_PER_SECOND / 4)
|
||||
.build());
|
||||
@ -410,7 +404,8 @@ public final class TransformerSequenceEffectTest {
|
||||
int exportWidth = 640;
|
||||
int exportHeight = 240;
|
||||
Format outputFormat =
|
||||
MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT
|
||||
MP4_ASSET_WITH_INCREASING_TIMESTAMPS
|
||||
.videoFormat
|
||||
.buildUpon()
|
||||
.setWidth(exportWidth)
|
||||
.setHeight(exportHeight)
|
||||
@ -418,12 +413,12 @@ public final class TransformerSequenceEffectTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
outputFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(PNG_ASSET_LINES_1080P_URI_STRING))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(PNG_ASSET_LINES_1080P.uri))
|
||||
.setFrameRate(30)
|
||||
.setDurationUs(C.MICROS_PER_SECOND / 4)
|
||||
.setEffects(
|
||||
@ -472,24 +467,27 @@ public final class TransformerSequenceEffectTest {
|
||||
|| Ascii.equalsIgnoreCase(Util.MODEL, "vivo 1820")));
|
||||
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForWidthAndHeight(
|
||||
EXPORT_WIDTH, /* height= */ EXPORT_WIDTH, Presentation.LAYOUT_SCALE_TO_FIT),
|
||||
oneFrameFromImage(
|
||||
JPG_ASSET_URI_STRING,
|
||||
JPG_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(90).build(),
|
||||
Presentation.createForWidthAndHeight(
|
||||
EXPORT_WIDTH, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT))),
|
||||
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
|
||||
oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_URI_STRING,
|
||||
MP4_ASSET.uri,
|
||||
ImmutableList.of(RgbFilter.createInvertedFilter()),
|
||||
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_ASSET_URI_STRING,
|
||||
MP4_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
Presentation.createForWidthAndHeight(
|
||||
EXPORT_WIDTH / 2, EXPORT_HEIGHT, Presentation.LAYOUT_SCALE_TO_FIT),
|
||||
@ -509,15 +507,17 @@ public final class TransformerSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withCompositionPresentationAndNoVideoEffects() throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
oneFrameFromImage(JPG_PORTRAIT_ASSET_URI_STRING, NO_EFFECT));
|
||||
oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT),
|
||||
clippedVideo(MP4_PORTRAIT_ASSET.uri, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(MP4_ASSET.uri, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
oneFrameFromImage(JPG_PORTRAIT_ASSET.uri, NO_EFFECT));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, getLinearColorSpaceTransformer())
|
||||
@ -533,13 +533,16 @@ public final class TransformerSequenceEffectTest {
|
||||
public void export_withCompositionPresentationAndNoVideoEffectsForFirstMediaItem()
|
||||
throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(MP4_ASSET.uri, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING,
|
||||
MP4_PORTRAIT_ASSET.uri,
|
||||
ImmutableList.of(RgbFilter.createInvertedFilter()),
|
||||
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
|
||||
@ -556,17 +559,20 @@ public final class TransformerSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withBt601AndBt709MediaItems() throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ BT601_MOV_ASSET_FORMAT, /* outputFormat= */ null);
|
||||
context, testId, /* inputFormat= */ BT601_MOV_ASSET.videoFormat, /* outputFormat= */ null);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
BT601_MOV_ASSET_URI_STRING,
|
||||
BT601_MOV_ASSET.uri,
|
||||
ImmutableList.of(RgbFilter.createInvertedFilter()),
|
||||
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
clippedVideo(MP4_ASSET.uri, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, getLinearColorSpaceTransformer())
|
||||
@ -581,17 +587,20 @@ public final class TransformerSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withBt601VideoAndBt709ImageMediaItems() throws Exception {
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ BT601_MOV_ASSET_FORMAT, /* outputFormat= */ null);
|
||||
context, testId, /* inputFormat= */ BT601_MOV_ASSET.videoFormat, /* outputFormat= */ null);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
BT601_MOV_ASSET_URI_STRING,
|
||||
BT601_MOV_ASSET.uri,
|
||||
ImmutableList.of(RgbFilter.createInvertedFilter()),
|
||||
SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT));
|
||||
oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, getLinearColorSpaceTransformer())
|
||||
|
@ -17,8 +17,8 @@

package androidx.media3.transformer;

import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ULTRA_HDR_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.assertSdrColors;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
@ -85,7 +85,7 @@ public final class TransformerUltraHdrTest {
/* outputFormat= */ DOWNSCALED_ULTRA_HDR_FORMAT);
Composition composition =
createUltraHdrComposition(
/* tonemap= */ false, oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
/* tonemap= */ false, oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
ExportTestResult result =
@ -105,7 +105,7 @@ public final class TransformerUltraHdrTest {
/* outputFormat= */ DOWNSCALED_ULTRA_HDR_FORMAT);
Composition composition =
createUltraHdrComposition(
/* tonemap= */ true, oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
/* tonemap= */ true, oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
ExportTestResult result =
@ -125,7 +125,7 @@ public final class TransformerUltraHdrTest {
/* outputFormat= */ DOWNSCALED_ULTRA_HDR_FORMAT);
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence(oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT)))
new EditedMediaItemSequence(oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT)))
.build();

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
@ -141,7 +141,7 @@ public final class TransformerUltraHdrTest {
public void exportNonUltraHdrImage_withUltraHdrEnabled_exportsSdr() throws Exception {
Composition composition =
createUltraHdrComposition(
/* tonemap= */ false, oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT));
/* tonemap= */ false, oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT));

ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
@ -156,8 +156,8 @@ public final class TransformerUltraHdrTest {
Composition composition =
createUltraHdrComposition(
/* tonemap= */ false,
oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT),
oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
oneFrameFromImage(JPG_ASSET.uri, NO_EFFECT),
oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));

ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())

@ -16,7 +16,7 @@
package androidx.media3.transformer;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeTrue;
@ -61,7 +61,10 @@ public class TransformerWithInAppMuxerEndToEndAndroidTest {
// signal a lack of support for H265_MP4's actual format, but pass this test when using
// MP4_ASSET_FORMAT for skipping.
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
Transformer transformer =
new Transformer.Builder(context)
.setMuxerFactory(new InAppMuxer.Factory.Builder().build())

@ -17,9 +17,8 @@
|
||||
package androidx.media3.transformer;
|
||||
|
||||
import static androidx.media3.common.util.Util.usToMs;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DURATION_US;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_SINGLE_PIXEL_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
import android.app.Instrumentation;
|
||||
@ -89,7 +88,7 @@ public class VideoTimestampConsistencyTest {
|
||||
EditedMediaItem image =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
@ -102,8 +101,8 @@ public class VideoTimestampConsistencyTest {
|
||||
@Test
|
||||
public void oneVideoComposition_timestampsAreConsistent() throws Exception {
|
||||
EditedMediaItem video =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
compareTimestamps(ImmutableList.of(video), MP4_ASSET_FRAME_TIMESTAMPS_US);
|
||||
@ -115,19 +114,19 @@ public class VideoTimestampConsistencyTest {
|
||||
long clippedStartUs = 500_000L;
|
||||
EditedMediaItem video1 =
|
||||
new EditedMediaItem.Builder(
|
||||
MediaItem.fromUri(MP4_ASSET_URI_STRING)
|
||||
MediaItem.fromUri(MP4_ASSET.uri)
|
||||
.buildUpon()
|
||||
.setClippingConfiguration(
|
||||
new MediaItem.ClippingConfiguration.Builder()
|
||||
.setStartPositionMs(usToMs(clippedStartUs))
|
||||
.build())
|
||||
.build())
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
EditedMediaItem video2 =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
ImmutableList<Long> expectedTimestamps =
|
||||
@ -136,7 +135,7 @@ public class VideoTimestampConsistencyTest {
|
||||
.addAll(
|
||||
Lists.transform(
|
||||
MP4_ASSET_FRAME_TIMESTAMPS_US,
|
||||
timestampUs -> ((MP4_ASSET_DURATION_US - clippedStartUs) + timestampUs)))
|
||||
timestampUs -> ((MP4_ASSET.videoDurationUs - clippedStartUs) + timestampUs)))
|
||||
.build();
|
||||
|
||||
compareTimestamps(ImmutableList.of(video1, video2), expectedTimestamps);
|
||||
@ -145,13 +144,13 @@ public class VideoTimestampConsistencyTest {
|
||||
@Test
|
||||
public void twoVideosComposition_timestampsAreConsistent() throws Exception {
|
||||
EditedMediaItem video1 =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
EditedMediaItem video2 =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
ImmutableList<Long> expectedTimestamps =
|
||||
@ -160,7 +159,7 @@ public class VideoTimestampConsistencyTest {
|
||||
.addAll(
|
||||
Lists.transform(
|
||||
MP4_ASSET_FRAME_TIMESTAMPS_US,
|
||||
timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
|
||||
timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
|
||||
.build();
|
||||
|
||||
compareTimestamps(ImmutableList.of(video1, video2), expectedTimestamps);
|
||||
@ -173,7 +172,7 @@ public class VideoTimestampConsistencyTest {
|
||||
EditedMediaItem image1 =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
@ -182,7 +181,7 @@ public class VideoTimestampConsistencyTest {
|
||||
EditedMediaItem image2 =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
@ -209,15 +208,15 @@ public class VideoTimestampConsistencyTest {
|
||||
EditedMediaItem image =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
.setFrameRate(30)
|
||||
.build();
|
||||
EditedMediaItem video =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
|
||||
ImmutableList<Long> expectedTimestamps =
|
||||
@ -236,13 +235,13 @@ public class VideoTimestampConsistencyTest {
|
||||
long imageDurationUs = 500_000L;
|
||||
|
||||
EditedMediaItem video =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
EditedMediaItem image =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
@ -255,7 +254,7 @@ public class VideoTimestampConsistencyTest {
|
||||
.addAll(
|
||||
Lists.transform(
|
||||
IMAGE_TIMESTAMPS_US_500_MS_30_FPS,
|
||||
timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
|
||||
timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
|
||||
.build();
|
||||
|
||||
compareTimestamps(ImmutableList.of(video, image), expectedTimestamps);
|
||||
@ -268,19 +267,19 @@ public class VideoTimestampConsistencyTest {
|
||||
|
||||
EditedMediaItem video =
|
||||
new EditedMediaItem.Builder(
|
||||
MediaItem.fromUri(MP4_ASSET_URI_STRING)
|
||||
MediaItem.fromUri(MP4_ASSET.uri)
|
||||
.buildUpon()
|
||||
.setClippingConfiguration(
|
||||
new MediaItem.ClippingConfiguration.Builder()
|
||||
.setStartPositionMs(usToMs(clippedStartUs))
|
||||
.build())
|
||||
.build())
|
||||
.setDurationUs(MP4_ASSET_DURATION_US)
|
||||
.setDurationUs(MP4_ASSET.videoDurationUs)
|
||||
.build();
|
||||
EditedMediaItem image =
|
||||
new EditedMediaItem.Builder(
|
||||
new MediaItem.Builder()
|
||||
.setUri(JPG_SINGLE_PIXEL_URI_STRING)
|
||||
.setUri(JPG_SINGLE_PIXEL_ASSET.uri)
|
||||
.setImageDurationMs(usToMs(imageDurationUs))
|
||||
.build())
|
||||
.setDurationUs(imageDurationUs)
|
||||
@ -293,7 +292,7 @@ public class VideoTimestampConsistencyTest {
|
||||
.addAll(
|
||||
Lists.transform(
|
||||
IMAGE_TIMESTAMPS_US_500_MS_30_FPS,
|
||||
timestampUs -> ((MP4_ASSET_DURATION_US - clippedStartUs) + timestampUs)))
|
||||
timestampUs -> ((MP4_ASSET.videoDurationUs - clippedStartUs) + timestampUs)))
|
||||
.build();
|
||||
|
||||
compareTimestamps(ImmutableList.of(video, image), expectedTimestamps);
|
||||
|
@ -23,9 +23,9 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePix
|
||||
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
|
||||
import static androidx.media3.test.utils.TestUtil.assertBitmapsAreSimilar;
|
||||
import static androidx.media3.test.utils.VideoFrameProcessorTestRunner.VIDEO_FRAME_PROCESSING_WAIT_MS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
|
||||
import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD;
|
||||
@ -152,7 +152,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
assumeFormatsSupported(
|
||||
getApplicationContext(),
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
|
||||
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
|
||||
@ -172,7 +172,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
assumeFormatsSupported(
|
||||
getApplicationContext(),
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
|
||||
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
|
||||
@ -200,7 +200,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
assumeFormatsSupported(
|
||||
getApplicationContext(),
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
videoFrameProcessorTestRunner =
|
||||
getDefaultFrameProcessorTestRunnerBuilder(testId)
|
||||
@ -218,7 +218,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
assumeFormatsSupported(
|
||||
getApplicationContext(),
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
|
||||
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
|
||||
@ -242,7 +242,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
assumeFormatsSupported(
|
||||
getApplicationContext(),
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
|
||||
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
|
||||
@ -269,7 +269,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Ignore("TODO: b/344529901 - enable this test when fixed.")
|
||||
public void ultraHdrBitmapAndTextOverlay_hlg10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -322,7 +322,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void ultraHdrBitmapOverlay_hlg10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -369,7 +369,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void ultraHdrBitmapOverlay_hdr10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -404,7 +404,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void textOverlay_hdr10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -450,7 +450,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -475,7 +475,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hlg10InputAndHdr10Output_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format inputFormat = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format inputFormat = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
Format outputFormat =
|
||||
inputFormat
|
||||
.buildUpon()
|
||||
@ -509,7 +509,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hlg10TextureInput_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -537,7 +537,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
|
||||
@Test
|
||||
public void noEffects_hlg10UltraHDRImageInput_matchesGoldenFile() throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
ColorInfo outputColorInfo =
|
||||
new ColorInfo.Builder()
|
||||
@ -571,7 +571,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hdr10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -596,7 +596,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hdr10InputAndHlg10Output_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format inputFormat = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format inputFormat = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
Format outputFormat =
|
||||
inputFormat
|
||||
.buildUpon()
|
||||
@ -630,7 +630,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noEffects_hdr10TextureInput_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -658,7 +658,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
|
||||
@Test
|
||||
public void noEffects_hdr10UltraHDRImageInput_matchesGoldenFile() throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
ColorInfo outputColorInfo =
|
||||
new ColorInfo.Builder()
|
||||
@ -692,7 +692,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noOpEffect_hlg10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -718,7 +718,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noOpEffect_hlg10TextureInput_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -746,7 +746,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
|
||||
@Test
|
||||
public void noOpEffect_hlg10UltraHDRImageInput_matchesGoldenFile() throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
ColorInfo outputColorInfo =
|
||||
new ColorInfo.Builder()
|
||||
@ -781,7 +781,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noOpEffect_hdr10Input_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -807,7 +807,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
@Test
|
||||
public void noOpEffect_hdr10TextureInput_matchesGoldenFile() throws Exception {
|
||||
Context context = getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
ColorInfo colorInfo = checkNotNull(format.colorInfo);
|
||||
@ -835,7 +835,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
|
||||
|
||||
@Test
|
||||
public void noOpEffect_hdr10UltraHDRImageInput_matchesGoldenFile() throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
ColorInfo outputColorInfo =
|
||||
new ColorInfo.Builder()
|
||||
|
@ -17,18 +17,14 @@ package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.common.util.Util.SDK_INT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.FORCE_TRANSCODE_VIDEO_EFFECTS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_4K60_PORTRAIT_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_4K60_PORTRAIT_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_8K24_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_8K24_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_4K60_PORTRAIT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_8K24;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_BT2020_SDR;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_BT2020_SDR_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_SEF_H265_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_SEF_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_PIXEL_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_SEF;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_SEF_H265;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_PIXEL;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
|
||||
import static androidx.media3.transformer.ExportResult.CONVERSION_PROCESS_TRANSMUXED_AND_TRANSCODED;
|
||||
@ -92,14 +88,13 @@ public class ExportTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Transformer transformer =
|
||||
new Transformer.Builder(context)
|
||||
.setEncoderFactory(new ForceEncodeEncoderFactory(context))
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
|
||||
boolean skipCalculateSsim =
|
||||
(Util.SDK_INT < 33 && (Util.MODEL.equals("SM-F711U1") || Util.MODEL.equals("SM-F926U1")))
|
||||
|| (Util.SDK_INT == 33 && Util.MODEL.equals("LE2121"));
|
||||
@ -117,8 +112,7 @@ public class ExportTest {
|
||||
public void exportWithoutDecodeEncode() throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
|
||||
// No need to calculate SSIM because no decode/encoding, so input frames match output frames.
|
||||
|
||||
ExportTestResult result =
|
||||
@ -135,7 +129,7 @@ public class ExportTest {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -146,8 +140,7 @@ public class ExportTest {
new VideoEncoderSettings.Builder().setBitrate(5_000_000).build())
.build()))
.build();
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).build();
boolean skipCalculateSsim =
@ -169,7 +162,7 @@ public class ExportTest {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_4K60_PORTRAIT_FORMAT,
/* inputFormat= */ MP4_ASSET_4K60_PORTRAIT.videoFormat,
/* outputFormat= */ null);
// Reference: b/262710361
assumeFalse(
@ -179,7 +172,7 @@ public class ExportTest {
new Transformer.Builder(context)
.setEncoderFactory(new ForceEncodeEncoderFactory(context))
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_4K60_PORTRAIT_URI_STRING));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_4K60_PORTRAIT.uri));
boolean skipCalculateSsim = Util.SDK_INT < 30 && Util.DEVICE.equals("joyeuse");

ExportTestResult result =
@ -203,12 +196,12 @@ public class ExportTest {
|| Ascii.equalsIgnoreCase(Util.MODEL, "le2121"));
Context context = ApplicationProvider.getApplicationContext();
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_8K24_FORMAT, /* outputFormat= */ null);
context, testId, /* inputFormat= */ MP4_ASSET_8K24.videoFormat, /* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(new ForceEncodeEncoderFactory(context))
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_8K24_URI_STRING));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_8K24.uri));
// TODO: b/281824052 - have requestCalculateSsim always be true after linked bug is fixed.
boolean requestCalculateSsim = !Util.MODEL.equals("SM-G991B");

@ -231,7 +224,7 @@ public class ExportTest {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_8K24_FORMAT,
/* inputFormat= */ MP4_ASSET_8K24.videoFormat,
/* outputFormat= */ new Format.Builder()
.setSampleMimeType(MimeTypes.VIDEO_H264)
.setWidth(downscaledWidth)
@ -244,7 +237,7 @@ public class ExportTest {
.build()
.run(
testId,
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_8K24_URI_STRING)))
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_8K24.uri)))
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
@ -264,14 +257,13 @@ public class ExportTest {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(new ForceEncodeEncoderFactory(context))
.build();
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).build();
boolean skipCalculateSsim =
@ -295,7 +287,7 @@ public class ExportTest {
.setEncoderFactory(new ForceEncodeEncoderFactory(context))
.build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_URI_STRING)))
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET.uri)))
.setRemoveVideo(true)
.build();

@ -318,7 +310,7 @@ public class ExportTest {
}
Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_SEF_URI_STRING)))
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_SEF.uri)))
.setFlattenForSlowMotion(true)
.build();

@ -343,7 +335,7 @@ public class ExportTest {
}
Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_SEF_H265_URI_STRING)))
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_SEF_H265.uri)))
.setFlattenForSlowMotion(true)
.build();

@ -361,11 +353,10 @@ public class ExportTest {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
/* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
ImmutableList<Effect> videoEffects =
ImmutableList.of(new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build());
Effects effects = new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects);
@ -390,9 +381,12 @@ public class ExportTest {
throw new AssumptionViolatedException(reason);
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_BT2020_SDR_FORMAT, /* outputFormat= */ null);
context,
testId,
/* inputFormat= */ MP4_ASSET_BT2020_SDR.videoFormat,
/* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_BT2020_SDR));
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_BT2020_SDR.uri));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();

@ -420,7 +414,7 @@ public class ExportTest {
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(MP4_TRIM_OPTIMIZATION_PIXEL_URI_STRING)
.setUri(MP4_TRIM_OPTIMIZATION_PIXEL.uri)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(500)

@ -18,9 +18,7 @@ package androidx.media3.transformer.mh;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static com.google.common.truth.Truth.assertThat;
@ -74,7 +72,8 @@ public class ForceInterpretHdrVideoAsSdrTest {

// Force interpret HDR as SDR signals SDR input to the decoder, even if the actual input is HDR.
Format decoderInputFormat =
MP4_ASSET_720P_4_SECOND_HDR10_FORMAT
MP4_ASSET_720P_4_SECOND_HDR10
.videoFormat
.buildUpon()
.setColorInfo(ColorInfo.SDR_BT709_LIMITED)
.build();
@ -82,7 +81,7 @@ public class ForceInterpretHdrVideoAsSdrTest {

Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10)))
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri)))
.build();
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))
@ -114,7 +113,8 @@ public class ForceInterpretHdrVideoAsSdrTest {

// Force interpret HDR as SDR signals SDR input to the decoder, even if the actual input is HDR.
Format decoderInputFormat =
MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT
MP4_ASSET_1080P_5_SECOND_HLG10
.videoFormat
.buildUpon()
.setColorInfo(ColorInfo.SDR_BT709_LIMITED)
.build();
@ -122,7 +122,8 @@ public class ForceInterpretHdrVideoAsSdrTest {

Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10)))
new EditedMediaItem.Builder(
MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri)))
.build();
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))

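Note: the hunks above and below all follow one pattern: a `*_URI_STRING` constant becomes `<ASSET>.uri` and a `*_FORMAT` constant becomes `<ASSET>.videoFormat` on an `AndroidTestUtil.AssetInfo`. As a minimal sketch only, assuming just the two fields these call sites use (the actual `AndroidTestUtil.AssetInfo` may expose more, such as a builder or additional track metadata), the holder could look like:

import androidx.media3.common.Format;

/** Sketch of a per-asset info holder; only the fields referenced in this diff are modeled. */
public final class AssetInfo {
  /** Asset URI string, passed to MediaItem.fromUri / Uri.parse at call sites. */
  public final String uri;

  /** Video track format, passed to assumeFormatsSupported and HDR-capability checks. */
  public final Format videoFormat;

  public AssetInfo(String uri, Format videoFormat) {
    this.uri = uri;
    this.videoFormat = videoFormat;
  }
}

With such a holder, a call site reads, for example, MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri)) instead of the removed URI-string constant, as the replaced lines in these hunks show.
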
@ -19,13 +19,9 @@ import static androidx.media3.effect.DefaultVideoFrameProcessor.WORKING_COLOR_SP
|
||||
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.FORCE_TRANSCODE_VIDEO_EFFECTS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
|
||||
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
|
||||
@ -92,11 +88,11 @@ public final class HdrEditingTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri));
|
||||
|
||||
ExportTestResult exportTestResult =
|
||||
new TransformerAndroidTestRunner.Builder(context, transformer)
|
||||
@ -123,11 +119,11 @@ public final class HdrEditingTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri));
|
||||
|
||||
ExportTestResult exportTestResult =
|
||||
new TransformerAndroidTestRunner.Builder(context, transformer)
|
||||
@ -144,13 +140,13 @@ public final class HdrEditingTest {
|
||||
@Test
|
||||
public void exportAndTranscode_hdr10File_whenHdrEditingIsSupported() throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -169,13 +165,13 @@ public final class HdrEditingTest {
|
||||
@Test
|
||||
public void exportAndTranscode_hlg10File_whenHdrEditingIsSupported() throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -194,13 +190,13 @@ public final class HdrEditingTest {
|
||||
@Test
|
||||
public void exportAndTranscode_dolbyVisionFile_whenHdrEditingIsSupported() throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_DOLBY_VISION_HDR_FORMAT;
|
||||
Format format = MP4_ASSET_DOLBY_VISION_HDR.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -221,7 +217,7 @@ public final class HdrEditingTest {
|
||||
exportAndTranscode_av1FileWithAv1HdrEditingUnsupportedAndHevcHdrEditingSupported_fallsBackToH265()
|
||||
throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_AV1_2_SECOND_HDR10.videoFormat;
|
||||
if (EncoderUtil.getSupportedEncodersForHdrEditing(MimeTypes.VIDEO_H265, format.colorInfo)
|
||||
.isEmpty()) {
|
||||
String skipReason = "No H265 HDR editing support for " + format.colorInfo;
|
||||
@ -242,7 +238,7 @@ public final class HdrEditingTest {
|
||||
/* outputFormat= */ format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_H265).build());
|
||||
|
||||
Transformer transformer = new Transformer.Builder(context).build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_AV1_2_SECOND_HDR10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_AV1_2_SECOND_HDR10.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -260,7 +256,7 @@ public final class HdrEditingTest {
|
||||
public void exportAndTranscodeHdr_ignoringSdrWorkingColorSpace_whenHdrEditingIsSupported()
|
||||
throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceSupportsHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
|
||||
@ -273,7 +269,8 @@ public final class HdrEditingTest {
|
||||
.build())
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10)))
|
||||
new EditedMediaItem.Builder(
|
||||
MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri)))
|
||||
.setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS)
|
||||
.build();
|
||||
|
||||
@ -293,7 +290,7 @@ public final class HdrEditingTest {
|
||||
public void exportAndTranscode_hdr10File_whenHdrEditingUnsupported_toneMapsOrThrows()
|
||||
throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
Format format = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
|
||||
assumeDeviceDoesNotSupportHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -317,7 +314,7 @@ public final class HdrEditingTest {
|
||||
}
|
||||
})
|
||||
.build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -352,7 +349,7 @@ public final class HdrEditingTest {
|
||||
public void exportAndTranscode_hlg10File_whenHdrEditingUnsupported_toneMapsOrThrows()
|
||||
throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
Format format = MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat;
|
||||
assumeDeviceDoesNotSupportHdrEditing(testId, format);
|
||||
|
||||
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
|
||||
@ -374,7 +371,7 @@ public final class HdrEditingTest {
|
||||
}
|
||||
})
|
||||
.build();
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
|
@ -18,9 +18,7 @@ package androidx.media3.transformer.mh;
|
||||
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.FORCE_TRANSCODE_VIDEO_EFFECTS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
@ -67,7 +65,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer =
|
||||
@ -87,7 +85,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
})
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10)))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri)))
|
||||
.build();
|
||||
Composition composition =
|
||||
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))
|
||||
@ -127,7 +125,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer =
|
||||
@ -147,7 +145,8 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
})
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10)))
|
||||
new EditedMediaItem.Builder(
|
||||
MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri)))
|
||||
.build();
|
||||
Composition composition =
|
||||
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))
|
||||
@ -187,7 +186,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer =
|
||||
@ -207,7 +206,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
})
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10)))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10.uri)))
|
||||
.setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS)
|
||||
.build();
|
||||
Composition composition =
|
||||
@ -248,7 +247,7 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
|
||||
Transformer transformer =
|
||||
@ -268,7 +267,8 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
|
||||
})
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10)))
|
||||
new EditedMediaItem.Builder(
|
||||
MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri)))
|
||||
.setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS)
|
||||
.build();
|
||||
Composition composition =
|
||||
|
@ -17,11 +17,8 @@ package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR_FORMAT;
|
||||
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
@ -61,25 +58,25 @@ public class ToneMapHdrToSdrUsingOpenGlTest {
|
||||
@Test
|
||||
public void export_toneMap_hlg10File_toneMaps() throws Exception {
|
||||
assumeDeviceSupportsOpenGlToneMapping(
|
||||
testId, /* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
testId, /* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_1080P_5_SECOND_HLG10);
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_1080P_5_SECOND_HLG10.uri);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void export_toneMap_hdr10File_toneMaps() throws Exception {
|
||||
assumeDeviceSupportsOpenGlToneMapping(
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_720P_4_SECOND_HDR10);
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_720P_4_SECOND_HDR10.uri);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void export_toneMap_dolbyVisionFile_toneMaps() throws Exception {
|
||||
assumeDeviceSupportsOpenGlToneMapping(
|
||||
testId, /* inputFormat= */ MP4_ASSET_DOLBY_VISION_HDR_FORMAT);
|
||||
testId, /* inputFormat= */ MP4_ASSET_DOLBY_VISION_HDR.videoFormat);
|
||||
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_DOLBY_VISION_HDR);
|
||||
runTransformerWithOpenGlToneMapping(testId, MP4_ASSET_DOLBY_VISION_HDR.uri);
|
||||
}
|
||||
|
||||
private void runTransformerWithOpenGlToneMapping(String testId, String fileUri) throws Exception {
|
||||
|
@ -60,8 +60,8 @@ public final class TranscodeQualityTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT);
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat);
|
||||
// Skip on specific pre-API 34 devices where calculating SSIM fails.
|
||||
assumeFalse(
|
||||
(Util.SDK_INT < 33 && (Util.MODEL.equals("SM-F711U1") || Util.MODEL.equals("SM-F926U1")))
|
||||
@ -80,8 +80,7 @@ public final class TranscodeQualityTest {
|
||||
.build())
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(
|
||||
Uri.parse(AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
|
||||
MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).build();
|
||||
|
||||
@ -103,8 +102,9 @@ public final class TranscodeQualityTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS
|
||||
.videoFormat
|
||||
.buildUpon()
|
||||
.setSampleMimeType(MimeTypes.VIDEO_H265)
|
||||
.build());
|
||||
@ -114,8 +114,7 @@ public final class TranscodeQualityTest {
|
||||
Transformer transformer =
|
||||
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H265).build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(
|
||||
Uri.parse(AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING));
|
||||
MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).build();
|
||||
|
||||
@ -145,8 +144,7 @@ public final class TranscodeQualityTest {
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(
|
||||
Uri.parse(
|
||||
AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING));
|
||||
Uri.parse(AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).build();
|
||||
|
||||
|
@ -16,8 +16,8 @@
|
||||
package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.common.MimeTypes.VIDEO_H264;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ULTRA_HDR_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
@ -63,15 +63,15 @@ public class TranscodeSpeedTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* outputFormat= */ AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT);
|
||||
/* inputFormat= */ MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
/* outputFormat= */ MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat);
|
||||
Transformer transformer =
|
||||
new Transformer.Builder(context)
|
||||
.setVideoMimeType(MimeTypes.VIDEO_H264)
|
||||
.setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(context))
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING))
|
||||
MediaItem.fromUri(Uri.parse(MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS.uri))
|
||||
.buildUpon()
|
||||
.setClippingConfiguration(
|
||||
new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(15_000).build())
|
||||
@ -101,7 +101,7 @@ public class TranscodeSpeedTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
|
||||
/* inputFormat= */ MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS.videoFormat,
|
||||
outputFormat);
|
||||
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
|
||||
new DefaultVideoFrameProcessor.Factory.Builder()
|
||||
@ -126,7 +126,7 @@ public class TranscodeSpeedTest {
|
||||
// This test uses ULTRA_HDR_URI_STRING because it's high resolution.
|
||||
// Ultra HDR gainmap is ignored.
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(ULTRA_HDR_URI_STRING))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(JPG_ULTRA_HDR_ASSET.uri))
|
||||
.setFrameRate(30)
|
||||
.setDurationUs(isHighPerformance ? 45_000_000 : 15_000_000)
|
||||
.setEffects(
|
||||
|
@ -19,12 +19,9 @@ package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
|
||||
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
|
||||
@ -89,17 +86,17 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withSdrThenHdr() throws Exception {
|
||||
assumeDeviceSupportsOpenGlToneMapping(
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING,
|
||||
MP4_PORTRAIT_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
new Crop(/* left= */ -1, /* right= */ 0, /* bottom= */ -1, /* top= */ 0)),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
@ -122,22 +119,22 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
*/
|
||||
@Test
|
||||
public void export_withHdrThenSdr_whenHdrEditingSupported_throws() throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING,
|
||||
MP4_PORTRAIT_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
new Crop(/* left= */ -1, /* right= */ 0, /* bottom= */ -1, /* top= */ 0)),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
@ -161,24 +158,24 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
*/
|
||||
@Test
|
||||
public void export_withHdrThenSdr_whenHdrEditingUnsupported() throws Exception {
|
||||
assumeDeviceDoesNotSupportHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceDoesNotSupportHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeDeviceSupportsOpenGlToneMapping(
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_PORTRAIT_ASSET_URI_STRING,
|
||||
MP4_PORTRAIT_ASSET.uri,
|
||||
ImmutableList.of(
|
||||
new Crop(/* left= */ -1, /* right= */ 0, /* bottom= */ -1, /* top= */ 0)),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
@ -198,22 +195,22 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withHdr10ThenHdr10_whenHdrEditingSupported_producesExpectedFrame()
|
||||
throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
@ -232,28 +229,28 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
@Test
|
||||
public void export_withHlg10ThenHdr10_whenHdrEditingSupported_producesExpectedFrame()
|
||||
throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
Presentation.createForHeight(EXPORT_HEIGHT),
|
||||
clippedVideo(
|
||||
MP4_ASSET_1080P_5_SECOND_HLG10,
|
||||
MP4_ASSET_1080P_5_SECOND_HLG10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
|
||||
clippedVideo(
|
||||
MP4_ASSET_720P_4_SECOND_HDR10,
|
||||
MP4_ASSET_720P_4_SECOND_HDR10.uri,
|
||||
ImmutableList.of(
|
||||
new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()),
|
||||
/* endPositionMs= */ SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
|
||||
@ -273,22 +270,22 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
@Test
|
||||
public void export1920x1080Hlg_withAllAvailableDecoders_doesNotStretchOutputOnAny()
|
||||
throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(
|
||||
MP4_ASSET_1080P_5_SECOND_HLG10,
|
||||
MP4_ASSET_1080P_5_SECOND_HLG10.uri,
|
||||
NO_EFFECT,
|
||||
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
@ -312,21 +309,21 @@ public final class TransformerHdrSequenceEffectTest {
|
||||
@Test
|
||||
public void export720x1280Av1Hdr10_withAllAvailableDecoders_doesNotStretchOutputOnAny()
|
||||
throws Exception {
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_AV1_2_SECOND_HDR10.videoFormat);
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT,
|
||||
/* outputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT);
|
||||
/* inputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10.videoFormat,
|
||||
/* outputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10.videoFormat);
|
||||
List<MediaCodecInfo> mediaCodecInfoList =
|
||||
MediaCodecSelector.DEFAULT.getDecoderInfos(
|
||||
checkNotNull(MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT.sampleMimeType),
|
||||
checkNotNull(MP4_ASSET_AV1_2_SECOND_HDR10.videoFormat.sampleMimeType),
|
||||
/* requiresSecureDecoder= */ false,
|
||||
/* requiresTunnelingDecoder= */ false);
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(MP4_ASSET_AV1_2_SECOND_HDR10, NO_EFFECT, C.MILLIS_PER_SECOND / 4));
|
||||
clippedVideo(MP4_ASSET_AV1_2_SECOND_HDR10.uri, NO_EFFECT, C.MILLIS_PER_SECOND / 4));
|
||||
|
||||
boolean atLeastOneDecoderSucceeds = false;
|
||||
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
|
||||
|
@ -18,9 +18,8 @@
|
||||
package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.JPG_ULTRA_HDR_ASSET;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assertSdrColors;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
|
||||
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
|
||||
@ -76,7 +75,7 @@ public final class TransformerMhUltraHdrTest {
|
||||
assumeDeviceSupportsUltraHdrEditing();
|
||||
Composition composition =
|
||||
createUltraHdrComposition(
|
||||
/* tonemap= */ false, oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
|
||||
/* tonemap= */ false, oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
|
||||
@ -95,8 +94,8 @@ public final class TransformerMhUltraHdrTest {
|
||||
Composition composition =
|
||||
createComposition(
|
||||
/* presentation= */ null,
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10, NO_EFFECT, ONE_FRAME_END_POSITION_MS),
|
||||
oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10.uri, NO_EFFECT, ONE_FRAME_END_POSITION_MS),
|
||||
oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
|
||||
@ -115,8 +114,8 @@ public final class TransformerMhUltraHdrTest {
|
||||
Composition composition =
|
||||
createUltraHdrComposition(
|
||||
/* tonemap= */ false,
|
||||
oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT),
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10, NO_EFFECT, ONE_FRAME_END_POSITION_MS));
|
||||
oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT),
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10.uri, NO_EFFECT, ONE_FRAME_END_POSITION_MS));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
|
||||
@ -131,12 +130,12 @@ public final class TransformerMhUltraHdrTest {
|
||||
|
||||
@Test
|
||||
public void exportTonemappedHdrVideoThenUltraHdrImage_exportsSdr() throws Exception {
|
||||
assumeDeviceSupportsOpenGlToneMapping(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
assumeDeviceSupportsOpenGlToneMapping(testId, MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
Composition composition =
|
||||
createUltraHdrComposition(
|
||||
/* tonemap= */ true,
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10, NO_EFFECT, ONE_FRAME_END_POSITION_MS),
|
||||
oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));
|
||||
clippedVideo(MP4_ASSET_1080P_5_SECOND_HLG10.uri, NO_EFFECT, ONE_FRAME_END_POSITION_MS),
|
||||
oneFrameFromImage(JPG_ULTRA_HDR_ASSET.uri, NO_EFFECT));
|
||||
|
||||
ExportTestResult result =
|
||||
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
|
||||
@ -163,6 +162,6 @@ public final class TransformerMhUltraHdrTest {
|
||||
getApplicationContext(), testId, "Ultra HDR is not supported on this API level.");
|
||||
throw new AssumptionViolatedException("Ultra HDR is not supported on this API level.");
|
||||
}
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
|
||||
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat);
|
||||
}
|
||||
}
|
||||
|
@ -15,8 +15,7 @@
|
||||
*/
|
||||
package androidx.media3.transformer.mh;
|
||||
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_FORMAT;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_URI_STRING;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
@ -57,13 +56,16 @@ public class TransformerWithInAppMuxerEndToEndMhTest {
|
||||
public void videoEditing_forAv1Video_completesSuccessfully() throws Exception {
|
||||
Context context = ApplicationProvider.getApplicationContext();
|
||||
assumeFormatsSupported(
|
||||
context, testId, /* inputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT, /* outputFormat= */ null);
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ MP4_ASSET_AV1_VIDEO.videoFormat,
|
||||
/* outputFormat= */ null);
|
||||
Transformer transformer =
|
||||
new Transformer.Builder(context)
|
||||
.setMuxerFactory(new InAppMuxer.Factory.Builder().build())
|
||||
.build();
|
||||
ImmutableList<Effect> videoEffects = ImmutableList.of(RgbFilter.createGrayscaleFilter());
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_AV1_VIDEO_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_AV1_VIDEO.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem)
|
||||
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
|
||||
|
@ -41,6 +41,7 @@ import android.net.Uri;
|
||||
import androidx.media3.common.MediaItem;
|
||||
import androidx.media3.common.util.Assertions;
|
||||
import androidx.media3.transformer.AndroidTestUtil;
|
||||
import androidx.media3.transformer.AndroidTestUtil.AssetInfo;
|
||||
import androidx.media3.transformer.DefaultEncoderFactory;
|
||||
import androidx.media3.transformer.EditedMediaItem;
|
||||
import androidx.media3.transformer.Transformer;
|
||||
@ -68,7 +69,7 @@ import org.junit.runners.Parameterized.Parameters;
|
||||
"Analysis tests are not used for confirming Transformer is running properly, and not configured"
|
||||
+ " for this use as they're missing skip checks for unsupported devices.")
|
||||
public class BitrateAnalysisTest {
|
||||
private static final ImmutableList<String> INPUT_FILES =
|
||||
private static final ImmutableList<AssetInfo> INPUT_ASSETS =
|
||||
ImmutableList.of(
|
||||
MP4_REMOTE_640W_480H_31_SECOND_ROOF_SONYXPERIAXZ3,
|
||||
MP4_REMOTE_1280W_720H_5_SECOND_HIGHMOTION,
|
||||
@ -102,15 +103,15 @@ public class BitrateAnalysisTest {
|
||||
public int bitrateMode;
|
||||
|
||||
@Parameter(2)
|
||||
public @MonotonicNonNull String fileUri;
|
||||
public @MonotonicNonNull AssetInfo assetInfo;
|
||||
|
||||
@Parameters(name = "analyzeBitrate_{0}_{1}_{2}")
|
||||
public static List<Object[]> parameters() {
|
||||
List<Object[]> parameterList = new ArrayList<>();
|
||||
for (int bitrate = START_BITRATE; bitrate <= END_BITRATE; bitrate += BITRATE_INTERVAL) {
|
||||
for (int mode : INPUT_BITRATE_MODES) {
|
||||
for (String file : INPUT_FILES) {
|
||||
parameterList.add(new Object[] {bitrate, mode, file});
|
||||
for (AssetInfo assetInfo : INPUT_ASSETS) {
|
||||
parameterList.add(new Object[] {bitrate, mode, assetInfo});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -120,8 +121,9 @@ public class BitrateAnalysisTest {
|
||||
|
||||
@Test
|
||||
public void analyzeBitrate() throws Exception {
|
||||
Assertions.checkNotNull(fileUri);
|
||||
String fileName = Assertions.checkNotNull(Iterables.getLast(Splitter.on("/").split(fileUri)));
|
||||
Assertions.checkNotNull(assetInfo);
|
||||
String fileName =
|
||||
Assertions.checkNotNull(Iterables.getLast(Splitter.on("/").split(assetInfo.uri)));
|
||||
String testId = String.format("analyzeBitrate_ssim_%s_%d_%s", bitrate, bitrateMode, fileName);
|
||||
|
||||
Map<String, Object> inputValues = new HashMap<>();
|
||||
@ -137,11 +139,8 @@ public class BitrateAnalysisTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri),
|
||||
/* outputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri)
|
||||
.buildUpon()
|
||||
.setAverageBitrate(bitrate)
|
||||
.build());
|
||||
/* inputFormat= */ assetInfo.videoFormat,
|
||||
/* outputFormat= */ assetInfo.videoFormat.buildUpon().setAverageBitrate(bitrate).build());
|
||||
|
||||
Transformer transformer =
|
||||
new Transformer.Builder(context)
|
||||
@ -157,7 +156,7 @@ public class BitrateAnalysisTest {
|
||||
.build()))
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(fileUri)))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(assetInfo.uri)))
|
||||
.setRemoveAudio(true)
|
||||
.build();
|
||||
|
||||
|
@ -27,6 +27,7 @@ import android.net.Uri;
|
||||
import androidx.media3.common.MediaItem;
|
||||
import androidx.media3.common.util.Util;
|
||||
import androidx.media3.transformer.AndroidTestUtil;
|
||||
import androidx.media3.transformer.AndroidTestUtil.AssetInfo;
|
||||
import androidx.media3.transformer.DefaultEncoderFactory;
|
||||
import androidx.media3.transformer.EditedMediaItem;
|
||||
import androidx.media3.transformer.Transformer;
|
||||
@ -51,11 +52,11 @@ import org.junit.runners.Parameterized.Parameters;
|
||||
+ " for this use as they're missing skip checks for unsupported devices.")
|
||||
public class EncoderPerformanceAnalysisTest {
|
||||
|
||||
private static final ImmutableList<String> INPUT_FILES =
|
||||
private static final ImmutableList<AssetInfo> INPUT_ASSETS =
|
||||
ImmutableList.of(
|
||||
AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING,
|
||||
AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING,
|
||||
AndroidTestUtil.MP4_ASSET_4K60_PORTRAIT_URI_STRING);
|
||||
AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S,
|
||||
AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS,
|
||||
AndroidTestUtil.MP4_ASSET_4K60_PORTRAIT);
|
||||
|
||||
private static final ImmutableList<Integer> OPERATING_RATE_SETTINGS =
|
||||
ImmutableList.of(VideoEncoderSettings.NO_VALUE, 30, Integer.MAX_VALUE);
|
||||
@ -68,7 +69,7 @@ public class EncoderPerformanceAnalysisTest {
|
||||
MEDIA_CODEC_PRIORITY_REALTIME);
|
||||
|
||||
@Parameter(0)
|
||||
public @MonotonicNonNull String fileUri;
|
||||
public @MonotonicNonNull AssetInfo assetInfo;
|
||||
|
||||
@Parameter(1)
|
||||
public int operatingRate;
|
||||
@ -79,12 +80,12 @@ public class EncoderPerformanceAnalysisTest {
|
||||
@Parameters(name = "analyzePerformance_{0}_OpRate={1}_Priority={2}")
|
||||
public static ImmutableList<Object[]> parameters() {
|
||||
ImmutableList.Builder<Object[]> parametersBuilder = new ImmutableList.Builder<>();
|
||||
for (int i = 0; i < INPUT_FILES.size(); i++) {
|
||||
for (int i = 0; i < INPUT_ASSETS.size(); i++) {
|
||||
for (int j = 0; j < OPERATING_RATE_SETTINGS.size(); j++) {
|
||||
for (int k = 0; k < PRIORITY_SETTINGS.size(); k++) {
|
||||
parametersBuilder.add(
|
||||
new Object[] {
|
||||
INPUT_FILES.get(i), OPERATING_RATE_SETTINGS.get(j), PRIORITY_SETTINGS.get(k)
|
||||
INPUT_ASSETS.get(i), OPERATING_RATE_SETTINGS.get(j), PRIORITY_SETTINGS.get(k)
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -94,8 +95,8 @@ public class EncoderPerformanceAnalysisTest {
|
||||
|
||||
@Test
|
||||
public void analyzeEncoderPerformance() throws Exception {
|
||||
checkNotNull(fileUri);
|
||||
String filename = checkNotNull(Uri.parse(fileUri).getLastPathSegment());
|
||||
checkNotNull(assetInfo.uri);
|
||||
String filename = checkNotNull(Uri.parse(assetInfo.uri).getLastPathSegment());
|
||||
String testId =
|
||||
Util.formatInvariant(
|
||||
"analyzePerformance_%s_OpRate_%d_Priority_%d", filename, operatingRate, priority);
|
||||
@ -104,8 +105,8 @@ public class EncoderPerformanceAnalysisTest {
|
||||
assumeFormatsSupported(
|
||||
context,
|
||||
testId,
|
||||
/* inputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri),
|
||||
/* outputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri));
|
||||
/* inputFormat= */ assetInfo.videoFormat,
|
||||
/* outputFormat= */ assetInfo.videoFormat);
|
||||
|
||||
if (Util.SDK_INT < 23) {
|
||||
recordTestSkipped(
|
||||
@ -133,7 +134,7 @@ public class EncoderPerformanceAnalysisTest {
|
||||
.build()))
|
||||
.build();
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(fileUri)))
|
||||
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(assetInfo.uri)))
|
||||
.setRemoveAudio(true)
|
||||
.build();
|
||||
|
||||
|
@ -54,8 +54,7 @@ public final class RepeatedTranscodeTest {
|
||||
.setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(context))
|
||||
.build())
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
|
||||
|
||||
@ -84,8 +83,7 @@ public final class RepeatedTranscodeTest {
|
||||
.setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(context))
|
||||
.build())
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem)
|
||||
.setRemoveAudio(true)
|
||||
@ -118,8 +116,7 @@ public final class RepeatedTranscodeTest {
|
||||
.setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(context))
|
||||
.build())
|
||||
.build();
|
||||
MediaItem mediaItem =
|
||||
MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS_URI_STRING));
|
||||
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(AndroidTestUtil.MP4_REMOTE_10_SECONDS.uri));
|
||||
EditedMediaItem editedMediaItem =
|
||||
new EditedMediaItem.Builder(mediaItem).setRemoveVideo(true).build();
|
||||
|
||||
|
@ -42,7 +42,6 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_7680W_4320H
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_854W_480H_30_SECOND_ROOF_ONEPLUSNORD2_DOWNSAMPLED;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_854W_480H_30_SECOND_ROOF_REDMINOTE9_DOWNSAMPLED;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
|
||||
import static androidx.media3.transformer.AndroidTestUtil.getFormatForTestFile;
|
||||
import static androidx.media3.transformer.ExportTestResult.SSIM_UNSET;
|
||||
import static com.google.common.collect.Iterables.getLast;
|
||||
|
||||
@ -53,7 +52,7 @@ import androidx.media3.common.Format;
|
||||
import androidx.media3.common.MediaItem;
|
||||
import androidx.media3.common.MimeTypes;
|
||||
import androidx.media3.common.util.Util;
|
||||
import androidx.media3.transformer.AndroidTestUtil;
|
||||
import androidx.media3.transformer.AndroidTestUtil.AssetInfo;
|
||||
import androidx.media3.transformer.DefaultEncoderFactory;
|
||||
import androidx.media3.transformer.EditedMediaItem;
|
||||
import androidx.media3.transformer.Transformer;
|
||||
@ -88,7 +87,7 @@ public class SsimMapperTest {
|
||||
|
||||
// When running this test, input file list should be restricted more than this. Binary search can
|
||||
// take up to 40 minutes to complete for a single clip on lower end devices.
|
||||
private static final ImmutableList<String> INPUT_FILES =
|
||||
private static final ImmutableList<AssetInfo> INPUT_ASSETS =
|
||||
ImmutableList.of(
|
||||
MP4_REMOTE_256W_144H_30_SECOND_ROOF_ONEPLUSNORD2_DOWNSAMPLED,
|
||||
MP4_REMOTE_256W_144H_30_SECOND_ROOF_REDMINOTE9_DOWNSAMPLED,
|
||||
@@ -117,11 +116,11 @@ public class SsimMapperTest {
 @Parameters
 public static List<Object[]> parameters() {
 List<Object[]> parameterList = new ArrayList<>();
-for (String file : INPUT_FILES) {
-parameterList.add(new Object[] {file, MimeTypes.VIDEO_H264});
+for (AssetInfo assetInfo : INPUT_ASSETS) {
+parameterList.add(new Object[] {assetInfo, MimeTypes.VIDEO_H264});
 // TODO(b/210593256): Test pre 24 once in-app muxing implemented.
 if (Util.SDK_INT >= 24) {
-parameterList.add(new Object[] {file, MimeTypes.VIDEO_H265});
+parameterList.add(new Object[] {assetInfo, MimeTypes.VIDEO_H265});
 }
 }
 return parameterList;
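(Illustration, not part of the diff: with INPUT_ASSETS typed as ImmutableList<AssetInfo>, registering another asset only means appending its AssetInfo constant, and parameters() then emits both codec rows for it. MP4_REMOTE_NEW_ASSET below is a hypothetical constant used only for this sketch.)

// Hypothetical sketch: MP4_REMOTE_NEW_ASSET is not an asset added by this commit.
private static final ImmutableList<AssetInfo> INPUT_ASSETS =
    ImmutableList.of(
        MP4_REMOTE_256W_144H_30_SECOND_ROOF_ONEPLUSNORD2_DOWNSAMPLED,
        MP4_REMOTE_256W_144H_30_SECOND_ROOF_REDMINOTE9_DOWNSAMPLED,
        MP4_REMOTE_NEW_ASSET);
// parameters() would then yield {MP4_REMOTE_NEW_ASSET, VIDEO_H264} and, on API 24+,
// {MP4_REMOTE_NEW_ASSET, VIDEO_H265} without any further test changes.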
@@ -129,7 +128,7 @@ public class SsimMapperTest {

 @Parameter(0)
 @Nullable
-public String fileUri;
+public AssetInfo assetInfo;

 @Parameter(1)
 @Nullable
@@ -137,7 +136,6 @@ public class SsimMapperTest {

 @Test
 public void findSsimMapping() throws Exception {
-String fileUri = checkNotNull(this.fileUri);
 String mimeType = checkNotNull(this.mimeType);

 String testIdPrefix =
@@ -147,11 +145,11 @@ public class SsimMapperTest {
 assumeFormatsSupported(
 ApplicationProvider.getApplicationContext(),
 testIdPrefix + "_codecSupport",
-/* inputFormat= */ getFormatForTestFile(fileUri),
+/* inputFormat= */ assetInfo.videoFormat,
 /* outputFormat= */ null);

 new SsimBinarySearcher(
-ApplicationProvider.getApplicationContext(), testIdPrefix, fileUri, mimeType)
+ApplicationProvider.getApplicationContext(), testIdPrefix, assetInfo, mimeType)
 .search();
 }
@@ -178,18 +176,18 @@ public class SsimMapperTest {
 *
 * @param context The {@link Context}.
 * @param testIdPrefix The test ID prefix.
-* @param videoUri The URI of the video to transform.
+* @param assetInfo The video {@link AssetInfo} to transform.
 * @param outputMimeType The video sample MIME type to output, see {@link
 * Transformer.Builder#setVideoMimeType}.
 */
 public SsimBinarySearcher(
-Context context, String testIdPrefix, String videoUri, String outputMimeType) {
+Context context, String testIdPrefix, AssetInfo assetInfo, String outputMimeType) {
 this.context = context;
 this.testIdPrefix = testIdPrefix;
-this.videoUri = videoUri;
+this.videoUri = assetInfo.uri;
 this.outputMimeType = outputMimeType;
 exportsLeft = MAX_EXPORTS;
-format = AndroidTestUtil.getFormatForTestFile(videoUri);
+format = assetInfo.videoFormat;
 }

 /**
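(Context, inferred rather than taken from the diff: the member accesses above, assetInfo.uri and assetInfo.videoFormat, plus MP4_ASSET.videoDurationUs in the tests below, suggest AndroidTestUtil.AssetInfo is a simple value holder. A minimal sketch of that assumed shape follows; the real class may carry more fields or use a builder.)

// Assumed shape only, not the actual AndroidTestUtil.AssetInfo definition.
// Requires androidx.media3.common.Format.
public static final class AssetInfo {
  public final String uri;
  public final Format videoFormat;
  public final long videoDurationUs;

  public AssetInfo(String uri, Format videoFormat, long videoDurationUs) {
    this.uri = uri;
    this.videoFormat = videoFormat;
    this.videoDurationUs = videoDurationUs;
  }
}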
@@ -20,8 +20,7 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIX
 import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image;
 import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
 import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
 import static androidx.media3.transformer.mh.performance.PlaybackTestUtil.createTimestampOverlay;
 import static com.google.common.truth.Truth.assertThat;
 import static com.google.common.truth.Truth.assertWithMessage;
@@ -91,8 +90,8 @@ public class CompositionPlaybackTest {
 ConditionVariable hasRenderedFirstFrameCondition = new ConditionVariable();
 outputImageReader =
 ImageReader.newInstance(
-MP4_ASSET_FORMAT.width,
-MP4_ASSET_FORMAT.height,
+MP4_ASSET.videoFormat.width,
+MP4_ASSET.videoFormat.height,
 PixelFormat.RGBA_8888,
 /* maxImages= */ 1);
@@ -110,11 +109,11 @@ public class CompositionPlaybackTest {

 player.setVideoSurface(
 outputImageReader.getSurface(),
-new Size(MP4_ASSET_FORMAT.width, MP4_ASSET_FORMAT.height));
+new Size(MP4_ASSET.videoFormat.width, MP4_ASSET.videoFormat.height));
 player.setComposition(
 new Composition.Builder(
 new EditedMediaItemSequence(
-new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
+new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
 .setEffects(
 new Effects(
 /* audioProcessors= */ ImmutableList.of(),
@@ -17,8 +17,7 @@
 package androidx.media3.transformer.mh.performance;

 import static androidx.media3.common.util.Util.usToMs;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DURATION_US;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
 import static com.google.common.collect.Iterables.getLast;
 import static com.google.common.truth.Truth.assertThat;
@@ -131,7 +130,8 @@ public class CompositionPlayerSeekTest {
 // Plays the second video
 .addAll(
 Iterables.transform(
-MP4_ASSET_TIMESTAMPS_US, timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+MP4_ASSET_TIMESTAMPS_US,
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 .build();
 // Seeked after the first playback ends, so the timestamps are repeated twice.
 ImmutableList<Long> expectedTimestampsUs =
@@ -160,7 +160,8 @@ public class CompositionPlayerSeekTest {
 // Plays the second video
 .addAll(
 Iterables.transform(
-MP4_ASSET_TIMESTAMPS_US, timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+MP4_ASSET_TIMESTAMPS_US,
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 .build();

 ImmutableList<Long> actualTimestampsUs =
@@ -188,7 +189,7 @@ public class CompositionPlayerSeekTest {
 .addAll(
 Iterables.transform(
 Iterables.skip(MP4_ASSET_TIMESTAMPS_US, /* numberToSkip= */ 3),
-timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 .build();

 ImmutableList<Long> actualTimestampsUs =
@@ -214,13 +215,14 @@ public class CompositionPlayerSeekTest {
 .addAll(
 Iterables.transform(
 Iterables.limit(MP4_ASSET_TIMESTAMPS_US, /* limitSize= */ 15),
-timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 // Seek to the first, skipping the first three frames.
 .addAll(Iterables.skip(MP4_ASSET_TIMESTAMPS_US, /* numberToSkip= */ 3))
 // Plays the second video
 .addAll(
 Iterables.transform(
-MP4_ASSET_TIMESTAMPS_US, timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+MP4_ASSET_TIMESTAMPS_US,
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 .build();

 ImmutableList<Long> actualTimestampsUs =
@@ -236,7 +238,7 @@ public class CompositionPlayerSeekTest {
 PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
 int numberOfFramesBeforeSeeking = 15;
 // Seek to the duration of the first video.
-long seekTimeMs = usToMs(MP4_ASSET_DURATION_US);
+long seekTimeMs = usToMs(MP4_ASSET.videoDurationUs);

 ImmutableList<Long> expectedTimestampsUs =
 new ImmutableList.Builder<Long>()
@@ -245,7 +247,8 @@ public class CompositionPlayerSeekTest {
 // Plays the second video
 .addAll(
 Iterables.transform(
-MP4_ASSET_TIMESTAMPS_US, timestampUs -> (MP4_ASSET_DURATION_US + timestampUs)))
+MP4_ASSET_TIMESTAMPS_US,
+timestampUs -> (MP4_ASSET.videoDurationUs + timestampUs)))
 .build();

 ImmutableList<Long> actualTimestampsUs =
@@ -268,7 +271,7 @@ public class CompositionPlayerSeekTest {
 // Play the first 15 frames of the first video
 .addAll(Iterables.limit(MP4_ASSET_TIMESTAMPS_US, /* limitSize= */ 15))
 // Seeking to/beyond the end plays the last frame.
-.add(MP4_ASSET_DURATION_US + getLast(MP4_ASSET_TIMESTAMPS_US))
+.add(MP4_ASSET.videoDurationUs + getLast(MP4_ASSET_TIMESTAMPS_US))
 .build();

 ImmutableList<Long> actualTimestampsUs =
@@ -279,9 +282,9 @@ public class CompositionPlayerSeekTest {
 }

 /**
-* Plays the {@link AndroidTestUtil#MP4_ASSET_URI_STRING} for {@code videoLoopCount} times, seeks
-* after {@code numberOfFramesBeforeSeeking} frames to {@code seekTimeMs}, and returns the
-* timestamps of the processed frames, in microsecond.
+* Plays the {@link AndroidTestUtil#MP4_ASSET} for {@code videoLoopCount} times, seeks after
+* {@code numberOfFramesBeforeSeeking} frames to {@code seekTimeMs}, and returns the timestamps of
+* the processed frames, in microsecond.
 */
 private ImmutableList<Long> playCompositionOfTwoVideosAndGetTimestamps(
 PlayerTestListener listener, int numberOfFramesBeforeSeeking, long seekTimeMs)
@@ -353,8 +356,8 @@ public class CompositionPlayerSeekTest {
 }

 private static EditedMediaItem createEditedMediaItem(List<Effect> videoEffects) {
-return new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
-.setDurationUs(MP4_ASSET_DURATION_US)
+return new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET.uri))
+.setDurationUs(MP4_ASSET.videoDurationUs)
 .setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
 .build();
 }
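(Illustration, not part of the diff: every expected-timestamp list in this test offsets the repeated asset's frame timestamps by the duration of the first playback. The helper below restates that pattern; the test itself inlines Iterables.transform and has no such method.)

// Illustrative only: mirrors the Iterables.transform calls above.
private static ImmutableList<Long> offsetTimestampsUs(
    ImmutableList<Long> timestampsUs, long firstVideoDurationUs) {
  return ImmutableList.copyOf(
      Iterables.transform(timestampsUs, timestampUs -> firstVideoDurationUs + timestampUs));
}
// e.g. offsetTimestampsUs(MP4_ASSET_TIMESTAMPS_US, MP4_ASSET.videoDurationUs) produces the
// timestamps expected from the second playback of MP4_ASSET.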
@@ -17,7 +17,7 @@
 package androidx.media3.transformer.mh.performance;

 import static androidx.media3.common.util.Assertions.checkNotNull;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
 import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
 import static com.google.common.truth.Truth.assertThat;
@@ -97,8 +97,8 @@ public class VideoCompositionPreviewPerformanceTest {
 player.setComposition(
 new Composition.Builder(
 new EditedMediaItemSequence(
-getClippedEditedMediaItem(MP4_ASSET_URI_STRING, new Contrast(.2f)),
-getClippedEditedMediaItem(MP4_ASSET_URI_STRING, new Contrast(-.2f))))
+getClippedEditedMediaItem(MP4_ASSET.uri, new Contrast(.2f)),
+getClippedEditedMediaItem(MP4_ASSET.uri, new Contrast(-.2f))))
 .build());
 player.prepare();
 });
@@ -16,7 +16,7 @@
 package androidx.media3.transformer.mh.performance;

 import static androidx.media3.common.util.Assertions.checkNotNull;
-import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
 import static com.google.common.truth.Truth.assertThat;

 import android.app.Instrumentation;
@@ -80,7 +80,7 @@ public class VideoEffectsPreviewPerformanceTest {
 player.addAnalyticsListener(listener);
 // Adding an EventLogger to use its log output in case the test fails.
 player.addAnalyticsListener(new EventLogger());
-MediaItem mediaItem = getClippedMediaItem(MP4_ASSET_URI_STRING);
+MediaItem mediaItem = getClippedMediaItem(MP4_ASSET.uri);
 // Use the same media item so that format changes do not force exoplayer to re-init codecs
 // between item transitions.
 player.addMediaItems(ImmutableList.of(mediaItem, mediaItem, mediaItem, mediaItem));