Test: Use AssumptionViolatedException for format support detection.

This is the most widely-used test-skipping method I'm aware of, so I figured migrating it would be
a great way to scale up usage of AssumptionViolatedException.

PiperOrigin-RevId: 618160931
huangdarwin 2024-03-22 06:37:55 -07:00 committed by Copybara-Service
parent 16aac07bce
commit d165af9a85
20 changed files with 179 additions and 413 deletions
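
For reviewers, here is a minimal sketch of the call-site pattern this change applies across the instrumentation tests. The test class and body are hypothetical; AndroidTestUtil, MP4_ASSET_FORMAT, and the assumeFormatsSupported signature are taken from the diff below. The old helper returned a boolean that callers had to check before returning early, so a skipped test still counted as passed; the new helper records the skip reason and throws AssumptionViolatedException, which JUnit reports as a skipped test.

import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;

import android.content.Context;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Hypothetical instrumentation test illustrating the migrated skip pattern. */
@RunWith(AndroidJUnit4.class)
public final class FormatSupportAssumptionExampleTest {

  @Test
  public void export_onDeviceWithoutFormatSupport_isReportedAsSkipped() throws Exception {
    Context context = getApplicationContext();
    String testId = "export_onDeviceWithoutFormatSupport_isReportedAsSkipped";

    // Old pattern (removed by this change): branch on the returned boolean and
    // return early, which ends the test but still reports it as passed.
    //   if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
    //       context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT)) {
    //     return;
    //   }

    // New pattern: records the skip reason and throws AssumptionViolatedException
    // on unsupported devices, so JUnit marks the test as skipped rather than passed.
    assumeFormatsSupported(
        context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);

    // ... run the Transformer export under test, as the real tests below do ...
  }
}

Using an assumption here keeps per-device skips visible in test reports instead of hiding them as passes.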

View File

@ -53,6 +53,7 @@ import java.io.FileWriter;
import java.io.IOException;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.AssumptionViolatedException;
/** Utilities for instrumentation tests. */
public final class AndroidTestUtil {
@ -717,20 +718,18 @@ public final class AndroidTestUtil {
}
/**
* Returns whether the test should be skipped because the device is incapable of decoding the
* input format, or encoding/muxing the output format. Assumes the input will always need to be
* decoded, and both encoded and muxed if {@code outputFormat} is non-null.
*
* <p>If the test should be skipped, logs the reason for skipping.
* Assumes that the device supports decoding the input format, and encoding/muxing the output
* format if needed.
*
* @param context The {@link Context context}.
* @param testId The test ID.
* @param inputFormat The {@link Format format} to decode.
* @param outputFormat The {@link Format format} to encode/mux or {@code null} if the output won't
* be encoded or muxed.
* @return Whether the test should be skipped.
* @throws AssumptionViolatedException If the device does not support the formats. In this case,
* the reason for skipping the test is logged.
*/
public static boolean skipAndLogIfFormatsUnsupported(
public static void assumeFormatsSupported(
Context context, String testId, Format inputFormat, @Nullable Format outputFormat)
throws IOException, JSONException, MediaCodecUtil.DecoderQueryException {
// TODO(b/278657595): Make this capability check match the default codec factory selection code.
@ -739,7 +738,7 @@ public final class AndroidTestUtil {
boolean canEncode = outputFormat == null || canEncode(outputFormat);
boolean canMux = outputFormat == null || canMux(outputFormat);
if (canDecode && canEncode && canMux) {
return false;
return;
}
StringBuilder skipReasonBuilder = new StringBuilder();
@ -752,8 +751,9 @@ public final class AndroidTestUtil {
if (!canMux) {
skipReasonBuilder.append("Cannot mux ").append(outputFormat);
}
recordTestSkipped(context, testId, skipReasonBuilder.toString());
return true;
String skipReason = skipReasonBuilder.toString();
recordTestSkipped(context, testId, skipReason);
throw new AssumptionViolatedException(skipReason);
}
/**

View File

@ -19,6 +19,7 @@ package androidx.media3.transformer;
import static androidx.media3.transformer.AndroidTestUtil.FORCE_TRANSCODE_VIDEO_EFFECTS;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FRAME_COUNT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
@ -67,13 +68,8 @@ public class ForceEndOfStreamTest {
if (skipTestBelowApi29(context, testId)) {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
int framesToSkip = 4;
ExportTestResult testResult =
@ -92,13 +88,8 @@ public class ForceEndOfStreamTest {
if (skipTestBelowApi29(context, testId)) {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
ExportTestResult testResult =
new TransformerAndroidTestRunner.Builder(

View File

@ -29,6 +29,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_270_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.createOpenGlObjects;
import static androidx.media3.transformer.AndroidTestUtil.generateTextureFromBitmap;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
@ -122,13 +123,8 @@ public class TransformerEndToEndTest {
@Test
public void compositionEditing_withThreeSequences_completes() throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
EditedMediaItem audioVideoItem =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
.setEffects(
@ -340,13 +336,8 @@ public class TransformerEndToEndTest {
@Test
public void videoEditing_completesWithConsistentFrameCount() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -371,13 +362,8 @@ public class TransformerEndToEndTest {
@Test
public void videoEditing_effectsOverTime_completesWithConsistentFrameCount() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -411,13 +397,8 @@ public class TransformerEndToEndTest {
@Test
public void videoOnly_completesWithConsistentDuration() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -440,13 +421,11 @@ public class TransformerEndToEndTest {
@Test
public void clippedMedia_completesWithClippedDuration() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer = new Transformer.Builder(context).build();
long clippingStartMs = 10_000;
long clippingEndMs = 11_000;
@ -472,13 +451,11 @@ public class TransformerEndToEndTest {
public void
clippedAndRotatedMedia_withNoOpEffect_completesWithClippedDurationAndCorrectOrientation()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer = new Transformer.Builder(context).build();
long clippingStartMs = 10_000;
long clippingEndMs = 11_000;
@ -513,13 +490,11 @@ public class TransformerEndToEndTest {
@Test
public void clippedMedia_trimOptimizationEnabled_fallbackToNormalExportUponFormatMismatch()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer =
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
long clippingStartMs = 10_000;
@ -552,13 +527,11 @@ public class TransformerEndToEndTest {
public void
clippedAndRotatedMedia_trimOptimizationEnabledButFormatsMismatch_fallsbackWithCorrectOrientationOutput()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer =
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
long clippingStartMs = 10_000;
@ -600,13 +573,11 @@ public class TransformerEndToEndTest {
public void
clippedMedia_trimOptimizationEnabled_noKeyFrameBetweenClipTimes_fallbackToNormalExport()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer =
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
long clippingStartMs = 10_000;
@ -637,13 +608,11 @@ public class TransformerEndToEndTest {
public void
clippedMedia_trimOptimizationEnabled_noKeyFramesAfterClipStart_fallbackToNormalExport()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
Transformer transformer =
new Transformer.Builder(context).experimentalSetTrimOptimizationEnabled(true).build();
long clippingStartMs = 14_500;
@ -898,13 +867,8 @@ public class TransformerEndToEndTest {
@Test
public void videoEncoderFormatUnsupported_completesWithError() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(new VideoUnsupportedEncoderFactory(context))
@ -930,13 +894,11 @@ public class TransformerEndToEndTest {
@Test
public void durationAdjustedSequence_completesWithCorrectDuration() throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
ImmutableList<Effect> videoEffects =
ImmutableList.of(new SpeedChangeEffect(1.5f), new SpeedChangeEffect(2f));
EditedMediaItem editedMediaItem =
@ -961,13 +923,11 @@ public class TransformerEndToEndTest {
public void durationAdjustedSequence_withForcedAudioTrack_completesWithCorrectDuration()
throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)) {
return;
}
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
ImmutableList<Effect> videoEffects = ImmutableList.of(new SpeedChangeEffect(1.5f));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(
@ -991,13 +951,8 @@ public class TransformerEndToEndTest {
@Test
public void audioVideoTranscodedFromDifferentSequences_producesExpectedResult() throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
ImmutableList<AudioProcessor> audioProcessors = ImmutableList.of(createSonic(1.2f));
ImmutableList<Effect> videoEffects = ImmutableList.of(RgbFilter.createGrayscaleFilter());
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_URI_STRING));
@ -1041,13 +996,8 @@ public class TransformerEndToEndTest {
@Test
public void loopingTranscodedAudio_producesExpectedResult() throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
EditedMediaItem audioEditedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(MP3_ASSET_URI_STRING)).build();
EditedMediaItemSequence loopingAudioSequence =
@ -1078,13 +1028,8 @@ public class TransformerEndToEndTest {
@Test
public void loopingTranscodedVideo_producesExpectedResult() throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
EditedMediaItem audioEditedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(MP3_ASSET_URI_STRING)).build();
EditedMediaItemSequence audioSequence =
@ -1250,7 +1195,7 @@ public class TransformerEndToEndTest {
@Test
public void transcode_withOutputVideoMimeTypeAv1_completesSuccessfully() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
@ -1258,9 +1203,7 @@ public class TransformerEndToEndTest {
.buildUpon()
.setSampleMimeType(MimeTypes.VIDEO_AV1)
.setCodecs(null)
.build())) {
return;
}
.build());
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_URI_STRING));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
Transformer transformer =

View File

@ -22,6 +22,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FRAME_COUNT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
@ -62,13 +63,8 @@ public class TransformerMixedInputEndToEndTest {
@Test
public void videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -93,13 +89,8 @@ public class TransformerMixedInputEndToEndTest {
@Test
public void videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -124,13 +115,8 @@ public class TransformerMixedInputEndToEndTest {
public void
videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -166,13 +152,8 @@ public class TransformerMixedInputEndToEndTest {
public void
videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(

View File

@ -25,6 +25,7 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
@ -79,13 +80,8 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesEachWithOneVideoMediaItem_succeeds() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Composition composition =
createComposition(
@ -118,13 +114,8 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesOneWithVideoOneWithImage_succeeds() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Composition composition =
createComposition(
@ -157,13 +148,8 @@ public final class TransformerMultiSequenceCompositionTest {
@Test
public void export_withTwoSequencesWithVideoCompositorSettings_succeeds() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
VideoCompositorSettings pictureInPictureVideoCompositorSettings =
new VideoCompositorSettings() {

View File

@ -17,6 +17,7 @@ package androidx.media3.transformer;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.SECONDS;
@ -381,14 +382,14 @@ public class TransformerPauseResumeTest {
}
private static boolean shouldSkipDevice(String testId) throws Exception {
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
// v26 emulators are not producing I-frames, due to which resuming export does not work as
// expected.
return AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT)
|| (Util.SDK_INT == 26 && Util.isRunningOnEmulator());
return Util.SDK_INT == 26 && Util.isRunningOnEmulator();
}
private static int getDeviceSpecificMissingFrameCount() {

View File

@ -27,6 +27,7 @@ import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
@ -80,13 +81,8 @@ public final class TransformerSequenceEffectTest {
@Test
public void export_withNoCompositionPresentationAndWithPerMediaItemEffects() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
OverlayEffect overlayEffect = createOverlayEffect();
Composition composition =
createComposition(
@ -132,13 +128,8 @@ public final class TransformerSequenceEffectTest {
&& (Ascii.equalsIgnoreCase(Util.MODEL, "redmi 6a")
|| Ascii.equalsIgnoreCase(Util.MODEL, "vivo 1820")));
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Composition composition =
createComposition(
Presentation.createForWidthAndHeight(
@ -174,13 +165,8 @@ public final class TransformerSequenceEffectTest {
@Test
public void export_withCompositionPresentationAndNoVideoEffects() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
@ -203,13 +189,8 @@ public final class TransformerSequenceEffectTest {
@Test
public void export_withCompositionPresentationAndNoVideoEffectsForFirstMediaItem()
throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
@ -231,17 +212,10 @@ public final class TransformerSequenceEffectTest {
@Test
public void export_withBt601AndBt709MediaItems() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
assumeFormatsSupported(
context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
@ -263,17 +237,10 @@ public final class TransformerSequenceEffectTest {
@Test
public void export_withBt601VideoAndBt709ImageMediaItems() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
assumeFormatsSupported(
context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),

View File

@ -17,6 +17,7 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static org.junit.Assume.assumeTrue;
import android.content.Context;
@ -57,13 +58,8 @@ public class TransformerWithInAppMuxerEndToEndAndroidTest {
// Use MP4_ASSET_FORMAT for H265_MP4_ASSET_URI_STRING test skipping as well, because emulators
// signal a lack of support for H265_MP4's actual format, but pass this test when using
// MP4_ASSET_FORMAT for skipping.
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ MP4_ASSET_FORMAT)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
Transformer transformer =
new Transformer.Builder(context)
.setMuxerFactory(new InAppMuxer.Factory.Builder().build())

View File

@ -25,6 +25,7 @@ import static androidx.media3.test.utils.VideoFrameProcessorTestRunner.VIDEO_FRA
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsHdrEditing;
import static androidx.media3.transformer.mh.UnoptimizedGlEffect.NO_OP_EFFECT;
@ -50,7 +51,6 @@ import androidx.media3.effect.OverlayEffect;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
@ -120,13 +120,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
@Test
public void noEffects_matchesGoldenFile() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
@ -142,13 +140,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
@Test
public void noEffects_textureInput_matchesGoldenFile() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
getTexIdProducingFrameProcessorTestRunner(
@ -172,13 +168,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
@Test
public void bitmapOverlay_matchesGoldenFile() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
videoFrameProcessorTestRunner =
@ -198,13 +192,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
@Test
public void bitmapOverlay_textureInput_matchesGoldenFile() throws Exception {
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
ImmutableList<Effect> effects =
@ -231,10 +223,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
@ -259,10 +248,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
@ -324,10 +310,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
@ -352,10 +335,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
@ -417,10 +397,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
@ -446,10 +423,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
@ -512,10 +486,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
@ -541,10 +512,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
Context context = getApplicationContext();
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
ColorInfo colorInfo = checkNotNull(format.colorInfo);
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =

View File

@ -29,6 +29,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_PIXEL_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.ExportResult.CONVERSION_PROCESS_TRANSMUXED_AND_TRANSCODED;
import static androidx.media3.transformer.ExportResult.OPTIMIZATION_SUCCEEDED;
@ -50,7 +51,6 @@ import androidx.media3.extractor.text.DefaultSubtitleParserFactory;
import androidx.media3.test.utils.FakeExtractorOutput;
import androidx.media3.test.utils.FakeTrackOutput;
import androidx.media3.test.utils.TestUtil;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.AndroidTestUtil.ForceEncodeEncoderFactory;
import androidx.media3.transformer.DefaultEncoderFactory;
import androidx.media3.transformer.EditedMediaItem;
@ -88,13 +88,11 @@ public class ExportTest {
Context context = ApplicationProvider.getApplicationContext();
// Note: throughout this class we only check decoding capability as tests should still run if
// Transformer is able to succeed by falling back to a lower resolution.
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -124,13 +122,11 @@ public class ExportTest {
@Test
public void exportToSpecificBitrate() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
@ -156,13 +152,11 @@ public class ExportTest {
@Test
public void export4K60() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_4K60_PORTRAIT_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
// Reference: b/262710361
assumeFalse(
"Skip due to over-reported encoder capabilities",
@ -192,10 +186,8 @@ public class ExportTest {
|| Ascii.equalsIgnoreCase(Util.MODEL, "le2121"));
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ MP4_ASSET_8K24_FORMAT, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_8K24_FORMAT, /* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -218,7 +210,7 @@ public class ExportTest {
int downscaledHeight = 240;
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_8K24_FORMAT,
@ -226,9 +218,7 @@ public class ExportTest {
.setSampleMimeType(MimeTypes.VIDEO_H264)
.setWidth(downscaledWidth)
.setHeight(downscaledHeight)
.build())) {
return;
}
.build());
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.setTimeoutSeconds(120)
@ -250,13 +240,11 @@ public class ExportTest {
@Test
public void exportNoAudio() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -340,13 +328,11 @@ public class ExportTest {
@Test
public void exportFrameRotation() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem =
@ -371,13 +357,8 @@ public class ExportTest {
recordTestSkipped(context, testId, reason);
throw new AssumptionViolatedException(reason);
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_BT2020_SDR_FORMAT,
/* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_BT2020_SDR_FORMAT, /* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_BT2020_SDR));

View File

@ -21,8 +21,8 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECO
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
@ -78,10 +78,7 @@ public class ForceInterpretHdrVideoAsSdrTest {
.buildUpon()
.setColorInfo(ColorInfo.SDR_BT709_LIMITED)
.build();
if (skipAndLogIfFormatsUnsupported(
context, testId, decoderInputFormat, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, decoderInputFormat, /* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =
@ -121,10 +118,7 @@ public class ForceInterpretHdrVideoAsSdrTest {
.buildUpon()
.setColorInfo(ColorInfo.SDR_BT709_LIMITED)
.build();
if (skipAndLogIfFormatsUnsupported(
context, testId, decoderInputFormat, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, decoderInputFormat, /* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
EditedMediaItem editedMediaItem =

View File

@ -16,8 +16,8 @@
package androidx.media3.transformer.mh;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import android.content.Context;
@ -53,9 +53,7 @@ public final class HdrCapabilitiesUtil {
recordTestSkipped(context, testId, SKIP_REASON_NO_YUV);
throw new AssumptionViolatedException(SKIP_REASON_NO_YUV);
}
if (skipAndLogIfFormatsUnsupported(context, testId, inputFormat, /* outputFormat= */ null)) {
throw new AssumptionViolatedException("Input format is unsupported: " + inputFormat);
}
assumeFormatsSupported(context, testId, inputFormat, /* outputFormat= */ null);
}
/**

View File

@ -23,6 +23,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECON
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_DOLBY_VISION_HDR_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
@ -36,7 +37,6 @@ import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Util;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.ExportException;
@ -81,13 +81,11 @@ public final class HdrEditingTest {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10));
@ -114,13 +112,11 @@ public final class HdrEditingTest {
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10));
@ -143,10 +139,7 @@ public final class HdrEditingTest {
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ format)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_720P_4_SECOND_HDR10));
@ -171,10 +164,7 @@ public final class HdrEditingTest {
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ format)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10));
@ -199,10 +189,7 @@ public final class HdrEditingTest {
Format format = MP4_ASSET_DOLBY_VISION_HDR_FORMAT;
assumeDeviceSupportsHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ format)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR));
@ -228,10 +215,7 @@ public final class HdrEditingTest {
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
assumeDeviceDoesNotSupportHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
AtomicBoolean isFallbackListenerInvoked = new AtomicBoolean();
AtomicBoolean isToneMappingFallbackApplied = new AtomicBoolean();
@ -290,10 +274,7 @@ public final class HdrEditingTest {
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
assumeDeviceDoesNotSupportHdrEditing(testId, format);
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
AtomicBoolean isToneMappingFallbackApplied = new AtomicBoolean();
Transformer transformer =

View File

@ -21,13 +21,13 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECO
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.net.Uri;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
@ -64,13 +64,11 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
public void export_toneMapNoRequestedTranscode_hdr10File_toneMapsOrThrows() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -126,13 +124,11 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
public void export_toneMapNoRequestedTranscode_hlg10File_toneMapsOrThrows() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -188,13 +184,11 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
public void export_toneMapAndTranscode_hdr10File_toneMapsOrThrows() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
@ -251,13 +245,11 @@ public class ToneMapHdrToSdrUsingMediaCodecTest {
public void export_toneMapAndTranscode_hlg10File_toneMapsOrThrows() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)

View File

@ -16,6 +16,7 @@
package androidx.media3.transformer.mh;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeFalse;
@ -56,13 +57,11 @@ public final class TranscodeQualityTest {
throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT)) {
return;
}
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT);
// Skip on specific pre-API 34 devices where calculating SSIM fails.
assumeFalse(
(Util.SDK_INT < 33 && (Util.MODEL.equals("SM-F711U1") || Util.MODEL.equals("SM-F926U1")))
@ -101,16 +100,14 @@ public final class TranscodeQualityTest {
public void transcodeAvcToHevc_ssimIsGreaterThan90Percent() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
/* outputFormat= */ AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT
.buildUpon()
.setSampleMimeType(MimeTypes.VIDEO_H265)
.build())) {
return;
}
.build());
assumeFalse(
(Util.SDK_INT < 33 && (Util.MODEL.equals("SM-F711U1") || Util.MODEL.equals("SM-F926U1")))
|| (Util.SDK_INT == 33 && Util.MODEL.equals("LE2121")));

View File

@ -21,8 +21,8 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
@ -31,7 +31,6 @@ import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceDoe
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsHdrEditing;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeFalse;
import android.content.Context;
import androidx.annotation.Nullable;
@ -111,12 +110,11 @@ public final class TransformerSequenceEffectTestWithHdr {
@Test
public void export_withHdrThenSdr_whenHdrEditingSupported_throws() throws Exception {
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
assumeFalse(
skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null));
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),
@ -152,12 +150,11 @@ public final class TransformerSequenceEffectTestWithHdr {
assumeDeviceDoesNotSupportHdrEditing(testId, MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
assumeDeviceSupportsOpenGlToneMapping(
testId, /* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
assumeFalse(
skipAndLogIfFormatsUnsupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null));
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_720P_4_SECOND_HDR10_FORMAT,
/* outputFormat= */ null);
Composition composition =
createComposition(
Presentation.createForHeight(EXPORT_HEIGHT),

View File

@ -17,6 +17,7 @@ package androidx.media3.transformer.mh;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
@ -24,7 +25,6 @@ import android.net.Uri;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.effect.RgbFilter;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.Effects;
import androidx.media3.transformer.ExportTestResult;
@ -55,10 +55,8 @@ public class TransformerWithInAppMuxerEndToEndMhTest {
@Test
public void videoEditing_forAv1Video_completesSuccessfully() throws Exception {
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT, /* outputFormat= */ null)) {
return;
}
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT, /* outputFormat= */ null);
Transformer transformer =
new Transformer.Builder(context)
.setMuxerFactory(new InAppMuxer.Factory.Builder().build())

View File

@ -34,7 +34,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_3840W_2160H
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_3840W_2160H_5_SECOND_HIGHMOTION;
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_640W_480H_31_SECOND_ROOF_SONYXPERIAXZ3;
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_7680W_4320H_31_SECOND_ROOF_SAMSUNGS20ULTRA5G;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import android.content.Context;
import android.net.Uri;
@ -134,16 +134,14 @@ public class BitrateAnalysisTest {
}
Context context = ApplicationProvider.getApplicationContext();
if (skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri),
/* outputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri)
.buildUpon()
.setAverageBitrate(bitrate)
.build())) {
return;
}
.build());
Transformer transformer =
new Transformer.Builder(context)

View File

@ -19,6 +19,7 @@ package androidx.media3.transformer.mh.analysis;
import static androidx.media3.common.C.MEDIA_CODEC_PRIORITY_NON_REALTIME;
import static androidx.media3.common.C.MEDIA_CODEC_PRIORITY_REALTIME;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import android.content.Context;
@ -100,13 +101,11 @@ public class EncoderPerformanceAnalysisTest {
"analyzePerformance_%s_OpRate_%d_Priority_%d", filename, operatingRate, priority);
Context context = ApplicationProvider.getApplicationContext();
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri),
/* outputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri))) {
return;
}
/* outputFormat= */ AndroidTestUtil.getFormatForTestFile(fileUri));
if (Util.SDK_INT < 23) {
recordTestSkipped(

View File

@ -41,8 +41,8 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_640W_480H_3
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_7680W_4320H_31_SECOND_ROOF_SAMSUNGS20ULTRA5G;
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_854W_480H_30_SECOND_ROOF_ONEPLUSNORD2_DOWNSAMPLED;
import static androidx.media3.transformer.AndroidTestUtil.MP4_REMOTE_854W_480H_30_SECOND_ROOF_REDMINOTE9_DOWNSAMPLED;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.getFormatForTestFile;
import static androidx.media3.transformer.AndroidTestUtil.skipAndLogIfFormatsUnsupported;
import static androidx.media3.transformer.ExportTestResult.SSIM_UNSET;
import static com.google.common.collect.Iterables.getLast;
@ -144,13 +144,11 @@ public class SsimMapperTest {
String.format(
"ssim_search_VBR_%s", checkNotNull(getLast(FORWARD_SLASH_SPLITTER.split(mimeType))));
if (skipAndLogIfFormatsUnsupported(
assumeFormatsSupported(
ApplicationProvider.getApplicationContext(),
testIdPrefix + "_codecSupport",
/* inputFormat= */ getFormatForTestFile(fileUri),
/* outputFormat= */ null)) {
return;
}
/* outputFormat= */ null);
new SsimBinarySearcher(
ApplicationProvider.getApplicationContext(), testIdPrefix, fileUri, mimeType)