Fix a concurrency issue where the shader capacity was set off the GL thread

Also add tests to cover transitions between BT709 and BT601.

PiperOrigin-RevId: 641224971
commit 39e572ad6d (parent 38a7229d96)
@@ -191,8 +191,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public void setSamplingGlShaderProgram(GlShaderProgram samplingGlShaderProgram) {
     checkState(samplingGlShaderProgram instanceof ExternalShaderProgram);
-    externalShaderProgramInputCapacity.set(0);
-    this.externalShaderProgram = (ExternalShaderProgram) samplingGlShaderProgram;
+    videoFrameProcessingTaskExecutor.submit(
+        () -> {
+          externalShaderProgramInputCapacity.set(0);
+          this.externalShaderProgram = (ExternalShaderProgram) samplingGlShaderProgram;
+        });
   }

   @Override
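Note: the change above moves the capacity reset and shader assignment onto the video frame processing task executor, so both mutations happen on the GL thread rather than on whichever thread calls the setter. The standalone sketch below illustrates the same single-thread-confinement pattern; the class, field, and executor names are illustrative stand-ins, not the media3 implementation.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative only: confines shader-related state changes to one worker thread,
// mirroring how the fix defers work to the GL task executor.
final class GlConfinedShaderState {
  // A single worker thread plays the role of the GL thread.
  private final ExecutorService glExecutor = Executors.newSingleThreadExecutor();
  // Stand-ins for externalShaderProgramInputCapacity and externalShaderProgram.
  private final AtomicInteger inputCapacity = new AtomicInteger();
  private Object shaderProgram; // Only touched from glExecutor's thread.

  // May be called from any thread; the mutation itself runs on the worker thread,
  // so it is serialized with frame-processing tasks instead of racing them.
  void setSamplingShaderProgram(Object newProgram) {
    glExecutor.submit(
        () -> {
          inputCapacity.set(0);
          shaderProgram = newProgram;
        });
  }
}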
BIN  libraries/test_data/src/test/assets/media/mp4/bt601.mp4  (new file; binary file not shown)
@@ -103,8 +103,8 @@ public final class AndroidTestUtil {
   // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
   public static final int MP4_ASSET_FRAME_COUNT = 30;

-  public static final String BT601_ASSET_URI_STRING = "asset:///media/mp4/bt601.mov";
-  public static final Format BT601_ASSET_FORMAT =
+  public static final String BT601_MOV_ASSET_URI_STRING = "asset:///media/mp4/bt601.mov";
+  public static final Format BT601_MOV_ASSET_FORMAT =
       new Format.Builder()
           .setSampleMimeType(VIDEO_H264)
           .setWidth(640)
@@ -119,6 +119,25 @@ public final class AndroidTestUtil {
           .setCodecs("avc1.4D001E")
           .build();

+  public static final String BT601_MP4_ASSET_URI_STRING = "asset:///media/mp4/bt601.mp4";
+  public static final Format BT601_MP4_ASSET_FORMAT =
+      new Format.Builder()
+          .setSampleMimeType(VIDEO_H264)
+          .setWidth(360)
+          .setHeight(240)
+          .setFrameRate(29.97f)
+          .setColorInfo(
+              new ColorInfo.Builder()
+                  .setColorSpace(C.COLOR_SPACE_BT601)
+                  .setColorRange(C.COLOR_RANGE_LIMITED)
+                  .setColorTransfer(C.COLOR_TRANSFER_SDR)
+                  .build())
+          .setCodecs("avc1.42C00D")
+          .build();
+  // Result of the following command for BT601_MP4_ASSET_URI_STRING
+  // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames bt601.mp4
+  public static final int BT601_MP4_ASSET_FRAME_COUNT = 30;
+
   public static final String MP4_PORTRAIT_ASSET_URI_STRING =
       "asset:///media/mp4/sample_portrait.mp4";
   public static final Format MP4_PORTRAIT_ASSET_FORMAT =
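If it is useful to sanity-check the new bt601.mp4 asset's color metadata on a device (beyond the ffprobe command in the comment above), a check along the following lines could read the declared color standard with the platform MediaExtractor. This is an illustrative sketch, not part of the commit; KEY_COLOR_STANDARD is only present when the container exposes it, so a missing key is inconclusive rather than a failure.

import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.io.IOException;

final class ColorStandardCheck {
  // Returns the declared color standard of the first video track, or -1 if absent.
  static int readColorStandard(String path) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    try {
      extractor.setDataSource(path);
      for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime != null && mime.startsWith("video/")) {
          return format.containsKey(MediaFormat.KEY_COLOR_STANDARD)
              ? format.getInteger(MediaFormat.KEY_COLOR_STANDARD)
              : -1; // Metadata not exposed for this track.
        }
      }
      return -1;
    } finally {
      extractor.release();
    }
  }
}

A return value of MediaFormat.COLOR_STANDARD_BT601_NTSC or COLOR_STANDARD_BT601_PAL corresponds to the BT.601 ColorInfo declared for BT601_MP4_ASSET_FORMAT above.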
@@ -17,6 +17,9 @@

 package androidx.media3.transformer;

+import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET_FORMAT;
+import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET_FRAME_COUNT;
+import static androidx.media3.transformer.AndroidTestUtil.BT601_MP4_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FRAME_COUNT;
@@ -28,7 +31,10 @@ import static com.google.common.truth.Truth.assertThat;
 import android.content.Context;
 import android.net.Uri;
+import androidx.media3.common.C;
+import androidx.media3.common.ColorInfo;
 import androidx.media3.common.Format;
 import androidx.media3.common.MediaItem;
+import androidx.media3.common.MimeTypes;
 import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.effect.Presentation;
 import androidx.test.core.app.ApplicationProvider;
@@ -51,6 +57,16 @@ import org.junit.runner.RunWith;
 @RunWith(AndroidJUnit4.class)
 public class TransformerMixedInputEndToEndTest {

+  // Images are encoded in H265 and BT709 SDR by default.
+  private static final Format IMAGE_VIDEO_ENCODING_FORMAT =
+      new Format.Builder()
+          .setSampleMimeType(MimeTypes.VIDEO_H265)
+          .setFrameRate(30.f)
+          .setWidth(480)
+          .setHeight(360)
+          .setColorInfo(ColorInfo.SDR_BT709_LIMITED)
+          .build();
+
   private final Context context = ApplicationProvider.getApplicationContext();
   @Rule public final TestName testName = new TestName();

@@ -65,7 +81,10 @@ public class TransformerMixedInputEndToEndTest {
   public void videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount()
       throws Exception {
     assumeFormatsSupported(
-        context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET_FORMAT,
+        /* outputFormat= */ IMAGE_VIDEO_ENCODING_FORMAT);
     Transformer transformer =
         new Transformer.Builder(context)
             .setEncoderFactory(
@@ -116,10 +135,18 @@ public class TransformerMixedInputEndToEndTest {

   @Test
   public void
-      videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
+      videoEditing_withComplexMixedColorSpaceSdrVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
           throws Exception {
     assumeFormatsSupported(
-        context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET_FORMAT,
+        /* outputFormat= */ BT601_MP4_ASSET_FORMAT);
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ BT601_MP4_ASSET_FORMAT,
+        /* outputFormat= */ BT601_MP4_ASSET_FORMAT);
     Transformer transformer =
         new Transformer.Builder(context)
             .setEncoderFactory(
@@ -131,7 +158,9 @@ public class TransformerMixedInputEndToEndTest {
         createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
     EditedMediaItem imageEditedMediaItem2 =
         createImageEditedMediaItem(JPG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
-    EditedMediaItem videoEditedMediaItem =
+    EditedMediaItem bt601VideoEditedMediaItem =
+        createVideoEditedMediaItem(BT601_MP4_ASSET_URI_STRING, /* height= */ 360);
+    EditedMediaItem bt709VideoEditedMediaItem =
         createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);
     ExportTestResult result =
         new TransformerAndroidTestRunner.Builder(context, transformer)
@@ -139,25 +168,35 @@ public class TransformerMixedInputEndToEndTest {
             .run(
                 testId,
                 buildComposition(
-                    videoEditedMediaItem,
-                    videoEditedMediaItem,
+                    bt601VideoEditedMediaItem,
+                    bt709VideoEditedMediaItem,
                     imageEditedMediaItem1,
                     imageEditedMediaItem2,
-                    videoEditedMediaItem,
+                    bt709VideoEditedMediaItem,
                     imageEditedMediaItem1,
-                    videoEditedMediaItem));
+                    bt601VideoEditedMediaItem));

     assertThat(result.exportResult.videoFrameCount)
-        .isEqualTo(3 * imageFrameCount + 4 * MP4_ASSET_FRAME_COUNT);
+        .isEqualTo(
+            3 * imageFrameCount + 2 * MP4_ASSET_FRAME_COUNT + 2 * BT601_MP4_ASSET_FRAME_COUNT);
     assertThat(new File(result.filePath).length()).isGreaterThan(0);
   }

   @Test
   public void
-      videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
+      videoEditing_withComplexMixedColorSpaceSdrVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
           throws Exception {
+
     assumeFormatsSupported(
-        context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET_FORMAT,
+        /* outputFormat= */ IMAGE_VIDEO_ENCODING_FORMAT);
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ BT601_MP4_ASSET_FORMAT,
+        /* outputFormat= */ IMAGE_VIDEO_ENCODING_FORMAT);
     Transformer transformer =
         new Transformer.Builder(context)
             .setEncoderFactory(
@@ -167,9 +206,9 @@ public class TransformerMixedInputEndToEndTest {
     int imageFrameCount = 34;
     EditedMediaItem imageEditedMediaItem =
         createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
-    // Result of the following command:
-    // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
-    EditedMediaItem videoEditedMediaItem =
+    EditedMediaItem bt601VideoEditedMediaItem =
+        createVideoEditedMediaItem(BT601_MP4_ASSET_URI_STRING, /* height= */ 480);
+    EditedMediaItem bt709VideoEditedMediaItem =
         createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);
     ExportTestResult result =
         new TransformerAndroidTestRunner.Builder(context, transformer)
@@ -178,15 +217,107 @@ public class TransformerMixedInputEndToEndTest {
                 testId,
                 buildComposition(
                     imageEditedMediaItem,
-                    videoEditedMediaItem,
-                    videoEditedMediaItem,
+                    bt709VideoEditedMediaItem,
+                    bt601VideoEditedMediaItem,
                     imageEditedMediaItem,
                     imageEditedMediaItem,
-                    videoEditedMediaItem,
+                    bt601VideoEditedMediaItem,
                     imageEditedMediaItem));

     assertThat(result.exportResult.videoFrameCount)
-        .isEqualTo(4 * imageFrameCount + 3 * MP4_ASSET_FRAME_COUNT);
+        .isEqualTo(4 * imageFrameCount + MP4_ASSET_FRAME_COUNT + 2 * BT601_MP4_ASSET_FRAME_COUNT);
     assertThat(new File(result.filePath).length()).isGreaterThan(0);
   }

+  @Test
+  public void
+      videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
+          throws Exception {
+    assumeFormatsSupported(
+        context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ BT601_MP4_ASSET_FORMAT,
+        /* outputFormat= */ MP4_ASSET_FORMAT);
+    Transformer transformer =
+        new Transformer.Builder(context)
+            .setEncoderFactory(
+                new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
+            .build();
+
+    int imageFrameCount = 33;
+    EditedMediaItem imageEditedMediaItem1 =
+        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
+    EditedMediaItem imageEditedMediaItem2 =
+        createImageEditedMediaItem(JPG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
+    EditedMediaItem videoEditedMediaItem1 =
+        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);
+    EditedMediaItem videoEditedMediaItem2 =
+        createVideoEditedMediaItem(BT601_MP4_ASSET_URI_STRING, /* height= */ 360);
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(
+                testId,
+                buildComposition(
+                    videoEditedMediaItem1,
+                    videoEditedMediaItem2,
+                    imageEditedMediaItem1,
+                    imageEditedMediaItem2,
+                    videoEditedMediaItem1,
+                    imageEditedMediaItem1,
+                    videoEditedMediaItem2));
+
+    assertThat(result.exportResult.videoFrameCount)
+        .isEqualTo(
+            3 * imageFrameCount + 2 * MP4_ASSET_FRAME_COUNT + 2 * BT601_MP4_ASSET_FRAME_COUNT);
+    assertThat(new File(result.filePath).length()).isGreaterThan(0);
+  }
+
+  @Test
+  public void
+      videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
+          throws Exception {
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET_FORMAT,
+        /* outputFormat= */ IMAGE_VIDEO_ENCODING_FORMAT);
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ BT601_MP4_ASSET_FORMAT,
+        /* outputFormat= */ IMAGE_VIDEO_ENCODING_FORMAT);
+    Transformer transformer =
+        new Transformer.Builder(context)
+            .setEncoderFactory(
+                new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
+            .build();
+
+    int imageFrameCount = 34;
+    EditedMediaItem imageEditedMediaItem =
+        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
+    EditedMediaItem videoEditedMediaItem1 =
+        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);
+    EditedMediaItem videoEditedMediaItem2 =
+        createVideoEditedMediaItem(BT601_MP4_ASSET_URI_STRING, /* height= */ 480);
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(
+                testId,
+                buildComposition(
+                    imageEditedMediaItem,
+                    videoEditedMediaItem1,
+                    videoEditedMediaItem2,
+                    imageEditedMediaItem,
+                    imageEditedMediaItem,
+                    videoEditedMediaItem2,
+                    imageEditedMediaItem));
+
+    assertThat(result.exportResult.videoFrameCount)
+        .isEqualTo(4 * imageFrameCount + MP4_ASSET_FRAME_COUNT + 2 * BT601_MP4_ASSET_FRAME_COUNT);
+    assertThat(new File(result.filePath).length()).isGreaterThan(0);
+  }
+
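All of the updated assertions follow the same bookkeeping: the expected output frame count is the sum of the per-item frame counts in sequence order, independent of each item's color space. A small sketch of that arithmetic (the helper below is made up for illustration, not a media3 API):

import java.util.List;

final class ExpectedFrameCount {
  // Sums per-item frame counts, mirroring the assertions in the tests above.
  static int of(List<Integer> perItemFrameCounts) {
    int total = 0;
    for (int count : perItemFrameCounts) {
      total += count;
    }
    return total;
  }
}

For the mixed EndWithVideo composition (two BT.601 videos, two BT.709 videos, three image items), this reduces to 3 * imageFrameCount + 2 * MP4_ASSET_FRAME_COUNT + 2 * BT601_MP4_ASSET_FRAME_COUNT, which is exactly what the test asserts.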
@@ -21,8 +21,8 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Util.SDK_INT;
 import static androidx.media3.effect.DebugTraceUtil.EVENT_SURFACE_TEXTURE_TRANSFORM_FIX;
 import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
-import static androidx.media3.transformer.AndroidTestUtil.BT601_ASSET_FORMAT;
-import static androidx.media3.transformer.AndroidTestUtil.BT601_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.BT601_MOV_ASSET_FORMAT;
+import static androidx.media3.transformer.AndroidTestUtil.BT601_MOV_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_FORMAT;
@@ -228,18 +228,20 @@ public final class TransformerSequenceEffectTest {
     assumeFormatsSupported(
         context,
         testId,
-        /* inputFormat= */ BT601_ASSET_FORMAT,
-        /* outputFormat= */ BT601_ASSET_FORMAT);
+        /* inputFormat= */ BT601_MOV_ASSET_FORMAT,
+        /* outputFormat= */ BT601_MOV_ASSET_FORMAT);
     List<MediaCodecInfo> mediaCodecInfoList =
         MediaCodecSelector.DEFAULT.getDecoderInfos(
-            checkNotNull(BT601_ASSET_FORMAT.sampleMimeType),
+            checkNotNull(BT601_MOV_ASSET_FORMAT.sampleMimeType),
             /* requiresSecureDecoder= */ false,
             /* requiresTunnelingDecoder= */ false);
     Composition composition =
         createComposition(
             /* presentation= */ null,
             clippedVideo(
-                BT601_ASSET_URI_STRING, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
+                BT601_MOV_ASSET_URI_STRING,
+                NO_EFFECT,
+                /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));

     boolean atLeastOneDecoderSucceeds = false;
     for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
@@ -556,12 +558,12 @@ public final class TransformerSequenceEffectTest {
     assumeFormatsSupported(
         context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
     assumeFormatsSupported(
-        context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null);
+        context, testId, /* inputFormat= */ BT601_MOV_ASSET_FORMAT, /* outputFormat= */ null);
     Composition composition =
         createComposition(
             Presentation.createForHeight(EXPORT_HEIGHT),
             clippedVideo(
-                BT601_ASSET_URI_STRING,
+                BT601_MOV_ASSET_URI_STRING,
                 ImmutableList.of(RgbFilter.createInvertedFilter()),
                 SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
             clippedVideo(MP4_ASSET_URI_STRING, NO_EFFECT, SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS));
@@ -581,12 +583,12 @@ public final class TransformerSequenceEffectTest {
     assumeFormatsSupported(
         context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
     assumeFormatsSupported(
-        context, testId, /* inputFormat= */ BT601_ASSET_FORMAT, /* outputFormat= */ null);
+        context, testId, /* inputFormat= */ BT601_MOV_ASSET_FORMAT, /* outputFormat= */ null);
     Composition composition =
         createComposition(
             Presentation.createForHeight(EXPORT_HEIGHT),
             clippedVideo(
-                BT601_ASSET_URI_STRING,
+                BT601_MOV_ASSET_URI_STRING,
                 ImmutableList.of(RgbFilter.createInvertedFilter()),
                 SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS),
             oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT));
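These tests gate themselves on device capability via assumeFormatsSupported before exercising the BT.601 path. Outside the media3 test utilities, a comparable pre-check could query MediaCodecList directly, as in the hedged sketch below (the method name and the idea of checking only the decoder side are illustrative simplifications):

import android.media.MediaCodecList;
import android.media.MediaFormat;

final class DecoderSupportCheck {
  // Returns whether any decoder on the device accepts the given video format.
  static boolean canDecode(String mimeType, int width, int height) {
    MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
    MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    // findDecoderForFormat returns null when no installed decoder supports the format.
    return codecList.findDecoderForFormat(format) != null;
  }
}

For example, canDecode("video/avc", 360, 240) roughly corresponds to the input-side half of the BT601_MP4_ASSET_FORMAT assumption.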