Apply all video Composition effects to single-sequence exports

PiperOrigin-RevId: 675933601
Authored by kimvde on 2024-09-18 04:16:01 -07:00; committed by Copybara-Service
parent fd3d8e1782
commit 2951a2599c
10 changed files with 89 additions and 77 deletions
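
In short: composition-level video effects set through Composition.Builder.setEffects are now forwarded to single-sequence exports in full, where previously only a Presentation instance was plucked out of the list (see the removed loops below). A minimal usage sketch of the newly supported path, modeled on the new end-to-end test in this commit; the input URI, output path, and effect choices are illustrative placeholders, not part of the change:

// Sketch only: assumes a Context named `context` and media3's Transformer,
// Presentation, and RgbFilter APIs. The URI and output path are hypothetical.
EditedMediaItem item =
    new EditedMediaItem.Builder(MediaItem.fromUri("asset:///video.mp4")).build();
Composition composition =
    new Composition.Builder(new EditedMediaItemSequence(item))
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                // Before this commit, only the Presentation below would have
                // taken effect on a single-sequence export; now the grayscale
                // filter is applied too.
                /* videoEffects= */ ImmutableList.of(
                    Presentation.createForHeight(720),
                    RgbFilter.createGrayscaleFilter())))
        .build();
new Transformer.Builder(context).build().start(composition, "/sdcard/output.mp4");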

View File: PreviewingSingleInputVideoGraph.java

@@ -17,7 +17,6 @@
package androidx.media3.effect;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
@@ -65,13 +64,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
Executor listenerExecutor,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
@Nullable Presentation presentation = null;
for (int i = 0; i < compositionEffects.size(); i++) {
Effect effect = compositionEffects.get(i);
if (effect instanceof Presentation) {
presentation = (Presentation) effect;
}
}
return new PreviewingSingleInputVideoGraph(
context,
videoFrameProcessorFactory,
@@ -79,7 +71,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
debugViewProvider,
listener,
listenerExecutor,
presentation,
initialTimestampOffsetUs);
}
}
@@ -91,7 +82,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
@Nullable Presentation presentation,
long initialTimestampOffsetUs) {
super(
context,
@@ -103,7 +93,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
VideoCompositorSettings.DEFAULT,
// Previewing needs frame render timing.
/* renderFramesAutomatically= */ false,
presentation,
initialTimestampOffsetUs);
}

View File: SingleInputVideoGraph.java

@@ -48,7 +48,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private final Executor listenerExecutor;
private final boolean renderFramesAutomatically;
private final long initialTimestampOffsetUs;
@Nullable private final Presentation presentation;
@Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private SurfaceInfo outputSurfaceInfo;
@@ -71,7 +70,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
boolean renderFramesAutomatically,
@Nullable Presentation presentation,
long initialTimestampOffsetUs) {
checkState(
VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings),
@@ -84,7 +82,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
this.debugViewProvider = debugViewProvider;
this.listenerExecutor = listenerExecutor;
this.renderFramesAutomatically = renderFramesAutomatically;
this.presentation = presentation;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.inputIndex = C.INDEX_UNSET;
}
@@ -203,9 +200,4 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs;
}
@Nullable
protected Presentation getPresentation() {
return presentation;
}
}

View File: TransformerEndToEndTest.java

@@ -597,6 +597,64 @@ public class TransformerEndToEndTest {
assertThat(new File(result.filePath).length()).isGreaterThan(0);
}
@Test
public void videoEditing_withSingleSequenceAndCompositionEffect_appliesEffect() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET.uri));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
InputTimestampRecordingShaderProgram timestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram();
ImmutableList<Effect> videoEffects =
ImmutableList.of((GlEffect) (context, useHdr) -> timestampRecordingShaderProgram);
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
new TransformerAndroidTestRunner.Builder(context, transformer).build().run(testId, composition);
assertThat(timestampRecordingShaderProgram.getInputTimestampsUs()).isNotEmpty();
}
@Test
public void videoEditing_withMultiSequenceAndCompositionEffect_appliesEffect() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET.uri));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
InputTimestampRecordingShaderProgram timestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram();
ImmutableList<Effect> videoEffects =
ImmutableList.of((GlEffect) (context, useHdr) -> timestampRecordingShaderProgram);
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence(editedMediaItem),
new EditedMediaItemSequence(editedMediaItem))
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
new TransformerAndroidTestRunner.Builder(context, transformer).build().run(testId, composition);
assertThat(timestampRecordingShaderProgram.getInputTimestampsUs()).isNotEmpty();
}
@Test
public void videoOnly_completesWithConsistentDuration() throws Exception {
assumeFormatsSupported(

View File: CompositionPlayerPixelTest.java

@@ -31,7 +31,6 @@ import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import android.view.SurfaceView;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Size;
@@ -41,8 +40,6 @@ import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects;
import androidx.media3.transformer.SurfaceTestActivity;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.concurrent.TimeoutException;
@@ -67,27 +64,20 @@ public class CompositionPlayerPixelTest {
@Rule public final TestName testName = new TestName();
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
new ActivityScenarioRule<>(SurfaceTestActivity.class);
private final Context context = getInstrumentation().getContext().getApplicationContext();
private @MonotonicNonNull CompositionPlayer player;
private @MonotonicNonNull ImageReader outputImageReader;
private String testId;
private SurfaceView surfaceView;
@Before
public void setUp() {
rule.getScenario().onActivity(activity -> surfaceView = activity.getSurfaceView());
testId = testName.getMethodName();
}
@After
public void tearDown() {
rule.getScenario().close();
getInstrumentation()
.runOnMainSync(
() -> {

View File: Composition.java

@@ -111,8 +111,6 @@ public final class Composition {
*
* <p>The default value is {@link Effects#EMPTY}.
*
* <p>This only works with the {@code Presentation} effect.
*
* @param effects The {@link Composition} {@link Effects}.
* @return This builder.
*/
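
With the Presentation-only restriction above removed, ordering becomes the detail worth knowing: per the combinedEffects builder added to VideoFrameProcessingWrapper at the end of this commit, item-level video effects run before composition-level ones. A sketch (mediaItem is assumed in scope; both effect instances are arbitrary placeholders):

// Runs first: attached to the EditedMediaItem.
Effects itemEffects =
    new Effects(
        /* audioProcessors= */ ImmutableList.of(),
        /* videoEffects= */ ImmutableList.of(RgbFilter.createInvertedFilter()));
// Runs last: attached to the Composition.
Effects compositionEffects =
    new Effects(
        /* audioProcessors= */ ImmutableList.of(),
        /* videoEffects= */ ImmutableList.of(Presentation.createForHeight(480)));
Composition composition =
    new Composition.Builder(
            new EditedMediaItemSequence(
                new EditedMediaItem.Builder(mediaItem).setEffects(itemEffects).build()))
        .setEffects(compositionEffects)
        .build();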

View File: Transformer.java

@@ -58,7 +58,6 @@ import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import androidx.media3.muxer.Muxer;
import com.google.common.collect.ImmutableList;
@@ -1018,14 +1017,8 @@
* EditedMediaItemSequence}, while the audio format will be determined by the {@code
* AudioMediaItem} in the second {@code EditedMediaItemSequence}.
*
* <p>This method is under development. A {@link Composition} must meet the following conditions:
*
* <ul>
* <li>The video composition {@link Presentation} effect is applied after input streams are
* composited. Other composition effects are ignored.
* </ul>
*
* <p>{@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the
* <p>Some {@linkplain Composition compositions} are not supported yet. More specifically,
* {@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the
* following conditions:
*
* <ul>
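
For reference, the sequence shape this javadoc describes, with the video track taken from the first sequence and the audio track from the second, looks like the following sketch (the URIs are hypothetical placeholders, not part of the documented API):

EditedMediaItem videoItem =
    new EditedMediaItem.Builder(MediaItem.fromUri("asset:///video.mp4"))
        .setRemoveAudio(true) // Video format comes from the first sequence.
        .build();
EditedMediaItem audioItem =
    new EditedMediaItem.Builder(MediaItem.fromUri("asset:///audio.m4a"))
        .setRemoveVideo(true) // Audio format comes from the second sequence.
        .build();
Composition composition =
    new Composition.Builder(
            new EditedMediaItemSequence(videoItem),
            new EditedMediaItemSequence(audioItem))
        .build();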

View File: TransformerMultipleInputVideoGraph.java

@@ -27,6 +27,7 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.effect.MultipleInputVideoGraph;
import androidx.media3.effect.VideoCompositorSettings;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.Executor;
@@ -99,7 +100,9 @@
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
registerInput(inputIndex);
return new VideoFrameProcessingWrapper(
getProcessor(inputIndex), /* presentation= */ null, getInitialTimestampOffsetUs());
getProcessor(inputIndex),
/* postProcessingEffects= */ ImmutableList.of(),
getInitialTimestampOffsetUs());
}
@Override
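
The two graph implementations now diverge deliberately. Here in the multiple-input graph, each per-input wrapper receives an empty postProcessingEffects list, because composition-level effects are applied after the inputs are composited (an inference consistent with the removed Transformer javadoc above); in TransformerSingleInputVideoGraph below there is no compositing stage, so the full compositionEffects list is appended to the lone input's chain instead:

// Multiple-input graph (this file): composition effects run after compositing.
new VideoFrameProcessingWrapper(
    getProcessor(inputIndex),
    /* postProcessingEffects= */ ImmutableList.of(),
    getInitialTimestampOffsetUs());

// Single-input graph (below): composition effects are appended to the input.
new VideoFrameProcessingWrapper(
    getProcessor(inputIndex),
    compositionEffects,
    getInitialTimestampOffsetUs());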

View File: TransformerSingleInputVideoGraph.java

@@ -20,13 +20,11 @@ import static androidx.media3.common.VideoFrameProcessor.RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME;
import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.SingleInputVideoGraph;
import androidx.media3.effect.VideoCompositorSettings;
import java.util.List;
@@ -60,13 +58,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
@Nullable Presentation presentation = null;
for (int i = 0; i < compositionEffects.size(); i++) {
Effect effect = compositionEffects.get(i);
if (effect instanceof Presentation) {
presentation = (Presentation) effect;
}
}
return new TransformerSingleInputVideoGraph(
context,
videoFrameProcessorFactory,
@@ -76,11 +67,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
listenerExecutor,
videoCompositorSettings,
renderFramesAutomatically,
presentation,
compositionEffects,
initialTimestampOffsetUs);
}
}
private final List<Effect> compositionEffects;
private @MonotonicNonNull VideoFrameProcessingWrapper videoFrameProcessingWrapper;
private TransformerSingleInputVideoGraph(
@@ -92,7 +84,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
boolean renderFramesAutomatically,
@Nullable Presentation presentation,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
super(
context,
@@ -103,8 +95,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
listenerExecutor,
videoCompositorSettings,
renderFramesAutomatically,
presentation,
initialTimestampOffsetUs);
this.compositionEffects = compositionEffects;
}
@Override
@@ -113,7 +105,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
registerInput(inputIndex);
videoFrameProcessingWrapper =
new VideoFrameProcessingWrapper(
getProcessor(inputIndex), getPresentation(), getInitialTimestampOffsetUs());
getProcessor(inputIndex), compositionEffects, getInitialTimestampOffsetUs());
return videoFrameProcessingWrapper;
}

View File: TransformerUtil.java

@@ -132,13 +132,10 @@ public final class TransformerUtil {
TransformationRequest transformationRequest,
Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper) {
if (composition.sequences.size() > 1
|| composition.sequences.get(sequenceIndex).editedMediaItems.size() > 1) {
return !composition.transmuxVideo;
}
EditedMediaItem firstEditedMediaItem =
composition.sequences.get(sequenceIndex).editedMediaItems.get(0);
if (encoderFactory.videoNeedsEncoding()) {
return true;
}
@@ -156,9 +153,15 @@
if (inputFormat.pixelWidthHeightRatio != 1f) {
return true;
}
ImmutableList<Effect> videoEffects = firstEditedMediaItem.effects.videoEffects;
return !videoEffects.isEmpty()
&& maybeCalculateTotalRotationDegreesAppliedInEffects(videoEffects, inputFormat) == -1;
EditedMediaItem firstEditedMediaItem =
composition.sequences.get(sequenceIndex).editedMediaItems.get(0);
ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(firstEditedMediaItem.effects.videoEffects)
.addAll(composition.effects.videoEffects)
.build();
return !combinedEffects.isEmpty()
&& maybeCalculateTotalRotationDegreesAppliedInEffects(combinedEffects, inputFormat) == -1;
}
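
A practical consequence of folding composition.effects.videoEffects into this check (a reading of the code above, not documented behavior): a combined effects list that amounts to a pure rotation can still avoid re-encoding, since maybeCalculateTotalRotationDegreesAppliedInEffects returns a valid total rather than -1, while any other non-empty combined list forces the encode path. A sketch, assuming media3's ScaleAndRotateTransformation and RgbFilter:

// May still transmux: the helper can express a bare 90-degree rotation as a
// total rotation, so it does not return -1.
Effects rotationOnly =
    new Effects(
        /* audioProcessors= */ ImmutableList.of(),
        /* videoEffects= */ ImmutableList.of(
            new ScaleAndRotateTransformation.Builder().setRotationDegrees(90).build()));

// Forces re-encoding: a color filter cannot be expressed as rotation metadata,
// so the helper returns -1 and the method above returns true.
Effects requiresEncoding =
    new Effects(
        /* audioProcessors= */ ImmutableList.of(),
        /* videoEffects= */ ImmutableList.of(RgbFilter.createGrayscaleFilter()));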
/**

View File: VideoFrameProcessingWrapper.java

@@ -33,7 +33,6 @@ import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
@@ -41,18 +40,18 @@
/** A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events. */
/* package */ final class VideoFrameProcessingWrapper implements GraphInput {
private final VideoFrameProcessor videoFrameProcessor;
private final AtomicLong mediaItemOffsetUs;
private final List<Effect> postProcessingEffects;
private final long initialTimestampOffsetUs;
@Nullable final Presentation presentation;
private final AtomicLong mediaItemOffsetUs;
public VideoFrameProcessingWrapper(
VideoFrameProcessor videoFrameProcessor,
@Nullable Presentation presentation,
List<Effect> postProcessingEffects,
long initialTimestampOffsetUs) {
this.videoFrameProcessor = videoFrameProcessor;
this.mediaItemOffsetUs = new AtomicLong();
this.postProcessingEffects = postProcessingEffects;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.presentation = presentation;
mediaItemOffsetUs = new AtomicLong();
}
@Override
@@ -65,11 +64,16 @@
durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs);
if (decodedFormat != null) {
Size decodedSize = getDecodedSize(decodedFormat);
ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(editedMediaItem.effects.videoEffects)
.addAll(postProcessingEffects)
.build();
videoFrameProcessor.registerInputStream(
isSurfaceAssetLoaderMediaItem
? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION
: getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)),
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation),
combinedEffects,
new FrameInfo.Builder(
checkNotNull(decodedFormat.colorInfo),
decodedSize.getWidth(),
@@ -137,16 +141,6 @@
return new Size(decodedWidth, decodedHeight);
}
private static ImmutableList<Effect> createEffectListWithPresentation(
List<Effect> effects, @Nullable Presentation presentation) {
if (presentation == null) {
return ImmutableList.copyOf(effects);
}
ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
effectsWithPresentationBuilder.addAll(effects).add(presentation);
return effectsWithPresentationBuilder.build();
}
private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP;