Apply all video Composition effects to single sequence exports

PiperOrigin-RevId: 675933601
This commit is contained in:
kimvde 2024-09-18 04:16:01 -07:00 committed by Copybara-Service
parent fd3d8e1782
commit 2951a2599c
10 changed files with 89 additions and 77 deletions

View File

@@ -17,7 +17,6 @@
package androidx.media3.effect; package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
@@ -65,13 +64,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
Executor listenerExecutor, Executor listenerExecutor,
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs) {
@Nullable Presentation presentation = null;
for (int i = 0; i < compositionEffects.size(); i++) {
Effect effect = compositionEffects.get(i);
if (effect instanceof Presentation) {
presentation = (Presentation) effect;
}
}
return new PreviewingSingleInputVideoGraph( return new PreviewingSingleInputVideoGraph(
context, context,
videoFrameProcessorFactory, videoFrameProcessorFactory,
@@ -79,7 +71,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
debugViewProvider, debugViewProvider,
listener, listener,
listenerExecutor, listenerExecutor,
presentation,
initialTimestampOffsetUs); initialTimestampOffsetUs);
} }
} }
@@ -91,7 +82,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
Listener listener, Listener listener,
Executor listenerExecutor, Executor listenerExecutor,
@Nullable Presentation presentation,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs) {
super( super(
context, context,
@@ -103,7 +93,6 @@ public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
VideoCompositorSettings.DEFAULT, VideoCompositorSettings.DEFAULT,
// Previewing needs frame render timing. // Previewing needs frame render timing.
/* renderFramesAutomatically= */ false, /* renderFramesAutomatically= */ false,
presentation,
initialTimestampOffsetUs); initialTimestampOffsetUs);
} }

View File

@@ -48,7 +48,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
private final Executor listenerExecutor; private final Executor listenerExecutor;
private final boolean renderFramesAutomatically; private final boolean renderFramesAutomatically;
private final long initialTimestampOffsetUs; private final long initialTimestampOffsetUs;
@Nullable private final Presentation presentation;
@Nullable private VideoFrameProcessor videoFrameProcessor; @Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private SurfaceInfo outputSurfaceInfo; @Nullable private SurfaceInfo outputSurfaceInfo;
@@ -71,7 +70,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
@Nullable Presentation presentation,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs) {
checkState( checkState(
VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings), VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings),
@@ -84,7 +82,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
this.debugViewProvider = debugViewProvider; this.debugViewProvider = debugViewProvider;
this.listenerExecutor = listenerExecutor; this.listenerExecutor = listenerExecutor;
this.renderFramesAutomatically = renderFramesAutomatically; this.renderFramesAutomatically = renderFramesAutomatically;
this.presentation = presentation;
this.initialTimestampOffsetUs = initialTimestampOffsetUs; this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.inputIndex = C.INDEX_UNSET; this.inputIndex = C.INDEX_UNSET;
} }
@@ -203,9 +200,4 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
protected long getInitialTimestampOffsetUs() { protected long getInitialTimestampOffsetUs() {
return initialTimestampOffsetUs; return initialTimestampOffsetUs;
} }
@Nullable
protected Presentation getPresentation() {
return presentation;
}
} }

View File

@@ -597,6 +597,64 @@ public class TransformerEndToEndTest {
assertThat(new File(result.filePath).length()).isGreaterThan(0); assertThat(new File(result.filePath).length()).isGreaterThan(0);
} }
@Test
public void videoEditing_withSingleSequenceAndCompositionEffect_appliesEffect() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET.uri));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
InputTimestampRecordingShaderProgram timestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram();
ImmutableList<Effect> videoEffects =
ImmutableList.of((GlEffect) (context, useHdr) -> timestampRecordingShaderProgram);
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem))
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
new TransformerAndroidTestRunner.Builder(context, transformer).build().run(testId, composition);
assertThat(timestampRecordingShaderProgram.getInputTimestampsUs()).isNotEmpty();
}
@Test
public void videoEditing_withMultiSequenceAndCompositionEffect_appliesEffect() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
Transformer transformer =
new Transformer.Builder(context)
.setEncoderFactory(
new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET.uri));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
InputTimestampRecordingShaderProgram timestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram();
ImmutableList<Effect> videoEffects =
ImmutableList.of((GlEffect) (context, useHdr) -> timestampRecordingShaderProgram);
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence(editedMediaItem),
new EditedMediaItemSequence(editedMediaItem))
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
new TransformerAndroidTestRunner.Builder(context, transformer).build().run(testId, composition);
assertThat(timestampRecordingShaderProgram.getInputTimestampsUs()).isNotEmpty();
}
@Test @Test
public void videoOnly_completesWithConsistentDuration() throws Exception { public void videoOnly_completesWithConsistentDuration() throws Exception {
assumeFormatsSupported( assumeFormatsSupported(

View File

@@ -31,7 +31,6 @@ import android.graphics.Bitmap;
import android.graphics.PixelFormat; import android.graphics.PixelFormat;
import android.media.Image; import android.media.Image;
import android.media.ImageReader; import android.media.ImageReader;
import android.view.SurfaceView;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.util.ConditionVariable; import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@@ -41,8 +40,6 @@ import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem; import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence; import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects; import androidx.media3.transformer.Effects;
import androidx.media3.transformer.SurfaceTestActivity;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
@@ -67,27 +64,20 @@ public class CompositionPlayerPixelTest {
@Rule public final TestName testName = new TestName(); @Rule public final TestName testName = new TestName();
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
new ActivityScenarioRule<>(SurfaceTestActivity.class);
private final Context context = getInstrumentation().getContext().getApplicationContext(); private final Context context = getInstrumentation().getContext().getApplicationContext();
private @MonotonicNonNull CompositionPlayer player; private @MonotonicNonNull CompositionPlayer player;
private @MonotonicNonNull ImageReader outputImageReader; private @MonotonicNonNull ImageReader outputImageReader;
private String testId; private String testId;
private SurfaceView surfaceView;
@Before @Before
public void setUp() { public void setUp() {
rule.getScenario().onActivity(activity -> surfaceView = activity.getSurfaceView());
testId = testName.getMethodName(); testId = testName.getMethodName();
} }
@After @After
public void tearDown() { public void tearDown() {
rule.getScenario().close();
getInstrumentation() getInstrumentation()
.runOnMainSync( .runOnMainSync(
() -> { () -> {

View File

@@ -111,8 +111,6 @@ public final class Composition {
* *
* <p>The default value is {@link Effects#EMPTY}. * <p>The default value is {@link Effects#EMPTY}.
* *
* <p>This only works with the {@code Presentation} effect.
*
* @param effects The {@link Composition} {@link Effects}. * @param effects The {@link Composition} {@link Effects}.
* @return This builder. * @return This builder.
*/ */

View File

@@ -58,7 +58,6 @@ import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.DebugTraceUtil; import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.DefaultVideoFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory; import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import androidx.media3.muxer.Muxer; import androidx.media3.muxer.Muxer;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@@ -1018,14 +1017,8 @@ public final class Transformer {
* EditedMediaItemSequence}, while the audio format will be determined by the {@code * EditedMediaItemSequence}, while the audio format will be determined by the {@code
* AudioMediaItem} in the second {@code EditedMediaItemSequence}. * AudioMediaItem} in the second {@code EditedMediaItemSequence}.
* *
* <p>This method is under development. A {@link Composition} must meet the following conditions: * <p>Some {@linkplain Composition compositions} are not supported yet. More specifically,
* * {@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the
* <ul>
* <li>The video composition {@link Presentation} effect is applied after input streams are
* composited. Other composition effects are ignored.
* </ul>
*
* <p>{@linkplain EditedMediaItemSequence Sequences} within the {@link Composition} must meet the
* following conditions: * following conditions:
* *
* <ul> * <ul>

View File

@@ -27,6 +27,7 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph; import androidx.media3.common.VideoGraph;
import androidx.media3.effect.MultipleInputVideoGraph; import androidx.media3.effect.MultipleInputVideoGraph;
import androidx.media3.effect.VideoCompositorSettings; import androidx.media3.effect.VideoCompositorSettings;
import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
@@ -99,7 +100,9 @@ import java.util.concurrent.Executor;
public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException { public GraphInput createInput(int inputIndex) throws VideoFrameProcessingException {
registerInput(inputIndex); registerInput(inputIndex);
return new VideoFrameProcessingWrapper( return new VideoFrameProcessingWrapper(
getProcessor(inputIndex), /* presentation= */ null, getInitialTimestampOffsetUs()); getProcessor(inputIndex),
/* postProcessingEffects= */ ImmutableList.of(),
getInitialTimestampOffsetUs());
} }
@Override @Override

View File

@@ -20,13 +20,11 @@ import static androidx.media3.common.VideoFrameProcessor.RENDER_OUTPUT_FRAME_WIT
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context; import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.SingleInputVideoGraph; import androidx.media3.effect.SingleInputVideoGraph;
import androidx.media3.effect.VideoCompositorSettings; import androidx.media3.effect.VideoCompositorSettings;
import java.util.List; import java.util.List;
@@ -60,13 +58,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
List<Effect> compositionEffects, List<Effect> compositionEffects,
long initialTimestampOffsetUs, long initialTimestampOffsetUs,
boolean renderFramesAutomatically) { boolean renderFramesAutomatically) {
@Nullable Presentation presentation = null;
for (int i = 0; i < compositionEffects.size(); i++) {
Effect effect = compositionEffects.get(i);
if (effect instanceof Presentation) {
presentation = (Presentation) effect;
}
}
return new TransformerSingleInputVideoGraph( return new TransformerSingleInputVideoGraph(
context, context,
videoFrameProcessorFactory, videoFrameProcessorFactory,
@@ -76,11 +67,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
listenerExecutor, listenerExecutor,
videoCompositorSettings, videoCompositorSettings,
renderFramesAutomatically, renderFramesAutomatically,
presentation, compositionEffects,
initialTimestampOffsetUs); initialTimestampOffsetUs);
} }
} }
private final List<Effect> compositionEffects;
private @MonotonicNonNull VideoFrameProcessingWrapper videoFrameProcessingWrapper; private @MonotonicNonNull VideoFrameProcessingWrapper videoFrameProcessingWrapper;
private TransformerSingleInputVideoGraph( private TransformerSingleInputVideoGraph(
@@ -92,7 +84,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Executor listenerExecutor, Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings, VideoCompositorSettings videoCompositorSettings,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
@Nullable Presentation presentation, List<Effect> compositionEffects,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs) {
super( super(
context, context,
@@ -103,8 +95,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
listenerExecutor, listenerExecutor,
videoCompositorSettings, videoCompositorSettings,
renderFramesAutomatically, renderFramesAutomatically,
presentation,
initialTimestampOffsetUs); initialTimestampOffsetUs);
this.compositionEffects = compositionEffects;
} }
@Override @Override
@@ -113,7 +105,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
registerInput(inputIndex); registerInput(inputIndex);
videoFrameProcessingWrapper = videoFrameProcessingWrapper =
new VideoFrameProcessingWrapper( new VideoFrameProcessingWrapper(
getProcessor(inputIndex), getPresentation(), getInitialTimestampOffsetUs()); getProcessor(inputIndex), compositionEffects, getInitialTimestampOffsetUs());
return videoFrameProcessingWrapper; return videoFrameProcessingWrapper;
} }

View File

@@ -132,13 +132,10 @@ public final class TransformerUtil {
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper) { MuxerWrapper muxerWrapper) {
if (composition.sequences.size() > 1 if (composition.sequences.size() > 1
|| composition.sequences.get(sequenceIndex).editedMediaItems.size() > 1) { || composition.sequences.get(sequenceIndex).editedMediaItems.size() > 1) {
return !composition.transmuxVideo; return !composition.transmuxVideo;
} }
EditedMediaItem firstEditedMediaItem =
composition.sequences.get(sequenceIndex).editedMediaItems.get(0);
if (encoderFactory.videoNeedsEncoding()) { if (encoderFactory.videoNeedsEncoding()) {
return true; return true;
} }
@@ -156,9 +153,15 @@
if (inputFormat.pixelWidthHeightRatio != 1f) { if (inputFormat.pixelWidthHeightRatio != 1f) {
return true; return true;
} }
ImmutableList<Effect> videoEffects = firstEditedMediaItem.effects.videoEffects; EditedMediaItem firstEditedMediaItem =
return !videoEffects.isEmpty() composition.sequences.get(sequenceIndex).editedMediaItems.get(0);
&& maybeCalculateTotalRotationDegreesAppliedInEffects(videoEffects, inputFormat) == -1; ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(firstEditedMediaItem.effects.videoEffects)
.addAll(composition.effects.videoEffects)
.build();
return !combinedEffects.isEmpty()
&& maybeCalculateTotalRotationDegreesAppliedInEffects(combinedEffects, inputFormat) == -1;
} }
/** /**

View File

@@ -33,7 +33,6 @@ import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimestampIterator; import androidx.media3.common.util.TimestampIterator;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@@ -41,18 +40,18 @@ import java.util.concurrent.atomic.AtomicLong;
/** A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events. */ /** A wrapper for {@link VideoFrameProcessor} that handles {@link GraphInput} events. */
/* package */ final class VideoFrameProcessingWrapper implements GraphInput { /* package */ final class VideoFrameProcessingWrapper implements GraphInput {
private final VideoFrameProcessor videoFrameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final AtomicLong mediaItemOffsetUs; private final List<Effect> postProcessingEffects;
private final long initialTimestampOffsetUs; private final long initialTimestampOffsetUs;
@Nullable final Presentation presentation; private final AtomicLong mediaItemOffsetUs;
public VideoFrameProcessingWrapper( public VideoFrameProcessingWrapper(
VideoFrameProcessor videoFrameProcessor, VideoFrameProcessor videoFrameProcessor,
@Nullable Presentation presentation, List<Effect> postProcessingEffects,
long initialTimestampOffsetUs) { long initialTimestampOffsetUs) {
this.videoFrameProcessor = videoFrameProcessor; this.videoFrameProcessor = videoFrameProcessor;
this.mediaItemOffsetUs = new AtomicLong(); this.postProcessingEffects = postProcessingEffects;
this.initialTimestampOffsetUs = initialTimestampOffsetUs; this.initialTimestampOffsetUs = initialTimestampOffsetUs;
this.presentation = presentation; mediaItemOffsetUs = new AtomicLong();
} }
@Override @Override
@@ -65,11 +64,16 @@ import java.util.concurrent.atomic.AtomicLong;
durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs); durationUs = editedMediaItem.getDurationAfterEffectsApplied(durationUs);
if (decodedFormat != null) { if (decodedFormat != null) {
Size decodedSize = getDecodedSize(decodedFormat); Size decodedSize = getDecodedSize(decodedFormat);
ImmutableList<Effect> combinedEffects =
new ImmutableList.Builder<Effect>()
.addAll(editedMediaItem.effects.videoEffects)
.addAll(postProcessingEffects)
.build();
videoFrameProcessor.registerInputStream( videoFrameProcessor.registerInputStream(
isSurfaceAssetLoaderMediaItem isSurfaceAssetLoaderMediaItem
? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION ? VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION
: getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)), : getInputTypeForMimeType(checkNotNull(decodedFormat.sampleMimeType)),
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation), combinedEffects,
new FrameInfo.Builder( new FrameInfo.Builder(
checkNotNull(decodedFormat.colorInfo), checkNotNull(decodedFormat.colorInfo),
decodedSize.getWidth(), decodedSize.getWidth(),
@@ -137,16 +141,6 @@ import java.util.concurrent.atomic.AtomicLong;
return new Size(decodedWidth, decodedHeight); return new Size(decodedWidth, decodedHeight);
} }
private static ImmutableList<Effect> createEffectListWithPresentation(
List<Effect> effects, @Nullable Presentation presentation) {
if (presentation == null) {
return ImmutableList.copyOf(effects);
}
ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
effectsWithPresentationBuilder.addAll(effects).add(presentation);
return effectsWithPresentationBuilder.build();
}
private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(String sampleMimeType) { private static @VideoFrameProcessor.InputType int getInputTypeForMimeType(String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) { if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP; return INPUT_TYPE_BITMAP;