Effect: Implement TimestampWrapper.

To allow applying an effect only to a range of timestamps.

PiperOrigin-RevId: 515615662
huangdarwin 2023-03-10 14:02:09 +00:00 committed by tonihei
parent c7350f368f
commit 6fd6781b8d
7 changed files with 233 additions and 2 deletions
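
For illustration, a minimal sketch of the intended usage, mirroring the instrumentation test added below (the Contrast effect, the 100 ms cutoff, and the surrounding Transformer setup are taken from that test; they are examples, not requirements):

// Apply a Contrast effect only to frames with timestamps in [0, 100 ms];
// all other frames are passed through unchanged.
ImmutableList<Effect> videoEffects =
    ImmutableList.of(
        new TimestampWrapper(
            new Contrast(.5f),
            /* startTimeUs= */ 0,
            /* endTimeUs= */ Math.round(.1f * C.MICROS_PER_SECOND)));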

View File

@@ -167,7 +167,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public void signalEndOfCurrentInputStream() {
     frameProcessingStarted = true;
-    checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
+    if (streamOffsetUsQueue.isEmpty()) {
+      // No input stream to end.
+      return;
+    }
     streamOffsetUsQueue.remove();
     if (streamOffsetUsQueue.isEmpty()) {
       videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);

View File

@@ -102,7 +102,7 @@ public interface GlShaderProgram {
   /**
    * Called when the {@link GlShaderProgram} will not produce further output frames belonging to
-   * the current output stream.
+   * the current output stream. May be called multiple times for one output stream.
    */
   default void onCurrentOutputStreamEnded() {}
 }

View File

@@ -0,0 +1,67 @@
/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.effect;

import static com.google.android.exoplayer2.util.Assertions.checkArgument;

import android.content.Context;
import androidx.annotation.IntRange;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;

/**
 * Applies a {@link GlEffect} from {@code startTimeUs} to {@code endTimeUs}, leaving all other
 * timestamps unchanged.
 */
public class TimestampWrapper implements GlEffect {

  public final GlEffect glEffect;
  public final long startTimeUs;
  public final long endTimeUs;

  /**
   * Creates a new instance.
   *
   * @param glEffect The {@link GlEffect} to apply, from {@code startTimeUs} to {@code endTimeUs}.
   *     This instance must not change the output dimensions.
   * @param startTimeUs The time, in microseconds, from which to start applying {@code glEffect}.
   *     Must be non-negative.
   * @param endTimeUs The time, in microseconds, at which to stop applying {@code glEffect}. Must
   *     be non-negative.
   */
  public TimestampWrapper(
      GlEffect glEffect, @IntRange(from = 0) long startTimeUs, @IntRange(from = 0) long endTimeUs) {
    // TODO(b/272063508): Allow TimestampWrapper to take in a glEffect that changes the output
    // dimensions, likely by moving the configure() method from SingleFrameGlShaderProgram to
    // GlShaderProgram, so that we can detect the output dimensions of the
    // glEffect.toGlShaderProgram.
    checkArgument(
        startTimeUs >= 0 && endTimeUs >= 0, "startTimeUs and endTimeUs must be non-negative.");
    checkArgument(endTimeUs > startTimeUs, "endTimeUs should be after startTimeUs.");
    this.glEffect = glEffect;
    this.startTimeUs = startTimeUs;
    this.endTimeUs = endTimeUs;
  }

  @Override
  public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
      throws VideoFrameProcessingException {
    return new TimestampWrapperShaderProgram(context, useHdr, /* timestampWrapper= */ this);
  }

  @Override
  public boolean isNoOp(int inputWidth, int inputHeight) {
    return glEffect.isNoOp(inputWidth, inputHeight);
  }
}

View File

@@ -0,0 +1,118 @@
/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.effect;

import android.content.Context;
import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.util.concurrent.Executor;

/**
 * Applies the {@link GlEffect} wrapped in a {@link TimestampWrapper} to frames inside the
 * wrapper's timestamp range, passing all other frames through unchanged.
 */
/* package */ final class TimestampWrapperShaderProgram implements GlShaderProgram {

  private final GlShaderProgram copyGlShaderProgram;
  private int pendingCopyGlShaderProgramFrames;
  private final GlShaderProgram wrappedGlShaderProgram;
  private int pendingWrappedGlShaderProgramFrames;

  private final long startTimeUs;
  private final long endTimeUs;

  /**
   * Creates a {@code TimestampWrapperShaderProgram} instance.
   *
   * @param context The {@link Context}.
   * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
   *     in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
   * @param timestampWrapper The {@link TimestampWrapper} to apply to each frame.
   */
  public TimestampWrapperShaderProgram(
      Context context, boolean useHdr, TimestampWrapper timestampWrapper)
      throws VideoFrameProcessingException {
    copyGlShaderProgram = new FrameCache(/* capacity= */ 1).toGlShaderProgram(context, useHdr);
    wrappedGlShaderProgram = timestampWrapper.glEffect.toGlShaderProgram(context, useHdr);

    startTimeUs = timestampWrapper.startTimeUs;
    endTimeUs = timestampWrapper.endTimeUs;
  }

  @Override
  public void setInputListener(InputListener inputListener) {
    copyGlShaderProgram.setInputListener(inputListener);
    wrappedGlShaderProgram.setInputListener(inputListener);
  }

  @Override
  public void setOutputListener(OutputListener outputListener) {
    copyGlShaderProgram.setOutputListener(outputListener);
    wrappedGlShaderProgram.setOutputListener(outputListener);
  }

  @Override
  public void setErrorListener(Executor errorListenerExecutor, ErrorListener errorListener) {
    copyGlShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
    wrappedGlShaderProgram.setErrorListener(errorListenerExecutor, errorListener);
  }

  @Override
  public void setGlObjectsProvider(GlObjectsProvider glObjectsProvider) {
    copyGlShaderProgram.setGlObjectsProvider(glObjectsProvider);
    wrappedGlShaderProgram.setGlObjectsProvider(glObjectsProvider);
  }

  @Override
  public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
    if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) {
      pendingWrappedGlShaderProgramFrames++;
      wrappedGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs);
    } else {
      pendingCopyGlShaderProgramFrames++;
      copyGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs);
    }
  }

  @Override
  public void releaseOutputFrame(GlTextureInfo outputTexture) {
    if (pendingCopyGlShaderProgramFrames > 0) {
      copyGlShaderProgram.releaseOutputFrame(outputTexture);
      pendingCopyGlShaderProgramFrames--;
    } else if (pendingWrappedGlShaderProgramFrames > 0) {
      wrappedGlShaderProgram.releaseOutputFrame(outputTexture);
      pendingWrappedGlShaderProgramFrames--;
    } else {
      throw new IllegalArgumentException("Output texture not contained in either shader.");
    }
  }

  @Override
  public void signalEndOfCurrentInputStream() {
    copyGlShaderProgram.signalEndOfCurrentInputStream();
    wrappedGlShaderProgram.signalEndOfCurrentInputStream();
  }

  @Override
  public void flush() {
    copyGlShaderProgram.flush();
    wrappedGlShaderProgram.flush();
  }

  @Override
  public void release() throws VideoFrameProcessingException {
    copyGlShaderProgram.release();
    wrappedGlShaderProgram.release();
  }
}

View File

@@ -30,8 +30,11 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.effect.Contrast;
import com.google.android.exoplayer2.effect.FrameCache;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.effect.RgbFilter;
import com.google.android.exoplayer2.effect.TimestampWrapper;
import com.google.android.exoplayer2.util.Effect;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
@@ -117,6 +120,41 @@ public class TransformerEndToEndTest {
    assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
  }

  @Test
  public void videoEditing_effectsOverTime_completesWithConsistentFrameCount() throws Exception {
    Transformer transformer =
        new Transformer.Builder(context)
            .setEncoderFactory(
                new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
            .build();
    MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_URI_STRING));
    ImmutableList<Effect> videoEffects =
        ImmutableList.of(
            new TimestampWrapper(
                new Contrast(.5f),
                /* startTimeUs= */ 0,
                /* endTimeUs= */ Math.round(.1f * C.MICROS_PER_SECOND)),
            new TimestampWrapper(
                new FrameCache(/* capacity= */ 5),
                /* startTimeUs= */ Math.round(.2f * C.MICROS_PER_SECOND),
                /* endTimeUs= */ Math.round(.3f * C.MICROS_PER_SECOND)));
    Effects effects = new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects);
    EditedMediaItem editedMediaItem =
        new EditedMediaItem.Builder(mediaItem).setEffects(effects).build();
    // Result of the following command:
    // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
    int expectedFrameCount = 30;

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                /* testId= */ "videoEditing_effectsOverTime_completesWithConsistentFrameCount",
                editedMediaItem);

    assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
  }

  @Test
  public void videoOnly_completesWithConsistentDuration() throws Exception {
    Transformer transformer =

View File

@@ -527,6 +527,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
context,
addedTrackInfo.firstAssetLoaderInputFormat,
addedTrackInfo.streamStartPositionUs,
addedTrackInfo.streamOffsetUs,
transformationRequest,
firstEditedMediaItem.effects.videoEffects,
compositionPresentation,

View File

@@ -61,6 +61,7 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final long streamOffsetUs;
private final AtomicLong mediaItemOffsetUs;
private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo videoFrameProcessorInputColor;
@@ -77,6 +78,7 @@ import org.checkerframework.dataflow.qual.Pure;
Context context,
Format firstInputFormat,
long streamStartPositionUs,
long streamOffsetUs,
TransformationRequest transformationRequest,
ImmutableList<Effect> effects,
@Nullable Presentation presentation,
@@ -90,6 +92,7 @@ import org.checkerframework.dataflow.qual.Pure;
throws ExportException {
// TODO(b/262693177) Add tests for input format change.
super(firstInputFormat, streamStartPositionUs, muxerWrapper);
this.streamOffsetUs = streamOffsetUs;
mediaItemOffsetUs = new AtomicLong();
finalFramePresentationTimeUs = C.TIME_UNSET;
@@ -196,6 +199,7 @@ import org.checkerframework.dataflow.qual.Pure;
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get())
.setStreamOffsetUs(streamOffsetUs)
.build());
}
mediaItemOffsetUs.addAndGet(durationUs);