Add a flag to control whether input bitmap resampling can be skipped

Add a DefaultVideoFrameProcessor experimental flag that controls
whether input Bitmaps are sampled once for a repeating sequence of
output frames with the same contents, or once for each output frame.

PiperOrigin-RevId: 637921350
dancho 2024-05-28 09:13:27 -07:00 committed by Copybara-Service
parent 02df88e5d9
commit 3c998ac408
4 changed files with 64 additions and 12 deletions
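
For callers, opting into the new behavior goes through DefaultVideoFrameProcessor.Factory.Builder. The sketch below mirrors the TranscodeSpeedTest change at the end of this commit; the Android Context (context) and the rest of the Transformer setup are assumed from the surrounding test.

// Opt in to sampling each input bitmap once per repeating frame sequence,
// using the setter added by this commit; wiring mirrors TranscodeSpeedTest below.
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
    new DefaultVideoFrameProcessor.Factory.Builder()
        .setExperimentalRepeatInputBitmapWithoutResampling(true)
        .build();
Transformer transformer =
    new Transformer.Builder(context)
        .setVideoMimeType(MimeTypes.VIDEO_H264)
        .setVideoFrameProcessorFactory(videoFrameProcessorFactory)
        .build();

Left at its default of false, BitmapTextureManager skips the signalNewRepeatingFrameSequence() call, so every output frame samples from the full-resolution input bitmap; set to true, the bitmap is sampled once and reused for the whole repeating sequence.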


@@ -46,6 +46,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
// The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
private final GlObjectsProvider glObjectsProvider;
private final boolean signalRepeatingSequence;
private @MonotonicNonNull RepeatingGainmapShaderProgram repeatingGainmapShaderProgram;
@Nullable private GlTextureInfo currentSdrGlTextureInfo;
@@ -59,13 +60,18 @@ import org.checkerframework.checker.nullness.qual.Nullable;
* @param glObjectsProvider The {@link GlObjectsProvider} for using EGL and GLES.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that the
* methods of this class run on.
* @param signalRepeatingSequence Whether to repeat each input bitmap unchanged as a sequence of
* output frames. Defaults to {@code false}. That is, each output frame is treated as a new
* input bitmap.
*/
public BitmapTextureManager(
GlObjectsProvider glObjectsProvider,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
boolean signalRepeatingSequence) {
super(videoFrameProcessingTaskExecutor);
this.glObjectsProvider = glObjectsProvider;
pendingBitmaps = new LinkedBlockingQueue<>();
this.signalRepeatingSequence = signalRepeatingSequence;
}
/**
@@ -215,7 +221,9 @@ import org.checkerframework.checker.nullness.qual.Nullable;
if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
checkNotNull(repeatingGainmapShaderProgram).setGainmap(checkNotNull(bitmap.getGainmap()));
}
checkNotNull(repeatingGainmapShaderProgram).signalNewRepeatingFrameSequence();
if (signalRepeatingSequence) {
checkNotNull(repeatingGainmapShaderProgram).signalNewRepeatingFrameSequence();
}
} catch (GlUtil.GlException e) {
throw VideoFrameProcessingException.from(e);
}


@@ -143,6 +143,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private int textureOutputCapacity;
private boolean requireRegisteringAllInputFrames;
private boolean experimentalAdjustSurfaceTextureTransformationMatrix;
private boolean experimentalRepeatInputBitmapWithoutResampling;
/** Creates an instance. */
public Builder() {
@@ -159,6 +160,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
requireRegisteringAllInputFrames = !factory.repeatLastRegisteredFrame;
experimentalAdjustSurfaceTextureTransformationMatrix =
factory.experimentalAdjustSurfaceTextureTransformationMatrix;
experimentalRepeatInputBitmapWithoutResampling =
factory.experimentalRepeatInputBitmapWithoutResampling;
}
/**
@@ -276,6 +279,21 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
return this;
}
/**
* Sets whether {@link BitmapTextureManager} will sample from the input bitmap only once for a
* sequence of output frames.
*
* <p>Defaults to {@code false}. That is, each output frame will sample from the full
* resolution input bitmap.
*/
@CanIgnoreReturnValue
public Builder setExperimentalRepeatInputBitmapWithoutResampling(
boolean experimentalRepeatInputBitmapWithoutResampling) {
this.experimentalRepeatInputBitmapWithoutResampling =
experimentalRepeatInputBitmapWithoutResampling;
return this;
}
/** Builds an {@link DefaultVideoFrameProcessor.Factory} instance. */
public DefaultVideoFrameProcessor.Factory build() {
return new DefaultVideoFrameProcessor.Factory(
@@ -285,7 +303,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
executorService,
textureOutputListener,
textureOutputCapacity,
experimentalAdjustSurfaceTextureTransformationMatrix);
experimentalAdjustSurfaceTextureTransformationMatrix,
experimentalRepeatInputBitmapWithoutResampling);
}
}
@@ -296,6 +315,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Nullable private final GlTextureProducer.Listener textureOutputListener;
private final int textureOutputCapacity;
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
private final boolean experimentalRepeatInputBitmapWithoutResampling;
private Factory(
@WorkingColorSpace int sdrWorkingColorSpace,
@@ -304,7 +324,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Nullable ExecutorService executorService,
@Nullable GlTextureProducer.Listener textureOutputListener,
int textureOutputCapacity,
boolean experimentalAdjustSurfaceTextureTransformationMatrix) {
boolean experimentalAdjustSurfaceTextureTransformationMatrix,
boolean experimentalRepeatInputBitmapWithoutResampling) {
this.sdrWorkingColorSpace = sdrWorkingColorSpace;
this.repeatLastRegisteredFrame = repeatLastRegisteredFrame;
this.glObjectsProvider = glObjectsProvider;
@@ -313,6 +334,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
this.textureOutputCapacity = textureOutputCapacity;
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
this.experimentalRepeatInputBitmapWithoutResampling =
experimentalRepeatInputBitmapWithoutResampling;
}
public Builder buildUpon() {
@@ -376,7 +399,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
textureOutputListener,
textureOutputCapacity,
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix));
experimentalAdjustSurfaceTextureTransformationMatrix,
experimentalRepeatInputBitmapWithoutResampling));
try {
return defaultVideoFrameProcessorFuture.get();
@@ -745,7 +769,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Nullable GlTextureProducer.Listener textureOutputListener,
int textureOutputCapacity,
boolean repeatLastRegisteredFrame,
boolean experimentalAdjustSurfaceTextureTransformationMatrix)
boolean experimentalAdjustSurfaceTextureTransformationMatrix,
boolean experimentalRepeatInputBitmapWithoutResampling)
throws GlUtil.GlException, VideoFrameProcessingException {
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
int[] configAttributes =
@@ -777,7 +802,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/* samplingShaderProgramErrorListener= */ listener::onError,
sdrWorkingColorSpace,
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix);
experimentalAdjustSurfaceTextureTransformationMatrix,
experimentalRepeatInputBitmapWithoutResampling);
FinalShaderProgramWrapper finalShaderProgramWrapper =
new FinalShaderProgramWrapper(


@@ -67,7 +67,8 @@ import org.checkerframework.checker.nullness.qual.Nullable;
GlShaderProgram.ErrorListener samplingShaderProgramErrorListener,
@WorkingColorSpace int sdrWorkingColorSpace,
boolean repeatLastRegisteredFrame,
boolean experimentalAdjustSurfaceTextureTransformationMatrix)
boolean experimentalAdjustSurfaceTextureTransformationMatrix,
boolean experimentalRepeatInputBitmapWithoutResampling)
throws VideoFrameProcessingException {
this.context = context;
this.outputColorInfo = outputColorInfo;
@@ -91,7 +92,11 @@ import org.checkerframework.checker.nullness.qual.Nullable;
experimentalAdjustSurfaceTextureTransformationMatrix)));
inputs.put(
INPUT_TYPE_BITMAP,
new Input(new BitmapTextureManager(glObjectsProvider, videoFrameProcessingTaskExecutor)));
new Input(
new BitmapTextureManager(
glObjectsProvider,
videoFrameProcessingTaskExecutor,
/* signalRepeatingSequence= */ experimentalRepeatInputBitmapWithoutResampling)));
inputs.put(
INPUT_TYPE_TEXTURE_ID,
new Input(new TexIdTextureManager(glObjectsProvider, videoFrameProcessingTaskExecutor)));


@@ -23,11 +23,11 @@ import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.EditedMediaItem;
@@ -103,9 +103,22 @@ public class TranscodeSpeedTest {
testId,
/* inputFormat= */ AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
outputFormat);
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setExperimentalRepeatInputBitmapWithoutResampling(true)
.build();
Transformer transformer =
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
boolean isHighPerformance = Util.SDK_INT >= 31 && Build.SOC_MODEL.startsWith("Tensor");
new Transformer.Builder(context)
.setVideoMimeType(MimeTypes.VIDEO_H264)
.setVideoFrameProcessorFactory(videoFrameProcessorFactory)
.build();
boolean isHighPerformance =
Ascii.toLowerCase(Util.MODEL).contains("pixel")
&& (Ascii.toLowerCase(Util.MODEL).contains("6")
|| Ascii.toLowerCase(Util.MODEL).contains("7")
|| Ascii.toLowerCase(Util.MODEL).contains("8")
|| Ascii.toLowerCase(Util.MODEL).contains("fold")
|| Ascii.toLowerCase(Util.MODEL).contains("tablet"));
if (Util.SDK_INT == 33 && Ascii.toLowerCase(Util.MODEL).contains("pixel 6")) {
// Pixel 6 is usually quick, unless it's on API 33.
isHighPerformance = false;