Update frame & texture processors to handle SDR image input

GlEffectsFrameProcessor, MatrixShaderProgram and FinalMatrixShaderProgramWrapper are currently set up to handle input frames coming from an external producer (i.e. a video decoder). Image input is loaded into Bitmap objects at the start of the pipeline, so its frames are not produced externally. These changes give the frame processing pipeline a way to handle this "internal" (i.e. non-external) input.

PiperOrigin-RevId: 508645244
Authored by tofunmi on 2023-02-10 14:32:07 +00:00, committed by christosts
parent 706431059c
commit 574424f626
9 changed files with 554 additions and 95 deletions
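
As context for the diffs below, here is a minimal, illustrative sketch (not part of this commit) of how a caller might drive the new image-input path. It assumes a FrameProcessor that was created with C.TRACK_TYPE_IMAGE as the inputTrackType, as the FrameProcessorTestRunner changes below do; the factory call itself is omitted and the helper class name is hypothetical.

import android.graphics.Bitmap;
import androidx.media3.common.C;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessor;

/** Hypothetical helper illustrating the image-input calls added in this commit. */
final class ImageInputExample {
  /** Queues {@code bitmap} as a one-second, one-frame input stream and signals end of input. */
  static void queueStillImage(FrameProcessor frameProcessor, Bitmap bitmap) {
    // Describe the frame dimensions first, mirroring FrameProcessorTestRunner below.
    frameProcessor.setInputFrameInfo(
        new FrameInfo.Builder(bitmap.getWidth(), bitmap.getHeight()).build());
    // durationUs and frameRate control how many frames InternalTextureManager generates.
    frameProcessor.queueInputBitmap(
        bitmap, /* durationUs= */ C.MICROS_PER_SECOND, /* frameRate= */ 1);
    frameProcessor.signalEndOfInput();
  }
}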


@@ -127,20 +127,25 @@ public interface FrameProcessor {
  /**
   * Provides an input {@link Bitmap} to the {@link FrameProcessor}.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_IMAGE} as the {@code inputTrackType}.
   *
   * <p>Can be called on any thread.
   *
   * @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}.
   * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
   * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
   *     second.
   */
- // TODO(b/262693274): Remove duration & frameRate parameters when EditedMediaItem can be signalled
- // down to the processors.
- void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate);
+ // TODO(b/262693274): Remove duration and frameRate parameters when EditedMediaItem can be
+ // signalled down to the processors.
+ void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);

  /**
   * Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_VIDEO} as the {@code inputTrackType}.
   *
   * <p>Can be called on any thread.
   */
  Surface getInputSurface();

@@ -167,6 +172,9 @@ public interface FrameProcessor {
   *
   * <p>Must be called before rendering a frame to the frame processor's input surface.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_VIDEO} as the {@code inputTrackType}.
   *
   * <p>Can be called on any thread.
   *
   * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link

@@ -178,6 +186,9 @@ public interface FrameProcessor {
   * Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
   * but not processed off the {@linkplain #getInputSurface() input surface} yet.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_VIDEO} as the {@code inputTrackType}.
   *
   * <p>Can be called on any thread.
   */
  int getPendingInputFrameCount();

@@ -235,6 +246,9 @@ public interface FrameProcessor {
   * <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
   * method are no longer considered to be registered when this method returns.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_VIDEO} as the {@code inputTrackType}.
   *
   * <p>{@link Listener} methods invoked prior to calling this method should be ignored.
   */
  void flush();


@@ -15,6 +15,7 @@
 */
package androidx.media3.effect;

import static androidx.media3.common.C.TRACK_TYPE_IMAGE;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.effect.OverlayShaderProgramPixelTest.OVERLAY_PNG_ASSET_PATH;

@@ -54,6 +55,8 @@ import org.junit.runner.RunWith;
public final class GlEffectsFrameProcessorPixelTest {
  public static final String ORIGINAL_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
  public static final String WRAPPED_CROP_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/image_input_with_wrapped_crop.png";
  // This file is generated on a Pixel 7, because the emulator isn't able to generate this file.
  public static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";

@@ -102,6 +105,50 @@ public final class GlEffectsFrameProcessorPixelTest {
    assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
  }

@Test
public void noEffects_withImageInput_matchesGoldenFile() throws Exception {
String testId = "noEffects_withImageInput_matchesGoldenFile";
frameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputTrackType(TRACK_TYPE_IMAGE)
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
@Test
public void wrappedCrop_withImageInput_matchesGoldenFile() throws Exception {
String testId = "wrappedCrop_withImageInput_matchesGoldenFile";
frameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputTrackType(TRACK_TYPE_IMAGE)
.setEffects(
new GlEffectWrapper(
new Crop(
/* left= */ -0.5f,
/* right= */ 0.5f,
/* bottom= */ -0.5f,
/* top= */ 0.5f)))
.build();
Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(originalBitmap);
// TODO(b/207848601): switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
  // TODO(b/262693274): Once texture deletion is added to InternalTextureManager.java, add a test
  // queuing multiple input bitmaps to ensure successful completion without errors.

  @Test
  public void noEffects_withFrameCache_matchesGoldenFile() throws Exception {
    String testId = "noEffects_withFrameCache_matchesGoldenFile";
@@ -238,9 +285,10 @@ public final class GlEffectsFrameProcessorPixelTest {
    frameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setEffects(
-               new Crop(/* left= */ -.5f, /* right= */ .5f, /* bottom= */ -.5f, /* top= */ .5f),
+               new Crop(
+                   /* left= */ -0.5f, /* right= */ 0.5f, /* bottom= */ -0.5f, /* top= */ 0.5f),
                Presentation.createForAspectRatio(
-                   /* aspectRatio= */ .5f, Presentation.LAYOUT_SCALE_TO_FIT))
+                   /* aspectRatio= */ 0.5f, Presentation.LAYOUT_SCALE_TO_FIT))
            .build();
    Bitmap expectedBitmap = readBitmap(CROP_THEN_ASPECT_RATIO_PNG_ASSET_PATH);


@@ -0,0 +1,103 @@
#version 100
// Copyright 2023 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ES 2 fragment shader that:
// 1. Samples from an input texture created from an internal texture (e.g. a
// texture created from a bitmap), with uTexSampler copying from this texture
// to the current output.
// 2. Transforms the electrical colors to optical colors using the SMPTE 170M
// EOTF.
// 3. Applies a 4x4 RGB color matrix to change the pixel colors.
// 4. Outputs as requested by uOutputColorTransfer. Use COLOR_TRANSFER_LINEAR
// for outputting to intermediate shaders, or COLOR_TRANSFER_SDR_VIDEO to
// output electrical colors via an OETF (e.g. to an encoder).
precision mediump float;
uniform sampler2D uTexSampler;
uniform mat4 uRgbMatrix;
varying vec2 vTexSamplingCoord;
// C.java#ColorTransfer value.
// Only COLOR_TRANSFER_LINEAR and COLOR_TRANSFER_SDR_VIDEO are allowed.
uniform int uOutputColorTransfer;
const float inverseGamma = 0.4500;
const float gamma = 1.0 / inverseGamma;
// Transforms a single channel from electrical to optical SDR using the SMPTE
// 170M EOTF.
float smpte170mEotfSingleChannel(float electricalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return electricalChannel < 0.0812
? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma);
}
// Transforms electrical to optical SDR using the SMPTE 170M EOTF.
vec3 smpte170mEotf(vec3 electricalColor) {
return vec3(
smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.b));
}
// Transforms a single channel from optical to electrical SDR.
float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018
? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
}
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) {
return vec3(
smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.b));
}
// Applies the appropriate OETF to convert linear optical signals to nonlinear
// electrical signals. Input and output are both normalized to [0, 1].
highp vec3 applyOetf(highp vec3 linearColor) {
// LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_LINEAR = 1;
const int COLOR_TRANSFER_SDR_VIDEO = 3;
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR) {
return linearColor;
} else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mOetf(linearColor);
} else {
// Output red as an obviously visible error.
return vec3(1.0, 0.0, 0.0);
}
}
void main() {
vec2 vTexSamplingCoordFlipped =
vec2(vTexSamplingCoord.x, 1.0 - vTexSamplingCoord.y);
// Whereas the Android system uses the top-left corner as (0,0) of the
// coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
// texture gets flipped. We flip the texture vertically to ensure the
// orientation of the output is correct.
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoordFlipped);
vec3 linearInputColor = smpte170mEotf(inputColor.rgb);
vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1);
gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a);
}
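
As a quick sanity check, the shader's piecewise SMPTE 170M transfer functions can be ported to plain Java and round-tripped. This is an illustrative sketch only (the class name is made up for the example), not code from this commit.

/** Hypothetical Java port of the shader's SMPTE 170M EOTF/OETF, for sanity checking. */
final class Smpte170mRoundTrip {
  // EOTF: electrical [0, 1] -> optical [0, 1], matching smpte170mEotfSingleChannel above.
  static double eotf(double v) {
    return v < 0.0812 ? v / 4.500 : Math.pow((v + 0.099) / 1.099, 1.0 / 0.45);
  }

  // OETF: optical [0, 1] -> electrical [0, 1], matching smpte170mOetfSingleChannel above.
  static double oetf(double l) {
    return l < 0.018 ? l * 4.500 : 1.099 * Math.pow(l, 0.45) - 0.099;
  }

  public static void main(String[] args) {
    for (double v = 0.0; v <= 1.0; v += 0.125) {
      // oetf(eotf(v)) should approximately reproduce v (small error is expected near the knee).
      System.out.printf("%.3f -> %.6f%n", v, oetf(eotf(v)));
    }
  }
}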


@@ -71,7 +71,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
  private final EGLDisplay eglDisplay;
  private final EGLContext eglContext;
  private final DebugViewProvider debugViewProvider;
- private final boolean sampleFromExternalTexture;
+ private final boolean sampleFromInputTexture;
+ private final boolean isInputExternal;
  private final ColorInfo inputColorInfo;
  private final ColorInfo outputColorInfo;
  private final boolean releaseFramesAutomatically;

@@ -106,9 +107,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
      ImmutableList<GlMatrixTransformation> matrixTransformations,
      ImmutableList<RgbMatrix> rgbMatrices,
      DebugViewProvider debugViewProvider,
-     boolean sampleFromExternalTexture,
      ColorInfo inputColorInfo,
      ColorInfo outputColorInfo,
+     boolean sampleFromInputTexture,
+     boolean isInputExternal,
      boolean releaseFramesAutomatically,
      Executor frameProcessorListenerExecutor,
      FrameProcessor.Listener frameProcessorListener) {

@@ -118,7 +120,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    this.eglDisplay = eglDisplay;
    this.eglContext = eglContext;
    this.debugViewProvider = debugViewProvider;
-   this.sampleFromExternalTexture = sampleFromExternalTexture;
+   this.sampleFromInputTexture = sampleFromInputTexture;
+   this.isInputExternal = isInputExternal;
    this.inputColorInfo = inputColorInfo;
    this.outputColorInfo = outputColorInfo;
    this.releaseFramesAutomatically = releaseFramesAutomatically;

@@ -402,14 +405,24 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    MatrixShaderProgram matrixShaderProgram;
    ImmutableList<GlMatrixTransformation> expandedMatrixTransformations =
        matrixTransformationListBuilder.build();
-   if (sampleFromExternalTexture) {
+   if (sampleFromInputTexture) {
+     if (isInputExternal) {
        matrixShaderProgram =
            MatrixShaderProgram.createWithExternalSampler(
                context,
                expandedMatrixTransformations,
                rgbMatrices,
-               /* inputColorInfo= */ inputColorInfo,
-               /* outputColorInfo= */ outputColorInfo);
+               inputColorInfo,
+               outputColorInfo);
+     } else {
+       matrixShaderProgram =
+           MatrixShaderProgram.createWithInternalSampler(
+               context,
+               expandedMatrixTransformations,
+               rgbMatrices,
+               inputColorInfo,
+               outputColorInfo);
+     }
    } else {
      matrixShaderProgram =
          MatrixShaderProgram.createApplyingOetf(


@@ -16,6 +16,7 @@
package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static com.google.common.collect.Iterables.getLast;

@@ -108,6 +109,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    checkArgument(inputColorInfo.colorTransfer != C.COLOR_TRANSFER_LINEAR);
    checkArgument(outputColorInfo.isValid());
    checkArgument(outputColorInfo.colorTransfer != C.COLOR_TRANSFER_LINEAR);
    checkArgument(inputTrackType == C.TRACK_TYPE_VIDEO || inputTrackType == C.TRACK_TYPE_IMAGE);

    if (inputColorInfo.colorSpace != outputColorInfo.colorSpace
        || ColorInfo.isTransferHdr(inputColorInfo) != ColorInfo.isTransferHdr(outputColorInfo)) {

@@ -134,6 +136,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
            debugViewProvider,
            inputColorInfo,
            outputColorInfo,
            /* isInputExternal= */ inputTrackType == C.TRACK_TYPE_VIDEO,
            releaseFramesAutomatically,
            singleThreadExecutorService,
            listenerExecutor,

@@ -167,6 +170,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
      DebugViewProvider debugViewProvider,
      ColorInfo inputColorInfo,
      ColorInfo outputColorInfo,
      boolean isInputExternal,
      boolean releaseFramesAutomatically,
      ExecutorService singleThreadExecutorService,
      Executor executor,

@@ -208,6 +212,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
        debugViewProvider,
        inputColorInfo,
        outputColorInfo,
        isInputExternal,
        releaseFramesAutomatically,
        executor,
        listener);

@@ -219,6 +224,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    return new GlEffectsFrameProcessor(
        eglDisplay,
        eglContext,
        isInputExternal,
        frameProcessingTaskExecutor,
        shaderPrograms,
        releaseFramesAutomatically);

@@ -243,6 +249,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
      DebugViewProvider debugViewProvider,
      ColorInfo inputColorInfo,
      ColorInfo outputColorInfo,
      boolean isInputExternal,
      boolean releaseFramesAutomatically,
      Executor executor,
      Listener listener)

@@ -251,7 +258,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
        new ImmutableList.Builder<>();
    ImmutableList.Builder<RgbMatrix> rgbMatrixListBuilder = new ImmutableList.Builder<>();
-   boolean sampleFromExternalTexture = true;
+   boolean sampleFromInputTexture = true;
    ColorInfo linearColorInfo =
        new ColorInfo(
            outputColorInfo.colorSpace, outputColorInfo.colorRange, C.COLOR_TRANSFER_LINEAR, null);
@@ -275,16 +282,18 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
        matrixTransformationListBuilder.build();
    ImmutableList<RgbMatrix> rgbMatrices = rgbMatrixListBuilder.build();
    boolean isOutputTransferHdr = ColorInfo.isTransferHdr(outputColorInfo);
-   if (!matrixTransformations.isEmpty() || !rgbMatrices.isEmpty() || sampleFromExternalTexture) {
+   if (!matrixTransformations.isEmpty() || !rgbMatrices.isEmpty() || sampleFromInputTexture) {
      MatrixShaderProgram matrixShaderProgram;
-     if (sampleFromExternalTexture) {
+     if (sampleFromInputTexture) {
+       if (isInputExternal) {
          matrixShaderProgram =
              MatrixShaderProgram.createWithExternalSampler(
-                 context,
-                 matrixTransformations,
-                 rgbMatrices,
-                 /* inputColorInfo= */ inputColorInfo,
-                 /* outputColorInfo= */ linearColorInfo);
+                 context, matrixTransformations, rgbMatrices, inputColorInfo, linearColorInfo);
+       } else {
+         matrixShaderProgram =
+             MatrixShaderProgram.createWithInternalSampler(
+                 context, matrixTransformations, rgbMatrices, inputColorInfo, linearColorInfo);
+       }
      } else {
        matrixShaderProgram =
            MatrixShaderProgram.create(

@@ -293,7 +302,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
      shaderProgramListBuilder.add(matrixShaderProgram);
      matrixTransformationListBuilder = new ImmutableList.Builder<>();
      rgbMatrixListBuilder = new ImmutableList.Builder<>();
-     sampleFromExternalTexture = false;
+     sampleFromInputTexture = false;
    }
    shaderProgramListBuilder.add(glEffect.toGlShaderProgram(context, isOutputTransferHdr));
  }

@@ -306,9 +315,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
            matrixTransformationListBuilder.build(),
            rgbMatrixListBuilder.build(),
            debugViewProvider,
-           sampleFromExternalTexture,
-           /* inputColorInfo= */ sampleFromExternalTexture ? inputColorInfo : linearColorInfo,
+           /* inputColorInfo= */ sampleFromInputTexture ? inputColorInfo : linearColorInfo,
            outputColorInfo,
+           sampleFromInputTexture,
+           isInputExternal,
            releaseFramesAutomatically,
            executor,
            listener));

@@ -343,8 +353,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
  private final EGLDisplay eglDisplay;
  private final EGLContext eglContext;
  private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
- private final ExternalTextureManager inputExternalTextureManager;
- private final Surface inputSurface;
+ private @MonotonicNonNull InternalTextureManager inputInternalTextureManager;
+ private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager;
+ // TODO(262693274): Move this variable to ExternalTextureManager.
+ private @MonotonicNonNull Surface inputExternalSurface;
  private final boolean releaseFramesAutomatically;
  private final FinalMatrixShaderProgramWrapper finalShaderProgramWrapper;
  private final ImmutableList<GlShaderProgram> allShaderPrograms;

@@ -361,6 +373,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
  private GlEffectsFrameProcessor(
      EGLDisplay eglDisplay,
      EGLContext eglContext,
      boolean isInputExternal,
      FrameProcessingTaskExecutor frameProcessingTaskExecutor,
      ImmutableList<GlShaderProgram> shaderPrograms,
      boolean releaseFramesAutomatically)

@@ -372,14 +385,23 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    this.releaseFramesAutomatically = releaseFramesAutomatically;

    checkState(!shaderPrograms.isEmpty());
-   checkState(shaderPrograms.get(0) instanceof ExternalShaderProgram);
    checkState(getLast(shaderPrograms) instanceof FinalMatrixShaderProgramWrapper);

-   ExternalShaderProgram inputExternalShaderProgram =
-       (ExternalShaderProgram) shaderPrograms.get(0);
-   inputExternalTextureManager =
-       new ExternalTextureManager(inputExternalShaderProgram, frameProcessingTaskExecutor);
-   inputExternalShaderProgram.setInputListener(inputExternalTextureManager);
-   inputSurface = new Surface(inputExternalTextureManager.getSurfaceTexture());
+   GlShaderProgram inputShaderProgram = shaderPrograms.get(0);
+   if (isInputExternal) {
+     checkState(inputShaderProgram instanceof ExternalShaderProgram);
+     inputExternalTextureManager =
+         new ExternalTextureManager(
+             (ExternalShaderProgram) inputShaderProgram, frameProcessingTaskExecutor);
+     inputShaderProgram.setInputListener(inputExternalTextureManager);
+     inputExternalSurface = new Surface(inputExternalTextureManager.getSurfaceTexture());
+   } else {
+     inputInternalTextureManager =
+         new InternalTextureManager(inputShaderProgram, frameProcessingTaskExecutor);
+     inputShaderProgram.setInputListener(inputInternalTextureManager);
+   }

    finalShaderProgramWrapper = (FinalMatrixShaderProgramWrapper) getLast(shaderPrograms);
    allShaderPrograms = shaderPrograms;
    previousStreamOffsetUs = C.TIME_UNSET;

@@ -400,19 +422,27 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
   * call this method after instantiation to ensure that buffers are handled at full resolution. See
   * {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information.
   *
   * <p>This method should only be used when the {@link FrameProcessor} was created with {@link
   * C#TRACK_TYPE_VIDEO} as the {@code inputTrackType}.
   *
   * @param width The default width for input buffers, in pixels.
   * @param height The default height for input buffers, in pixels.
   */
  public void setInputDefaultBufferSize(int width, int height) {
-   inputExternalTextureManager.getSurfaceTexture().setDefaultBufferSize(width, height);
+   checkNotNull(inputExternalTextureManager)
+       .getSurfaceTexture()
+       .setDefaultBufferSize(width, height);
  }

  @Override
- public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {}
+ public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
+   checkNotNull(inputInternalTextureManager)
+       .queueInputBitmap(inputBitmap, durationUs, frameRate, /* useHdr= */ false);
+ }

  @Override
  public Surface getInputSurface() {
-   return inputSurface;
+   return checkNotNull(inputExternalSurface);
  }

  @Override

@@ -431,12 +461,12 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    checkStateNotNull(
        nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames");

-   inputExternalTextureManager.registerInputFrame(nextInputFrameInfo);
+   checkNotNull(inputExternalTextureManager).registerInputFrame(nextInputFrameInfo);
  }

  @Override
  public int getPendingInputFrameCount() {
-   return inputExternalTextureManager.getPendingFrameCount();
+   return checkNotNull(inputExternalTextureManager).getPendingFrameCount();
  }

  @Override

@@ -457,15 +487,20 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
  public void signalEndOfInput() {
    checkState(!inputStreamEnded);
    inputStreamEnded = true;
-   frameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
+   if (inputInternalTextureManager != null) {
+     frameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput);
+   }
+   if (inputExternalTextureManager != null) {
+     frameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
+   }
  }

  @Override
  public void flush() {
    try {
      frameProcessingTaskExecutor.flush();
      CountDownLatch latch = new CountDownLatch(1);
-     inputExternalTextureManager.setOnFlushCompleteListener(latch::countDown);
+     checkNotNull(inputExternalTextureManager).setOnFlushCompleteListener(latch::countDown);
      frameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
      latch.await();
      inputExternalTextureManager.setOnFlushCompleteListener(null);

@@ -483,8 +518,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
      Thread.currentThread().interrupt();
      throw new IllegalStateException(unexpected);
    }
+   if (inputExternalTextureManager != null) {
      inputExternalTextureManager.release();
-     inputSurface.release();
+     checkNotNull(inputExternalSurface).release();
+   }
  }

  /**


@@ -0,0 +1,159 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static java.lang.Math.round;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.annotation.WorkerThread;
import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
/** Forwards a frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for consumption. */
@UnstableApi
/* package */ class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
private int downstreamShaderProgramCapacity;
private int availableFrameCount;
private long currentPresentationTimeUs;
private long totalDurationUs;
private boolean inputEnded;
public InternalTextureManager(
GlShaderProgram shaderProgram, FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
this.shaderProgram = shaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
pendingBitmaps = new LinkedBlockingQueue<>();
}
@Override
public void onReadyToAcceptInputFrame() {
frameProcessingTaskExecutor.submit(
() -> {
downstreamShaderProgramCapacity++;
maybeQueueToShaderProgram();
});
}
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
// TODO(b/262693274): Delete texture when last duplicate of the frame comes back from the shader
// program and change to only allocate one texId at a time. A change to method signature to
// include presentationTimeUs will probably be needed to do this.
frameProcessingTaskExecutor.submit(
() -> {
if (availableFrameCount == 0) {
signalEndOfInput();
}
});
}
public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) {
frameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr));
}
@WorkerThread
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws FrameProcessingException {
if (inputEnded) {
return;
}
try {
int bitmapTexId =
GlUtil.createTexture(
bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ useHdr);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapTexId);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
TextureInfo textureInfo =
new TextureInfo(
bitmapTexId, /* fboId= */ C.INDEX_UNSET, bitmap.getWidth(), bitmap.getHeight());
int timeIncrementUs = round(C.MICROS_PER_SECOND / frameRate);
availableFrameCount += round((frameRate * durationUs) / C.MICROS_PER_SECOND);
totalDurationUs += durationUs;
pendingBitmaps.add(
new BitmapFrameSequenceInfo(textureInfo, timeIncrementUs, totalDurationUs));
} catch (GlUtil.GlException e) {
throw FrameProcessingException.from(e);
}
maybeQueueToShaderProgram();
}
@WorkerThread
private void maybeQueueToShaderProgram() {
if (inputEnded || availableFrameCount == 0 || downstreamShaderProgramCapacity == 0) {
return;
}
availableFrameCount--;
downstreamShaderProgramCapacity--;
BitmapFrameSequenceInfo currentFrame = checkNotNull(pendingBitmaps.peek());
shaderProgram.queueInputFrame(currentFrame.textureInfo, currentPresentationTimeUs);
currentPresentationTimeUs += currentFrame.timeIncrementUs;
if (currentPresentationTimeUs >= currentFrame.endPresentationTimeUs) {
pendingBitmaps.remove();
}
}
/**
* Signals the end of the input.
*
* @see FrameProcessor#signalEndOfInput()
*/
public void signalEndOfInput() {
frameProcessingTaskExecutor.submit(
() -> {
if (inputEnded) {
return;
}
inputEnded = true;
shaderProgram.signalEndOfCurrentInputStream();
});
}
/**
* Value class specifying information to generate all the frames associated with a specific {@link
* Bitmap}.
*/
private static final class BitmapFrameSequenceInfo {
public final TextureInfo textureInfo;
public final long timeIncrementUs;
public final long endPresentationTimeUs;
public BitmapFrameSequenceInfo(
TextureInfo textureInfo, long timeIncrementUs, long endPresentationTimeUs) {
this.textureInfo = textureInfo;
this.timeIncrementUs = timeIncrementUs;
this.endPresentationTimeUs = endPresentationTimeUs;
}
}
}
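
For reference, the frame-count and timestamp arithmetic that setupBitmap applies to each queued bitmap can be reproduced standalone. This is an illustrative sketch (class name invented for the example), not part of the commit.

/** Hypothetical standalone illustration of InternalTextureManager's frame sequencing math. */
final class BitmapFrameTimingExample {
  public static void main(String[] args) {
    long durationUs = 3_000_000; // Display the bitmap for 3 seconds...
    float frameRate = 30f; // ...at 30 frames per second.

    // Mirrors setupBitmap: number of generated frames and the gap between their timestamps.
    int frameCount = Math.round((frameRate * durationUs) / 1_000_000f);
    int timeIncrementUs = Math.round(1_000_000f / frameRate);

    // Prints "90 frames, 33333 us apart": the same texture is queued 90 times downstream.
    System.out.println(frameCount + " frames, " + timeIncrementUs + " us apart");
  }
}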


@@ -69,6 +69,8 @@ import java.util.List;
      "shaders/fragment_shader_transformation_external_yuv_es3.glsl";
  private static final String FRAGMENT_SHADER_TRANSFORMATION_SDR_EXTERNAL_PATH =
      "shaders/fragment_shader_transformation_sdr_external_es2.glsl";
  private static final String FRAGMENT_SHADER_TRANSFORMATION_SDR_INTERNAL_PATH =
      "shaders/fragment_shader_transformation_sdr_internal_es2.glsl";
  private static final ImmutableList<float[]> NDC_SQUARE =
      ImmutableList.of(
          new float[] {-1, -1, 0, 1},

@@ -163,6 +165,48 @@ import java.util.List;
          useHdr);
  }

/**
* Creates a new instance.
*
* <p>Input will be sampled from an internal (i.e. regular) texture.
*
* <p>Applies the {@linkplain ColorInfo#colorTransfer inputColorInfo EOTF} to convert from
* electrical color input, to intermediate optical {@link GlShaderProgram} color output, before
* {@code matrixTransformations} and {@code rgbMatrices} are applied. Also applies the {@linkplain
* ColorInfo#colorTransfer outputColorInfo OETF}, if needed, to convert back to an electrical
* color output.
*
* @param context The {@link Context}.
* @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to
* apply to each frame in order. Can be empty to apply no vertex transformations.
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations.
* @param inputColorInfo The input electrical (nonlinear) {@link ColorInfo}.
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
*/
public static MatrixShaderProgram createWithInternalSampler(
Context context,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
checkState(
!ColorInfo.isTransferHdr(inputColorInfo),
"MatrixShaderProgram doesn't support HDR internal sampler input yet.");
GlProgram glProgram =
createGlProgram(
context,
VERTEX_SHADER_TRANSFORMATION_PATH,
FRAGMENT_SHADER_TRANSFORMATION_SDR_INTERNAL_PATH);
return createWithSampler(
glProgram, matrixTransformations, rgbMatrices, inputColorInfo, outputColorInfo);
}
  /**
   * Creates a new instance.
   *

@@ -204,50 +248,12 @@ import java.util.List;
        isInputTransferHdr
            ? FRAGMENT_SHADER_TRANSFORMATION_EXTERNAL_YUV_ES3_PATH
            : FRAGMENT_SHADER_TRANSFORMATION_SDR_EXTERNAL_PATH;
-   GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
-   @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
-   if (isInputTransferHdr) {
-     checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);
-     // In HDR editing mode the decoder output is sampled in YUV.
-     if (!GlUtil.isYuvTargetExtensionSupported()) {
-       throw new FrameProcessingException(
-           "The EXT_YUV_target extension is required for HDR editing input.");
-     }
-     glProgram.setFloatsUniform(
-         "uYuvToRgbColorTransform",
-         inputColorInfo.colorRange == C.COLOR_RANGE_FULL
-             ? BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX
-             : BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX);
-     checkArgument(ColorInfo.isTransferHdr(inputColorInfo));
-     glProgram.setIntUniform("uInputColorTransfer", inputColorInfo.colorTransfer);
-     // TODO(b/239735341): Add a setBooleanUniform method to GlProgram.
-     glProgram.setIntUniform(
-         "uApplyHdrToSdrToneMapping",
-         /* value= */ (outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020) ? 1 : 0);
-     checkArgument(
-         outputColorTransfer != Format.NO_VALUE && outputColorTransfer != C.COLOR_TRANSFER_SDR);
-     glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
-   } else {
-     checkArgument(
-         outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020,
-         "Converting from SDR to HDR is not supported.");
-     checkArgument(inputColorInfo.colorSpace == outputColorInfo.colorSpace);
-     checkArgument(
-         outputColorTransfer == C.COLOR_TRANSFER_SDR
-             || outputColorTransfer == C.COLOR_TRANSFER_LINEAR);
-     // The SDR shader automatically applies an COLOR_TRANSFER_SDR EOTF.
-     glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
-   }
-   return new MatrixShaderProgram(
-       glProgram,
-       ImmutableList.copyOf(matrixTransformations),
-       ImmutableList.copyOf(rgbMatrices),
-       outputColorInfo.colorTransfer,
-       isInputTransferHdr);
+   return createWithSampler(
+       createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath),
+       matrixTransformations,
+       rgbMatrices,
+       inputColorInfo,
+       outputColorInfo);
  }

  /**
@@ -305,6 +311,58 @@ import java.util.List;
        outputIsHdr);
  }

private static MatrixShaderProgram createWithSampler(
GlProgram glProgram,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
@C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
if (isInputTransferHdr) {
checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);
// In HDR editing mode the decoder output is sampled in YUV.
if (!GlUtil.isYuvTargetExtensionSupported()) {
throw new FrameProcessingException(
"The EXT_YUV_target extension is required for HDR editing input.");
}
glProgram.setFloatsUniform(
"uYuvToRgbColorTransform",
inputColorInfo.colorRange == C.COLOR_RANGE_FULL
? BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX
: BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX);
checkArgument(ColorInfo.isTransferHdr(inputColorInfo));
glProgram.setIntUniform("uInputColorTransfer", inputColorInfo.colorTransfer);
// TODO(b/239735341): Add a setBooleanUniform method to GlProgram.
glProgram.setIntUniform(
"uApplyHdrToSdrToneMapping",
/* value= */ (outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020) ? 1 : 0);
checkArgument(
outputColorTransfer != Format.NO_VALUE && outputColorTransfer != C.COLOR_TRANSFER_SDR);
glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
} else {
checkArgument(
outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020,
"Converting from SDR to HDR is not supported.");
checkArgument(inputColorInfo.colorSpace == outputColorInfo.colorSpace);
checkArgument(
outputColorTransfer == C.COLOR_TRANSFER_SDR
|| outputColorTransfer == C.COLOR_TRANSFER_LINEAR);
// The SDR shader automatically applies an COLOR_TRANSFER_SDR EOTF.
glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
}
return new MatrixShaderProgram(
glProgram,
ImmutableList.copyOf(matrixTransformations),
ImmutableList.copyOf(rgbMatrices),
outputColorInfo.colorTransfer,
isInputTransferHdr);
}

  /**
   * Creates a new instance.
   *


@@ -62,10 +62,12 @@ public final class FrameProcessorTestRunner {
    private float pixelWidthHeightRatio;
    private @MonotonicNonNull ColorInfo inputColorInfo;
    private @MonotonicNonNull ColorInfo outputColorInfo;
    private @C.TrackType int inputTrackType;

    /** Creates a new instance with default values. */
    public Builder() {
      pixelWidthHeightRatio = DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO;
      inputTrackType = C.TRACK_TYPE_VIDEO;
    }

    /**
@@ -168,6 +170,16 @@ public final class FrameProcessorTestRunner {
      this.outputColorInfo = outputColorInfo;
      return this;
    }

/**
* Sets the input track type. See {@link FrameProcessor.Factory#create}.
*
* <p>The default value is {@link C#TRACK_TYPE_VIDEO}.
*/
@CanIgnoreReturnValue
public Builder setInputTrackType(@C.TrackType int inputTrackType) {
this.inputTrackType = inputTrackType;
return this;
}

    public FrameProcessorTestRunner build() throws FrameProcessingException {
      checkStateNotNull(testId, "testId must be set.");

@@ -182,7 +194,8 @@ public final class FrameProcessorTestRunner {
          effects == null ? ImmutableList.of() : effects,
          pixelWidthHeightRatio,
          inputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : inputColorInfo,
-         outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo);
+         outputColorInfo == null ? ColorInfo.SDR_BT709_LIMITED : outputColorInfo,
+         inputTrackType);
    }
  }

@@ -211,7 +224,8 @@ public final class FrameProcessorTestRunner {
      ImmutableList<Effect> effects,
      float pixelWidthHeightRatio,
      ColorInfo inputColorInfo,
-     ColorInfo outputColorInfo)
+     ColorInfo outputColorInfo,
+     @C.TrackType int inputTrackType)
      throws FrameProcessingException {
    this.testId = testId;
    this.videoAssetPath = videoAssetPath;

@@ -226,7 +240,7 @@ public final class FrameProcessorTestRunner {
            DebugViewProvider.NONE,
            inputColorInfo,
            outputColorInfo,
-           C.TRACK_TYPE_VIDEO,
+           inputTrackType,
            /* releaseFramesAutomatically= */ true,
            MoreExecutors.directExecutor(),
            new FrameProcessor.Listener() {

@@ -278,6 +292,19 @@ public final class FrameProcessorTestRunner {
          }
        },
        frameProcessor.getInputSurface());
    return endFrameProcessingAndGetImage();
  }

public Bitmap processImageFrameAndEnd(Bitmap inputBitmap) throws Exception {
frameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
frameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
return endFrameProcessingAndGetImage();
}
private Bitmap endFrameProcessingAndGetImage() throws Exception {
    frameProcessor.signalEndOfInput();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);