Work around SurfaceTexture implicit scale

If MediaCodec passes an image buffer with a cropped region,
SurfaceTexture.getTransformMatrix will cut off 2 pixels from each dimension.
The resulting videos appear slightly stretched.

This patch inspects the SurfaceTexture transform matrix and guesses what the
unscaled transform matrix should be.
The workaround is gated behind the experimentalAdjustSurfaceTextureTransformationMatrix flag.

PiperOrigin-RevId: 635721267
dancho 2024-05-21 01:43:54 -07:00 committed by Copybara-Service
parent c409623ca0
commit b047e81e02
21 changed files with 724 additions and 32 deletions
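To make the failure concrete, here are illustrative numbers (assumed for this sketch, not taken from any particular device): an 854-pixel-wide frame decoded into a buffer aligned up to 864 texels. SurfaceTexture reports a width scale of (854 - 2) / 864 rather than the crop's 854 / 864, so 852 source columns get stretched across 854 output columns.

```java
/** A minimal sketch of the implicit scale, under assumed buffer sizes. */
public final class ImplicitScaleDemo {
  public static void main(String[] args) {
    int visibleWidth = 854; // visible pixels in the decoded frame
    int bufferWidth = 864;  // assumed: 854 rounded up to a multiple of 32
    int trim = 2;           // safety border trimmed by SurfaceTexture

    float reportedScale = (float) (visibleWidth - trim) / bufferWidth; // ~0.98611
    float unscaledScale = (float) visibleWidth / bufferWidth;          // ~0.98843

    // The workaround in this change guesses unscaledScale back from reportedScale.
    System.out.printf("reported=%.5f unscaled=%.5f%n", reportedScale, unscaledScale);
  }
}
```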

View File

@ -46,6 +46,8 @@ public final class GlProgram {
private final Map<String, Attribute> attributeByName;
private final Map<String, Uniform> uniformByName;
private boolean externalTexturesRequireNearestSampling;
/**
* Compiles a GL shader program from vertex and fragment shader GLSL GLES20 code.
*
@ -219,10 +221,20 @@ public final class GlProgram {
attribute.bind();
}
for (Uniform uniform : uniforms) {
uniform.bind();
uniform.bind(externalTexturesRequireNearestSampling);
}
}
/**
* Sets whether to sample external textures with GL_NEAREST.
*
* <p>The default value is {@code false}.
*/
public void setExternalTexturesRequireNearestSampling(
boolean externalTexturesRequireNearestSampling) {
this.externalTexturesRequireNearestSampling = externalTexturesRequireNearestSampling;
}
/** Returns the length of the null-terminated C string in {@code cString}. */
private static int getCStringLength(byte[] cString) {
for (int i = 0; i < cString.length; ++i) {
@ -363,7 +375,8 @@ public final class GlProgram {
}
/**
* Configures {@link #bind()} to use the specified {@code texId} for this sampler uniform.
* Configures {@link #bind(boolean)} to use the specified {@code texId} for this sampler
* uniform.
*
* @param texId The GL texture identifier from which to sample.
* @param texUnitIndex The GL texture unit index.
@ -373,22 +386,22 @@ public final class GlProgram {
this.texUnitIndex = texUnitIndex;
}
/** Configures {@link #bind()} to use the specified {@code int} {@code value}. */
/** Configures {@link #bind(boolean)} to use the specified {@code int} {@code value}. */
public void setInt(int value) {
this.intValue[0] = value;
}
/** Configures {@link #bind()} to use the specified {@code int[]} {@code value}. */
/** Configures {@link #bind(boolean)} to use the specified {@code int[]} {@code value}. */
public void setInts(int[] value) {
System.arraycopy(value, /* srcPos= */ 0, this.intValue, /* destPos= */ 0, value.length);
}
/** Configures {@link #bind()} to use the specified {@code float} {@code value}. */
/** Configures {@link #bind(boolean)} to use the specified {@code float} {@code value}. */
public void setFloat(float value) {
this.floatValue[0] = value;
}
/** Configures {@link #bind()} to use the specified {@code float[]} {@code value}. */
/** Configures {@link #bind(boolean)} to use the specified {@code float[]} {@code value}. */
public void setFloats(float[] value) {
System.arraycopy(value, /* srcPos= */ 0, this.floatValue, /* destPos= */ 0, value.length);
}
@ -398,8 +411,12 @@ public final class GlProgram {
* #setFloat(float)} or {@link #setFloats(float[])}.
*
* <p>Should be called before each drawing call.
*
* @param externalTexturesRequireNearestSampling Whether the external texture requires
* GL_NEAREST sampling to avoid sampling from an undefined region, which could happen when
* using GL_LINEAR.
*/
public void bind() throws GlUtil.GlException {
public void bind(boolean externalTexturesRequireNearestSampling) throws GlUtil.GlException {
switch (type) {
case GLES20.GL_INT:
GLES20.glUniform1iv(location, /* count= */ 1, intValue, /* offset= */ 0);
@ -455,7 +472,10 @@ public final class GlProgram {
type == GLES20.GL_SAMPLER_2D
? GLES20.GL_TEXTURE_2D
: GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
texIdValue);
texIdValue,
type == GLES20.GL_SAMPLER_2D || !externalTexturesRequireNearestSampling
? GLES20.GL_LINEAR
: GLES20.GL_NEAREST);
GLES20.glUniform1i(location, texUnitIndex);
GlUtil.checkGlError();
break;
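For context on why GL_NEAREST is needed: GL_LINEAR blends the texels whose centers straddle the sample point, so a sample at the edge of the visible crop can pull in a texel from the cropped, undefined region. A standalone 1-D illustration, with assumed sizes:

```java
/** Shows how GL_LINEAR can blend in an undefined texel at a crop edge (assumed sizes). */
public final class CropEdgeSamplingDemo {
  public static void main(String[] args) {
    int bufferWidth = 864;                    // assumed buffer width in texels
    double sampleCoord = 854.0 / bufferWidth; // normalized coordinate of the crop edge

    double texelSpace = sampleCoord * bufferWidth - 0.5; // 853.5
    int leftTexel = (int) Math.floor(texelSpace);        // 853: last visible texel
    int rightTexel = leftTexel + 1;                      // 854: cropped, contents undefined
    double rightWeight = texelSpace - leftTexel;         // 0.5

    System.out.printf(
        "GL_LINEAR at the crop edge reads texel %d (weight %.1f) and undefined texel %d%n",
        leftTexel, 1 - rightWeight, rightTexel);
    // GL_NEAREST instead snaps to the single nearest texel, which stays inside the
    // visible region for any coordinate the adjusted transform matrix produces.
  }
}
```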

View File

@ -632,7 +632,7 @@ public final class GlUtil {
*/
public static int createExternalTexture() throws GlException {
int texId = generateTexture();
bindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId);
bindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId, GLES20.GL_LINEAR);
return texId;
}
@ -687,7 +687,7 @@ public final class GlUtil {
throws GlException {
assertValidTextureSize(width, height);
int texId = generateTexture();
bindTexture(GLES20.GL_TEXTURE_2D, texId);
bindTexture(GLES20.GL_TEXTURE_2D, texId, GLES20.GL_LINEAR);
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D,
/* level= */ 0,
@ -713,26 +713,29 @@ public final class GlUtil {
/** Sets the {@code texId} to contain the {@link Bitmap bitmap} data and size. */
public static void setTexture(int texId, Bitmap bitmap) throws GlException {
assertValidTextureSize(bitmap.getWidth(), bitmap.getHeight());
bindTexture(GLES20.GL_TEXTURE_2D, texId);
bindTexture(GLES20.GL_TEXTURE_2D, texId, GLES20.GL_LINEAR);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
checkGlError();
}
/**
* Binds the texture of the given type with default configuration of GL_LINEAR filtering and
* Binds the texture of the given type with the specified MIN and MAG sampling filter and
* GL_CLAMP_TO_EDGE wrapping.
*
* @param textureTarget The target to which the texture is bound, e.g. {@link
* GLES20#GL_TEXTURE_2D} for a two-dimensional texture or {@link
* GLES11Ext#GL_TEXTURE_EXTERNAL_OES} for an external texture.
* @param texId The texture identifier.
* @param sampleFilter The texture sample filter for both {@link GLES20#GL_TEXTURE_MAG_FILTER} and
* {@link GLES20#GL_TEXTURE_MIN_FILTER}.
*/
public static void bindTexture(int textureTarget, int texId) throws GlException {
public static void bindTexture(int textureTarget, int texId, int sampleFilter)
throws GlException {
GLES20.glBindTexture(textureTarget, texId);
checkGlError();
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, sampleFilter);
checkGlError();
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, sampleFilter);
checkGlError();
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
checkGlError();
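A hypothetical call site for the new three-argument overload (the texture id and flag are illustrative): external textures drop to GL_NEAREST only when the workaround demands it, while regular 2D textures keep GL_LINEAR.

```java
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import androidx.media3.common.util.GlUtil;

final class BindTextureUsage {
  /** Binds a decoder-facing external texture, honoring the nearest-sampling workaround. */
  static void bindExternal(int externalTexId, boolean requireNearestSampling)
      throws GlUtil.GlException {
    GlUtil.bindTexture(
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        externalTexId,
        requireNearestSampling ? GLES20.GL_NEAREST : GLES20.GL_LINEAR);
  }
}
```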

View File

@ -68,6 +68,7 @@ public final class DebugTraceUtil {
EVENT_OUTPUT_ENDED,
EVENT_REGISTER_NEW_INPUT_STREAM,
EVENT_SURFACE_TEXTURE_INPUT,
EVENT_SURFACE_TEXTURE_TRANSFORM_FIX,
EVENT_QUEUE_FRAME,
EVENT_QUEUE_BITMAP,
EVENT_QUEUE_TEXTURE,
@ -95,6 +96,7 @@ public final class DebugTraceUtil {
public static final String EVENT_OUTPUT_ENDED = "OutputEnded";
public static final String EVENT_REGISTER_NEW_INPUT_STREAM = "RegisterNewInputStream";
public static final String EVENT_SURFACE_TEXTURE_INPUT = "SurfaceTextureInput";
public static final String EVENT_SURFACE_TEXTURE_TRANSFORM_FIX = "SurfaceTextureTransformFix";
public static final String EVENT_QUEUE_FRAME = "QueueFrame";
public static final String EVENT_QUEUE_BITMAP = "QueueBitmap";
public static final String EVENT_QUEUE_TEXTURE = "QueueTexture";
@ -196,7 +198,9 @@ public final class DebugTraceUtil {
EVENT_OUTPUT_TEXTURE_RENDERED,
EVENT_RECEIVE_END_OF_ALL_INPUT,
EVENT_SIGNAL_ENDED))
.put(COMPONENT_EXTERNAL_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(
COMPONENT_EXTERNAL_TEXTURE_MANAGER,
ImmutableList.of(EVENT_SIGNAL_EOS, EVENT_SURFACE_TEXTURE_TRANSFORM_FIX))
.put(COMPONENT_BITMAP_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(COMPONENT_TEX_ID_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(COMPONENT_COMPOSITOR, ImmutableList.of(EVENT_OUTPUT_TEXTURE_RENDERED))
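Registering the new event under COMPONENT_EXTERNAL_TEXTURE_MANAGER makes it visible in trace summaries. A small sketch of how a test can consume it, mirroring the instrumentation test later in this change:

```java
import androidx.media3.effect.DebugTraceUtil;

final class TraceCheckExample {
  /** Returns whether the SurfaceTexture transform fix was logged during a traced run. */
  static boolean surfaceTextureFixWasLogged() {
    // Requires DebugTraceUtil.enableTracing = true before the export runs.
    String traceSummary = DebugTraceUtil.generateTraceSummary();
    return traceSummary.contains(DebugTraceUtil.EVENT_SURFACE_TEXTURE_TRANSFORM_FIX);
  }
}
```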

View File

@ -269,6 +269,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @param sdrWorkingColorSpace The {@link WorkingColorSpace} to apply effects in.
* @param sampleWithNearest Whether external textures require GL_NEAREST sampling.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
@ -276,7 +277,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Context context,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@WorkingColorSpace int sdrWorkingColorSpace)
@WorkingColorSpace int sdrWorkingColorSpace,
boolean sampleWithNearest)
throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
String vertexShaderFilePath =
@ -304,6 +306,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
"uApplyHdrToSdrToneMapping",
outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020 ? GL_TRUE : GL_FALSE);
}
glProgram.setExternalTexturesRequireNearestSampling(sampleWithNearest);
return createWithSampler(glProgram, inputColorInfo, outputColorInfo, sdrWorkingColorSpace);
}

View File

@ -137,6 +137,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private GlTextureProducer.@MonotonicNonNull Listener textureOutputListener;
private int textureOutputCapacity;
private boolean requireRegisteringAllInputFrames;
private boolean experimentalAdjustSurfaceTextureTransformationMatrix;
/** Creates an instance. */
public Builder() {
@ -151,6 +152,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
textureOutputListener = factory.textureOutputListener;
textureOutputCapacity = factory.textureOutputCapacity;
requireRegisteringAllInputFrames = !factory.repeatLastRegisteredFrame;
experimentalAdjustSurfaceTextureTransformationMatrix =
factory.experimentalAdjustSurfaceTextureTransformationMatrix;
}
/**
@ -253,6 +256,21 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
return this;
}
/**
* Sets whether the matrix from {@link SurfaceTexture#getTransformMatrix(float[])} is adjusted to
* remove the scale that cuts off a 1- or 2-texel border around the edge of a crop.
*
* <p>When set, programs sampling GL_TEXTURE_EXTERNAL_OES from {@link SurfaceTexture} must not
* attempt to access data in any cropped region, including via GL_LINEAR resampling filter.
*/
@CanIgnoreReturnValue
public Builder setExperimentalAdjustSurfaceTextureTransformationMatrix(
boolean experimentalAdjustSurfaceTextureTransformationMatrix) {
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
return this;
}
/** Builds a {@link DefaultVideoFrameProcessor.Factory} instance. */
public DefaultVideoFrameProcessor.Factory build() {
return new DefaultVideoFrameProcessor.Factory(
@ -261,7 +279,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
glObjectsProvider == null ? new DefaultGlObjectsProvider() : glObjectsProvider,
executorService,
textureOutputListener,
textureOutputCapacity);
textureOutputCapacity,
experimentalAdjustSurfaceTextureTransformationMatrix);
}
}
@ -271,6 +290,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Nullable private final ExecutorService executorService;
@Nullable private final GlTextureProducer.Listener textureOutputListener;
private final int textureOutputCapacity;
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
private Factory(
@WorkingColorSpace int sdrWorkingColorSpace,
@ -278,13 +298,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
GlObjectsProvider glObjectsProvider,
@Nullable ExecutorService executorService,
@Nullable GlTextureProducer.Listener textureOutputListener,
int textureOutputCapacity) {
int textureOutputCapacity,
boolean experimentalAdjustSurfaceTextureTransformationMatrix) {
this.sdrWorkingColorSpace = sdrWorkingColorSpace;
this.repeatLastRegisteredFrame = repeatLastRegisteredFrame;
this.glObjectsProvider = glObjectsProvider;
this.executorService = executorService;
this.textureOutputListener = textureOutputListener;
this.textureOutputCapacity = textureOutputCapacity;
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
}
public Builder buildUpon() {
@ -347,7 +370,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
glObjectsProvider,
textureOutputListener,
textureOutputCapacity,
repeatLastRegisteredFrame));
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix));
try {
return defaultVideoFrameProcessorFuture.get();
@ -715,7 +739,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
GlObjectsProvider glObjectsProvider,
@Nullable GlTextureProducer.Listener textureOutputListener,
int textureOutputCapacity,
boolean repeatLastRegisteredFrame)
boolean repeatLastRegisteredFrame,
boolean experimentalAdjustSurfaceTextureTransformationMatrix)
throws GlUtil.GlException, VideoFrameProcessingException {
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
int[] configAttributes =
@ -746,7 +771,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/* errorListenerExecutor= */ videoFrameProcessorListenerExecutor,
/* samplingShaderProgramErrorListener= */ listener::onError,
sdrWorkingColorSpace,
repeatLastRegisteredFrame);
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix);
FinalShaderProgramWrapper finalShaderProgramWrapper =
new FinalShaderProgramWrapper(
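Enabling the flag from client code is a single builder call; this sketch mirrors the pattern the test utilities in this change use:

```java
import androidx.media3.effect.DefaultVideoFrameProcessor;

final class FlagUsage {
  /** Builds a video frame processor factory with the experimental matrix fix enabled. */
  static DefaultVideoFrameProcessor.Factory createFactory() {
    return new DefaultVideoFrameProcessor.Factory.Builder()
        .setExperimentalAdjustSurfaceTextureTransformationMatrix(true)
        .build();
  }
}
```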

View File

@ -24,9 +24,12 @@ import static androidx.media3.effect.DebugTraceUtil.COMPONENT_VFP;
import static androidx.media3.effect.DebugTraceUtil.EVENT_QUEUE_FRAME;
import static androidx.media3.effect.DebugTraceUtil.EVENT_SIGNAL_EOS;
import static androidx.media3.effect.DebugTraceUtil.EVENT_SURFACE_TEXTURE_INPUT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_SURFACE_TEXTURE_TRANSFORM_FIX;
import static java.lang.Math.abs;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.graphics.SurfaceTexture;
import android.opengl.GLES31;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
@ -54,6 +57,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private static final String TAG = "ExtTexMgr";
private static final String TIMER_THREAD_NAME = "ExtTexMgr:Timer";
private static final int[] TRANSFORMATION_MATRIX_EXPECTED_ZERO_INDICES = {
2, 3, 6, 7, 8, 9, 11, 14
};
// In the worst case, we should be able to differentiate between numbers of the form
// A / B and (A + 1) / (B + 1) where A and B are around video resolution.
// For 8K, width = 7680.
// abs(7679 / 7680 - 7680 / 7681) > 1e-8. We pick EPSILON = 1e-9.
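// The gap is 1 / 7680 - 1 / 7681 = 1 / (7680 * 7681), approximately 1.7e-8.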
private static final float EPSILON = 1e-9f;
/**
* The timeout in milliseconds after calling signalEndOfCurrentInputStream after which the input
@ -79,6 +90,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final ScheduledExecutorService scheduledExecutorService;
private final AtomicInteger externalShaderProgramInputCapacity;
private final boolean repeatLastRegisteredFrame;
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
private int availableFrameCount;
private boolean currentInputStreamEnded;
@ -104,6 +116,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* can call {@link #registerInputFrame(FrameInfo)} only once. Else, every input frame needs to
* be {@linkplain #registerInputFrame(FrameInfo) registered} before they are made available on
* the {@linkplain #getInputSurface() input Surface}.
* @param experimentalAdjustSurfaceTextureTransformationMatrix If {@code true}, the {@link
* SurfaceTexture#getTransformMatrix(float[])} will be adjusted to remove the scale that cuts
* off a 1- or 2-texel border around the edge of a crop.
* @throws VideoFrameProcessingException If a problem occurs while creating the external texture.
*/
// The onFrameAvailableListener will not be invoked until the constructor returns.
@ -111,11 +126,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public ExternalTextureManager(
GlObjectsProvider glObjectsProvider,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
boolean repeatLastRegisteredFrame)
boolean repeatLastRegisteredFrame,
boolean experimentalAdjustSurfaceTextureTransformationMatrix)
throws VideoFrameProcessingException {
super(videoFrameProcessingTaskExecutor);
this.glObjectsProvider = glObjectsProvider;
this.repeatLastRegisteredFrame = repeatLastRegisteredFrame;
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
try {
externalTexId = GlUtil.createExternalTexture();
} catch (GlUtil.GlException e) {
@ -362,11 +380,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
externalShaderProgramInputCapacity.decrementAndGet();
surfaceTexture.getTransformMatrix(textureTransformMatrix);
checkNotNull(externalShaderProgram).setTextureTransformMatrix(textureTransformMatrix);
long frameTimeNs = surfaceTexture.getTimestamp();
long offsetToAddUs = currentFrame.offsetToAddUs;
// Correct presentationTimeUs so that GlShaderPrograms don't see the stream offset.
long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs;
if (experimentalAdjustSurfaceTextureTransformationMatrix) {
removeSurfaceTextureScaleFromTransformMatrix(
textureTransformMatrix, presentationTimeUs, currentFrame.width, currentFrame.height);
}
checkNotNull(externalShaderProgram).setTextureTransformMatrix(textureTransformMatrix);
checkNotNull(externalShaderProgram)
.queueInputFrame(
glObjectsProvider,
@ -383,4 +406,154 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
DebugTraceUtil.logEvent(COMPONENT_VFP, EVENT_QUEUE_FRAME, presentationTimeUs);
// If the queued frame is the last frame, end of stream will be signaled onInputFrameProcessed.
}
/**
* Adjusts textureTransformMatrix in place to remove any scaling applied by {@link
* SurfaceTexture#getTransformMatrix(float[])}. This method preserves cropping.
*
* <p>This method requires that textureTransformMatrix is a 4x4 column-major matrix that applies
* a scale and translation to OpenGL coordinates of the form (s, t, 0, 1).
*
* @param textureTransformMatrix The matrix to be modified in place.
* @param presentationTimeUs The presentation time of the frame being processed.
* @param visibleWidth The expected visible width in pixels of the texture.
* @param visibleHeight The expected visible height in pixels of the texture.
*/
private static void removeSurfaceTextureScaleFromTransformMatrix(
float[] textureTransformMatrix,
long presentationTimeUs,
int visibleWidth,
int visibleHeight) {
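// The matrix is column-major and acts on (s, t, 0, 1). For a 0/180 degree stream:
//   [ m[0]   0    0   m[12] ]   m[0], m[12]: width scale and translation
//   [  0    m[5]  0   m[13] ]   m[5], m[13]: height scale and translation
//   [  0     0    1    0    ]
//   [  0     0    0    1    ]
// For 90/270 degrees, m[1] and m[4] carry the scales instead, swapping the width
// and height axes.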
boolean isMatrixUnexpected = false;
isMatrixUnexpected |= (textureTransformMatrix.length != 16);
for (int i : TRANSFORMATION_MATRIX_EXPECTED_ZERO_INDICES) {
isMatrixUnexpected |= (abs(textureTransformMatrix[i]) > EPSILON);
}
isMatrixUnexpected |= (abs(textureTransformMatrix[10] - 1f) > EPSILON);
isMatrixUnexpected |= (abs(textureTransformMatrix[15] - 1f) > EPSILON);
int widthScaleIndex = C.INDEX_UNSET;
int widthTranslationIndex = C.INDEX_UNSET;
int heightScaleIndex = C.INDEX_UNSET;
int heightTranslationIndex = C.INDEX_UNSET;
if (abs(textureTransformMatrix[0]) > EPSILON && abs(textureTransformMatrix[5]) > EPSILON) {
// 0 or 180 degree rotation. T maps width to width.
widthScaleIndex = 0;
widthTranslationIndex = 12;
heightScaleIndex = 5;
heightTranslationIndex = 13;
isMatrixUnexpected |= (abs(textureTransformMatrix[1]) > EPSILON);
isMatrixUnexpected |= (abs(textureTransformMatrix[4]) > EPSILON);
} else if (abs(textureTransformMatrix[1]) > EPSILON
&& abs(textureTransformMatrix[4]) > EPSILON) {
// 90 or 270 degree rotation. T swaps width and height.
widthScaleIndex = 1;
widthTranslationIndex = 13;
heightScaleIndex = 4;
heightTranslationIndex = 12;
isMatrixUnexpected |= (abs(textureTransformMatrix[0]) > EPSILON);
isMatrixUnexpected |= (abs(textureTransformMatrix[5]) > EPSILON);
} else {
isMatrixUnexpected = true;
}
if (isMatrixUnexpected) {
DebugTraceUtil.logEvent(
COMPONENT_EXTERNAL_TEXTURE_MANAGER,
EVENT_SURFACE_TEXTURE_TRANSFORM_FIX,
presentationTimeUs,
/* extraFormat= */ "Unable to apply SurfaceTexture fix");
return;
}
float widthScale = textureTransformMatrix[widthScaleIndex];
float widthTranslation = textureTransformMatrix[widthTranslationIndex];
if (abs(widthScale) + EPSILON < 1f) {
// Applying a scale to the width means that some region of the texture must be cropped.
// Try to guess what the scale would be if SurfaceTexture didn't trim a few more pixels, in
// addition to the required crop.
float adjustedWidthScale =
Math.copySign(
guessScaleWithoutSurfaceTextureTrim(abs(widthScale), visibleWidth), widthScale);
float adjustedWidthTranslation = 0.5f * (widthScale - adjustedWidthScale) + widthTranslation;
DebugTraceUtil.logEvent(
COMPONENT_EXTERNAL_TEXTURE_MANAGER,
EVENT_SURFACE_TEXTURE_TRANSFORM_FIX,
presentationTimeUs,
/* extraFormat= */ "Width scale adjusted.");
textureTransformMatrix[widthScaleIndex] = adjustedWidthScale;
// Update translation to preserve midpoint. T(0.5, 0, 0, 1) remains fixed.
textureTransformMatrix[widthTranslationIndex] = adjustedWidthTranslation;
}
float heightScale = textureTransformMatrix[heightScaleIndex];
float heightTranslation = textureTransformMatrix[heightTranslationIndex];
if (abs(heightScale) + EPSILON < 1f) {
// Applying a scale to the height means that some region of the texture must be cropped.
// Try to guess what the scale would be if SurfaceTexture didn't trim a few more
// pixels, in addition to the required crop.
float adjustedHeightScale =
Math.copySign(
guessScaleWithoutSurfaceTextureTrim(abs(heightScale), visibleHeight), heightScale);
float adjustedHeightTranslation =
0.5f * (heightScale - adjustedHeightScale) + heightTranslation;
DebugTraceUtil.logEvent(
COMPONENT_EXTERNAL_TEXTURE_MANAGER,
EVENT_SURFACE_TEXTURE_TRANSFORM_FIX,
presentationTimeUs,
/* extraFormat= */ "Height scale adjusted.");
textureTransformMatrix[heightScaleIndex] = adjustedHeightScale;
// Update translation to preserve midpoint. T(0, 0.5, 0, 1) remains fixed.
textureTransformMatrix[heightTranslationIndex] = adjustedHeightTranslation;
}
}
/**
* Guesses what the 1-D texture coordinate scale would be if SurfaceTexture were cropping without
* trimming a few extra pixels and stretching the image.
*
* <p>This method needs to guess:
*
* <ul>
* <li>bufferSize = texture buffer size in texels. This should be the parameter value {@code
* visibleLength}, rounded up to a multiple of a power of 2.
* <p>Maybe it's rounded up to a multiple of 16 because of H.264 macroblock sizes. Maybe
* it's rounded up to a multiple of 128 because of SIMD instructions.
* <p>bufferSize cannot be read reliably via {@link GLES31#glGetTexLevelParameteriv(int,
* int, int, int[], int)} across devices.
* <p>bufferSize cannot be read reliably from the decoder's {@link
* android.media.MediaFormat} across decoder implementations.
* <li>trim = number of pixels trimmed by {@link SurfaceTexture} in addition to the cropped
* region required for buffer SIMD alignment. As of the time of writing, this will be 0, 1
* or 2.
* </ul>
*
* <p>This method will use the guessed bufferSize and trim values that most closely approximate
* surfaceTextureScale.
*
* @param surfaceTextureScale The absolute value of the scaling factor from {@link
* SurfaceTexture#getTransformMatrix(float[])}. It has the form {@code (visibleLength - trim)
* / bufferSize}.
* @param visibleLength Expected size in pixels of the visible range.
* @return The scale without trim, of the form {@code visibleLength / bufferSize}.
*/
private static float guessScaleWithoutSurfaceTextureTrim(
float surfaceTextureScale, int visibleLength) {
float bestGuess = 1;
float scaleWithoutTrim = 1;
for (int align = 2; align <= 256; align *= 2) {
int candidateBufferSize = ((visibleLength + align - 1) / align) * align;
for (int trimmedPixels = 0; trimmedPixels <= 2; trimmedPixels++) {
float guess = ((float) visibleLength - trimmedPixels) / candidateBufferSize;
if (abs(guess - surfaceTextureScale) < abs(bestGuess - surfaceTextureScale)) {
bestGuess = guess;
scaleWithoutTrim = (float) visibleLength / candidateBufferSize;
}
}
}
if (abs(bestGuess - surfaceTextureScale) > EPSILON) {
// Best guess is too far off. Accept that we'll scale.
return surfaceTextureScale;
}
return scaleWithoutTrim;
}
}
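To see the guessing loop at work, here is a standalone re-trace with assumed inputs (an 854-pixel-wide frame, a decoder that aligned the buffer to 864 texels, and a 2-pixel trim):

```java
/** Re-traces guessScaleWithoutSurfaceTextureTrim on assumed inputs. */
public final class GuessScaleDemo {
  public static void main(String[] args) {
    float surfaceTextureScale = 852f / 864f; // observed (visibleLength - trim) / bufferSize
    int visibleLength = 854;

    float bestGuess = 1;
    float scaleWithoutTrim = 1;
    for (int align = 2; align <= 256; align *= 2) {
      int candidateBufferSize = ((visibleLength + align - 1) / align) * align;
      for (int trimmedPixels = 0; trimmedPixels <= 2; trimmedPixels++) {
        float guess = ((float) visibleLength - trimmedPixels) / candidateBufferSize;
        if (Math.abs(guess - surfaceTextureScale) < Math.abs(bestGuess - surfaceTextureScale)) {
          bestGuess = guess;
          scaleWithoutTrim = (float) visibleLength / candidateBufferSize;
        }
      }
    }
    // align=16 gives candidateBufferSize=864, and trimmedPixels=2 matches the observed
    // scale exactly, so the restored scale is 854 / 864 (~0.988426 vs ~0.986111).
    System.out.printf("observed=%.6f restored=%.6f%n", surfaceTextureScale, scaleWithoutTrim);
  }
}
```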

View File

@ -53,6 +53,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
private final Executor errorListenerExecutor;
private final SparseArray<Input> inputs;
private final @WorkingColorSpace int sdrWorkingColorSpace;
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
private @MonotonicNonNull GlShaderProgram downstreamShaderProgram;
private @MonotonicNonNull TextureManager activeTextureManager;
@ -65,7 +66,8 @@ import org.checkerframework.checker.nullness.qual.Nullable;
Executor errorListenerExecutor,
GlShaderProgram.ErrorListener samplingShaderProgramErrorListener,
@WorkingColorSpace int sdrWorkingColorSpace,
boolean repeatLastRegisteredFrame)
boolean repeatLastRegisteredFrame,
boolean experimentalAdjustSurfaceTextureTransformationMatrix)
throws VideoFrameProcessingException {
this.context = context;
this.outputColorInfo = outputColorInfo;
@ -75,13 +77,18 @@ import org.checkerframework.checker.nullness.qual.Nullable;
this.samplingShaderProgramErrorListener = samplingShaderProgramErrorListener;
this.inputs = new SparseArray<>();
this.sdrWorkingColorSpace = sdrWorkingColorSpace;
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
// TODO(b/274109008): Investigate lazy instantiating the texture managers.
inputs.put(
INPUT_TYPE_SURFACE,
new Input(
new ExternalTextureManager(
glObjectsProvider, videoFrameProcessingTaskExecutor, repeatLastRegisteredFrame)));
glObjectsProvider,
videoFrameProcessingTaskExecutor,
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix)));
inputs.put(
INPUT_TYPE_BITMAP,
new Input(new BitmapTextureManager(glObjectsProvider, videoFrameProcessingTaskExecutor)));
@ -99,7 +106,11 @@ import org.checkerframework.checker.nullness.qual.Nullable;
case INPUT_TYPE_SURFACE:
samplingShaderProgram =
DefaultShaderProgram.createWithExternalSampler(
context, inputColorInfo, outputColorInfo, sdrWorkingColorSpace);
context,
inputColorInfo,
outputColorInfo,
sdrWorkingColorSpace,
experimentalAdjustSurfaceTextureTransformationMatrix);
break;
case INPUT_TYPE_BITMAP:
case INPUT_TYPE_TEXTURE_ID:

View File

@ -316,7 +316,7 @@ public final class VideoDecoderGLSurfaceView extends GLSurfaceView
for (int i = 0; i < 3; i++) {
GLES20.glUniform1i(program.getUniformLocation(TEXTURE_UNIFORMS[i]), i);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GlUtil.bindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GlUtil.bindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i], GLES20.GL_LINEAR);
}
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {

View File

@ -137,6 +137,16 @@ public final class AndroidTestUtil {
.setFrameRate(30.0f)
.build();
public static final String MP4_ASSET_CHECKERBOARD_VIDEO_URI_STRING =
"asset:///media/mp4/checkerboard_854x356_avc_baseline.mp4";
public static final Format MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT =
new Format.Builder()
.setSampleMimeType(VIDEO_H264)
.setWidth(854)
.setHeight(356)
.setFrameRate(25.0f)
.build();
public static final String MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING =
"asset:///media/mp4/sample_with_increasing_timestamps.mp4";
public static final Format MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT =
@ -246,8 +256,8 @@ public final class AndroidTestUtil {
public static final Format MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT =
new Format.Builder()
.setSampleMimeType(VIDEO_AV1)
.setWidth(1920)
.setHeight(1080)
.setWidth(720)
.setHeight(1280)
.setFrameRate(59.94f)
.setColorInfo(
new ColorInfo.Builder()

View File

@ -20,14 +20,20 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIX
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.test.utils.TestUtil.assertBitmapsAreSimilar;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static com.google.common.truth.Truth.assertWithMessage;
import android.content.Context;
import android.graphics.Bitmap;
import androidx.annotation.Nullable;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.mediacodec.MediaCodecInfo;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
@ -35,9 +41,19 @@ import java.util.List;
/** Utility class for testing {@link EditedMediaItemSequence} instances. */
public final class SequenceEffectTestUtil {
public static final ImmutableList<Effect> NO_EFFECT = ImmutableList.of();
public static final long SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS = 50;
/**
* Luma PSNR values between 30 and 50 are considered good for lossy compression (see <a
* href="https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio#Quality_estimation_with_PSNR">Quality
* estimation with PSNR</a>). Other than that, the values in this file are pretty arbitrary -- 1 dB
* more and tests start failing on some devices.
*/
public static final float PSNR_THRESHOLD = 35f;
public static final float PSNR_THRESHOLD_HD = 41f;
private static final String PNG_ASSET_BASE_PATH =
"test-generated-goldens/transformer_sequence_effect_test";
public static final long SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS = 50;
private SequenceEffectTestUtil() {}
@ -120,4 +136,106 @@ public final class SequenceEffectTestUtil {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
}
}
/**
* Asserts that the first frame extracted from the video in {@code filePath} matches the expected
* output in {@link #PNG_ASSET_BASE_PATH}/{@code testId}_0.png.
*
* <p>Also saves the first frame as a bitmap, in case it differs from the expected output.
*/
public static void assertFirstFrameMatchesExpectedPsnrAndSave(
Context context, String testId, String filePath, float psnrThreshold)
throws IOException, InterruptedException {
Bitmap firstEncodedFrame = extractBitmapsFromVideo(context, filePath).get(0);
assertBitmapsMatchExpectedPsnrAndSave(
ImmutableList.of(firstEncodedFrame), testId, psnrThreshold);
}
private static void assertBitmapsMatchExpectedPsnrAndSave(
List<Bitmap> actualBitmaps, String testId, float psnrThreshold) throws IOException {
for (int i = 0; i < actualBitmaps.size(); i++) {
maybeSaveTestBitmap(
testId, /* bitmapLabel= */ String.valueOf(i), actualBitmaps.get(i), /* path= */ null);
}
for (int i = 0; i < actualBitmaps.size(); i++) {
String subTestId = testId + "_" + i;
String expectedPath = Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId);
Bitmap expectedBitmap = readBitmap(expectedPath);
assertBitmapsAreSimilar(expectedBitmap, actualBitmaps.get(i), psnrThreshold);
}
}
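For reference, the PSNR these thresholds refer to is the standard definition for 8-bit samples; the actual comparison lives in assertBitmapsAreSimilar, and this sketch only restates the metric:

```java
/** Standard peak signal-to-noise ratio for 8-bit samples, in dB. */
final class PsnrSketch {
  static double psnr(double meanSquaredError) {
    double peak = 255.0; // maximum 8-bit sample value
    return 10 * Math.log10((peak * peak) / meanSquaredError);
  }
}
```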
/**
* Returns whether the MediaCodecInfo decoder is known to produce incorrect colours on this
* device.
*
* <p>Washed out colours are probably caused by incorrect color space assumptions by MediaCodec.
*/
public static boolean decoderProducesWashedOutColours(MediaCodecInfo mediaCodecInfo) {
return mediaCodecInfo.name.equals("OMX.google.h264.decoder")
&& (Util.MODEL.equals("ANE-LX1")
|| Util.MODEL.equals("MHA-L29")
|| Util.MODEL.equals("COR-L29"));
}
/**
* Tries to export the {@link Composition} with a high-quality {@link Transformer} created via
* {@link #createHqTransformer} with the requested {@code decoderMediaCodecInfo}.
*
* @return The {@link ExportTestResult} when successful, or {@code null} if decoding fails.
* @throws Exception The cause of the export not completing.
*/
@Nullable
public static ExportTestResult tryToExportCompositionWithDecoder(
String testId, Context context, MediaCodecInfo decoderMediaCodecInfo, Composition composition)
throws Exception {
try {
return new TransformerAndroidTestRunner.Builder(
context, createHqTransformer(context, decoderMediaCodecInfo))
.build()
.run(testId, composition);
} catch (ExportException exportException) {
if (exportException.errorCode == ExportException.ERROR_CODE_DECODING_FAILED
|| exportException.errorCode == ExportException.ERROR_CODE_DECODING_FORMAT_UNSUPPORTED
|| exportException.errorCode == ExportException.ERROR_CODE_DECODER_INIT_FAILED) {
return null;
}
throw exportException;
}
}
/**
* Creates a high-quality {@link Transformer} instance.
*
* <p>The {@link Transformer} is configured to select a specific decoder, use experimental
* high-quality {@link DefaultVideoFrameProcessor} configuration, and a large value for {@link
* VideoEncoderSettings#bitrate}.
*/
public static Transformer createHqTransformer(
Context context, MediaCodecInfo decoderMediaCodecInfo) {
Codec.DecoderFactory decoderFactory =
new DefaultDecoderFactory.Builder(context)
.setMediaCodecSelector(
(mimeType, requiresSecureDecoder, requiresTunnelingDecoder) ->
ImmutableList.of(decoderMediaCodecInfo))
.build();
AssetLoader.Factory assetLoaderFactory =
new DefaultAssetLoaderFactory(context, decoderFactory, Clock.DEFAULT);
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setExperimentalAdjustSurfaceTextureTransformationMatrix(true)
.build();
Codec.EncoderFactory encoderFactory =
new DefaultEncoderFactory.Builder(context)
.setRequestedVideoEncoderSettings(
new VideoEncoderSettings.Builder().setBitrate(30_000_000).build())
.build();
return new Transformer.Builder(context)
.setAssetLoaderFactory(assetLoaderFactory)
.setVideoFrameProcessorFactory(videoFrameProcessorFactory)
.setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(encoderFactory))
.build();
}
}

View File

@ -19,40 +19,59 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_SURFACE_TEXTURE_TRANSFORM_FIX;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.AndroidTestUtil.BT601_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.BT601_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.JPG_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_VIDEO_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD_VIDEO_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD;
import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD_HD;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertFirstFrameMatchesExpectedPsnrAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
import static androidx.media3.transformer.SequenceEffectTestUtil.decoderProducesWashedOutColours;
import static androidx.media3.transformer.SequenceEffectTestUtil.oneFrameFromImage;
import static androidx.media3.transformer.SequenceEffectTestUtil.tryToExportCompositionWithDecoder;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeFalse;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Util;
import androidx.media3.effect.BitmapOverlay;
import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.OverlayEffect;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.RgbFilter;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.exoplayer.mediacodec.MediaCodecInfo;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -80,6 +99,11 @@ public final class TransformerSequenceEffectTest {
testId = testName.getMethodName();
}
@After
public void tearDown() {
DebugTraceUtil.enableTracing = false;
}
@Test
public void export_withNoCompositionPresentationAndWithPerMediaItemEffects() throws Exception {
assumeFormatsSupported(
@ -119,6 +143,205 @@ public final class TransformerSequenceEffectTest {
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@Test
public void export1080x720_withAllAvailableDecoders_doesNotStretchOutputOnAny() throws Exception {
assumeFormatsSupported(
context, testId, /* inputFormat= */ MP4_ASSET_FORMAT, /* outputFormat= */ MP4_ASSET_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_ASSET_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(
MP4_ASSET_URI_STRING, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
if (decoderProducesWashedOutColours(mediaCodecInfo)) {
continue;
}
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
@Test
public void export720x1080_withAllAvailableDecoders_doesNotStretchOutputOnAny() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_PORTRAIT_ASSET_FORMAT,
/* outputFormat= */ MP4_PORTRAIT_ASSET_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_PORTRAIT_ASSET_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(
MP4_PORTRAIT_ASSET_URI_STRING,
NO_EFFECT,
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
if (decoderProducesWashedOutColours(mediaCodecInfo)) {
continue;
}
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
@Test
public void export640x428_withAllAvailableDecoders_doesNotStretchOutputOnAny() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ BT601_ASSET_FORMAT,
/* outputFormat= */ BT601_ASSET_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(BT601_ASSET_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(
BT601_ASSET_URI_STRING, NO_EFFECT, /* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
if (decoderProducesWashedOutColours(mediaCodecInfo)) {
continue;
}
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
@Test
public void export1080x720Av1_withAllAvailableDecoders_doesNotStretchOutputOnAny()
throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT,
/* outputFormat= */ MP4_ASSET_AV1_VIDEO_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_ASSET_AV1_VIDEO_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(
MP4_ASSET_AV1_VIDEO_URI_STRING,
NO_EFFECT,
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
if (decoderProducesWashedOutColours(mediaCodecInfo)) {
continue;
}
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
@Test
public void export854x356_withAllAvailableDecoders_doesNotStretchOutputOnAny() throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT,
/* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_ASSET_CHECKERBOARD_VIDEO_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
Presentation.createForWidthAndHeight(
/* width= */ 320, /* height= */ 240, Presentation.LAYOUT_SCALE_TO_FIT),
clippedVideo(
MP4_ASSET_CHECKERBOARD_VIDEO_URI_STRING,
NO_EFFECT,
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
DebugTraceUtil.enableTracing = true;
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
if (decoderProducesWashedOutColours(mediaCodecInfo)) {
continue;
}
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
String traceSummary = DebugTraceUtil.generateTraceSummary();
assertThat(traceSummary.indexOf(EVENT_SURFACE_TEXTURE_TRANSFORM_FIX)).isNotEqualTo(-1);
}
@Test
public void export_withCompositionPresentationAndWithPerMediaItemEffects() throws Exception {
// Reference: b/296225823#comment5

View File

@ -18,15 +18,23 @@
package androidx.media3.transformer.mh;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD_HD;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertFirstFrameMatchesExpectedPsnrAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
import static androidx.media3.transformer.SequenceEffectTestUtil.tryToExportCompositionWithDecoder;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceDoesNotSupportHdrEditing;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsHdrEditing;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
@ -34,11 +42,14 @@ import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.RgbFilter;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.exoplayer.mediacodec.MediaCodecInfo;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.ExportException;
@ -48,6 +59,7 @@ import androidx.media3.transformer.TransformerAndroidTestRunner;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -179,4 +191,78 @@ public final class TransformerSequenceEffectTestWithHdr {
assertBitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, checkNotNull(result.filePath)), testId);
}
@Test
public void export1920x1080Hlg_withAllAvailableDecoders_doesNotStretchOutputOnAny()
throws Exception {
assumeDeviceSupportsHdrEditing(testId, MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(
MP4_ASSET_1080P_5_SECOND_HLG10,
NO_EFFECT,
/* endPositionMs= */ C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
@Test
public void export720x1280Av1Hdr10_withAllAvailableDecoders_doesNotStretchOutputOnAny()
throws Exception {
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT,
/* outputFormat= */ MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT);
List<MediaCodecInfo> mediaCodecInfoList =
MediaCodecSelector.DEFAULT.getDecoderInfos(
checkNotNull(MP4_ASSET_AV1_2_SECOND_HDR10_FORMAT.sampleMimeType),
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false);
Composition composition =
createComposition(
/* presentation= */ null,
clippedVideo(MP4_ASSET_AV1_2_SECOND_HDR10, NO_EFFECT, C.MILLIS_PER_SECOND / 4));
boolean atLeastOneDecoderSucceeds = false;
for (MediaCodecInfo mediaCodecInfo : mediaCodecInfoList) {
@Nullable
ExportTestResult result =
tryToExportCompositionWithDecoder(testId, context, mediaCodecInfo, composition);
if (result == null) {
continue;
}
atLeastOneDecoderSucceeds = true;
assertThat(checkNotNull(result).filePath).isNotNull();
assertFirstFrameMatchesExpectedPsnrAndSave(
context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
}
assertThat(atLeastOneDecoderSucceeds).isTrue();
}
}

View File

@ -75,12 +75,14 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
private Listener listener;
private boolean enableDecoderFallback;
private @C.Priority int codecPriority;
private MediaCodecSelector mediaCodecSelector;
/** Creates a new {@link Builder}. */
public Builder(Context context) {
this.context = context.getApplicationContext();
listener = (codecName, codecInitializationExceptions) -> {};
codecPriority = C.PRIORITY_PROCESSING_FOREGROUND;
mediaCodecSelector = MediaCodecSelector.DEFAULT;
}
/** Sets the {@link Listener}. */
@ -128,6 +130,17 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
return this;
}
/**
* Sets the {@link MediaCodecSelector} used when selecting a decoder.
*
* <p>The default value is {@link MediaCodecSelector#DEFAULT}.
*/
@CanIgnoreReturnValue
public Builder setMediaCodecSelector(MediaCodecSelector mediaCodecSelector) {
this.mediaCodecSelector = mediaCodecSelector;
return this;
}
/** Creates an instance of {@link DefaultDecoderFactory}, using defaults if values are unset. */
public DefaultDecoderFactory build() {
return new DefaultDecoderFactory(this);
@ -138,6 +151,7 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
private final boolean enableDecoderFallback;
private final Listener listener;
private final @C.Priority int codecPriority;
private final MediaCodecSelector mediaCodecSelector;
/**
* @deprecated Use {@link Builder} instead.
@ -169,6 +183,7 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
this.enableDecoderFallback = builder.enableDecoderFallback;
this.listener = builder.listener;
this.codecPriority = builder.codecPriority;
this.mediaCodecSelector = builder.mediaCodecSelector;
}
@Override
@ -241,7 +256,7 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
decoderInfos =
MediaCodecUtil.getDecoderInfosSortedByFormatSupport(
MediaCodecUtil.getDecoderInfosSoftMatch(
MediaCodecSelector.DEFAULT,
mediaCodecSelector,
format,
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false),