Move bitmap coordinate flip out of fragment shader
Fragment shaders in the OpenGL ES shading language aren't guaranteed to support highp, which is required to correctly represent pixel coordinates inside large images (e.g. 1920x1080). This change moves coordinate mirroring for images out of the fragment shader and into a vertex-stage matrix transformation.

Fixes androidx/media#1331
PiperOrigin-RevId: 635732208
parent b047e81e02
commit ae240606db
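Note (not part of the change): to see why the per-fragment flip is fragile without highp, here is a hypothetical, standalone Java sketch that simulates a mediump float, which the GLSL ES spec only requires to carry roughly 10 mantissa bits, and measures how far the per-fragment flip "1.0 - y" can drift for a 1080-pixel-tall image.

// Hypothetical illustration only. toMediump() rounds a float to an fp16-like
// 10-bit mantissa, an assumption about how a GPU without fragment highp might
// hold the interpolated texture coordinate.
final class MediumpFlipError {

  static float toMediump(float x) {
    if (x == 0f) {
      return 0f;
    }
    int exponent = Math.getExponent(x);
    float ulp = Math.scalb(1f, exponent - 10); // granularity of a 10-bit mantissa
    return Math.round(x / ulp) * ulp;
  }

  public static void main(String[] args) {
    int height = 1080;
    float maxErrorTexels = 0f;
    for (int row = 0; row < height; row++) {
      float y = (row + 0.5f) / height; // texel-center coordinate in [0, 1]
      float flipped = toMediump(1f) - toMediump(y); // the flip at reduced precision
      float exact = 1f - y;
      maxErrorTexels = Math.max(maxErrorTexels, Math.abs(flipped - exact) * height);
    }
    // Prints roughly a quarter of a texel of error; together with interpolation
    // error in the varying itself, that can change which row nearest sampling
    // picks near texel boundaries.
    System.out.println("max flip error in texels: " + maxErrorTexels);
  }
}

The vertex-stage matrix flip added below avoids this per-fragment arithmetic: vertex shaders do guarantee highp, and only the four quad vertices need transforming.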
@@ -155,21 +155,8 @@ highp vec3 convertToOutputColors(highp vec3 workingColors) {
   }
 }
 
-vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord) {
-  if (uInputColorTransfer == COLOR_TRANSFER_SRGB) {
-    // Whereas the Android system uses the top-left corner as (0,0) of the
-    // coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
-    // texture gets flipped. We flip the texture vertically to ensure the
-    // orientation of the output is correct.
-    return vec2(originalTexSamplingCoord.x, 1.0 - originalTexSamplingCoord.y);
-  } else {
-    return originalTexSamplingCoord;
-  }
-}
-
 void main() {
-  vec4 inputColor =
-      texture2D(uTexSampler, getAdjustedTexSamplingCoord(vTexSamplingCoord));
+  vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
   vec3 workingColors = convertToWorkingColors(inputColor.rgb);
   vec4 transformedColors = uRgbMatrix * vec4(workingColors, 1);
   gl_FragColor =
@@ -178,20 +178,12 @@ highp vec3 applyOetf(highp vec3 linearColor) {
   }
 }
 
-vec2 getVTexSamplingCoord() {
-  // Whereas the Android system uses the top-left corner as (0,0) of the
-  // coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
-  // texture gets flipped. We flip the texture vertically to ensure the
-  // orientation of the output is correct.
-  return vec2(vTexSamplingCoord.x, 1.0 - vTexSamplingCoord.y);
-}
-
 // Reference:
 // https://developer.android.com/reference/android/graphics/Gainmap#applying-a-gainmap-manually
 // Reference Implementation:
 // https://cs.android.com/android/platform/superproject/main/+/main:frameworks/base/libs/hwui/effects/GainmapRenderer.cpp;l=117-146;drc=fadc20184ccb27fe15bb862e6e03fa6d05d41eac
 highp vec3 applyGainmapToBase(vec4 S) {
-  vec4 G = texture(uGainmapTexSampler, getVTexSamplingCoord());
+  vec4 G = texture(uGainmapTexSampler, vTexSamplingCoord);
   float W = clamp((log(HDR_SDR_RATIO) - log(uDisplayRatioSdr)) /
                       (log(uDisplayRatioHdr) - log(uDisplayRatioSdr)),
                   0.0, 1.0);
@@ -225,7 +217,7 @@ highp vec3 bt709ToBt2020(vec3 bt709Color) {
 }
 
 void main() {
-  vec4 baseElectricalColor = texture(uTexSampler, getVTexSamplingCoord());
+  vec4 baseElectricalColor = texture(uTexSampler, vTexSamplingCoord);
   float alpha = baseElectricalColor.a;
   vec4 baseOpticalColor = vec4(applyEotf(baseElectricalColor.xyz), alpha);
   vec3 opticalBt709Color = applyGainmapToBase(baseOpticalColor);
@@ -248,7 +248,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
           "uApplyHdrToSdrToneMapping",
           outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020 ? GL_TRUE : GL_FALSE);
     }
-    return createWithSampler(glProgram, inputColorInfo, outputColorInfo, sdrWorkingColorSpace);
+    ImmutableList<GlMatrixTransformation> matrixTransformations = ImmutableList.of();
+    if (inputType == INPUT_TYPE_BITMAP) {
+      matrixTransformations =
+          ImmutableList.of(
+              (MatrixTransformation)
+                  presentationTimeUs -> {
+                    android.graphics.Matrix mirrorY = new android.graphics.Matrix();
+                    mirrorY.setScale(/* sx= */ 1, /* sy= */ -1);
+                    return mirrorY;
+                  });
+    }
+    return createWithSampler(
+        glProgram, inputColorInfo, outputColorInfo, sdrWorkingColorSpace, matrixTransformations);
   }
 
   /**
@@ -308,7 +320,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
     glProgram.setExternalTexturesRequireNearestSampling(sampleWithNearest);
 
-    return createWithSampler(glProgram, inputColorInfo, outputColorInfo, sdrWorkingColorSpace);
+    return createWithSampler(
+        glProgram,
+        inputColorInfo,
+        outputColorInfo,
+        sdrWorkingColorSpace,
+        /* matrixTransformations= */ ImmutableList.of());
   }
 
   /**
@@ -379,7 +396,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       GlProgram glProgram,
       ColorInfo inputColorInfo,
       ColorInfo outputColorInfo,
-      @WorkingColorSpace int sdrWorkingColorSpace) {
+      @WorkingColorSpace int sdrWorkingColorSpace,
+      ImmutableList<GlMatrixTransformation> matrixTransformations) {
     boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
     boolean isExpandingColorGamut =
         (inputColorInfo.colorSpace == C.COLOR_SPACE_BT709
@@ -416,7 +434,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
     return new DefaultShaderProgram(
         glProgram,
-        /* matrixTransformations= */ ImmutableList.of(),
+        matrixTransformations,
        /* rgbMatrices= */ ImmutableList.of(),
        outputColorInfo.colorTransfer,
        /* useHdr= */ isInputTransferHdr || isExpandingColorGamut);
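Note (not part of the change): for intuition about what the new matrix does, here is a small hypothetical check, assuming the returned android.graphics.Matrix is applied to vertex positions in normalized device coordinates as the MatrixTransformation contract describes.

// Hypothetical check: setScale(1, -1) maps (x, y) to (x, -y), i.e. a vertical
// mirror of the whole frame, so no per-fragment coordinate math is needed.
android.graphics.Matrix mirrorY = new android.graphics.Matrix();
mirrorY.setScale(/* sx= */ 1, /* sy= */ -1);
float[] corners = {-1, -1, 1, 1}; // bottom-left and top-right corners in NDC
mirrorY.mapPoints(corners);
// corners is now {-1, 1, 1, -1}: the bottom and top edges have swapped.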
Binary file not shown. (After: 1.9 MiB)
Binary file not shown. (After: 834 KiB)
@@ -71,6 +71,8 @@ public final class AndroidTestUtil {
           new ScaleAndRotateTransformation.Builder().setRotationDegrees(45).build()));
 
   public static final String PNG_ASSET_URI_STRING = "asset:///media/png/media3test.png";
+  public static final String PNG_ASSET_LINES_1080P_URI_STRING =
+      "asset:///media/png/loremipsum_1920x720.png";
   public static final String JPG_ASSET_URI_STRING = "asset:///media/jpeg/london.jpg";
   public static final String JPG_PORTRAIT_ASSET_URI_STRING = "asset:///media/jpeg/tokyo.jpg";
   public static final String ULTRA_HDR_URI_STRING = "asset:///media/jpeg/ultraHDR.jpg";
@@ -32,8 +32,10 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_CHECKERBOARD
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_FORMAT;
+import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_FORMAT;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_LINES_1080P_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
 import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
 import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
@@ -342,6 +344,59 @@ public final class TransformerSequenceEffectTest {
     assertThat(traceSummary.indexOf(EVENT_SURFACE_TEXTURE_TRANSFORM_FIX)).isNotEqualTo(-1);
   }
 
+  @Test
+  public void export_image_samplesFromTextureCorrectly() throws Exception {
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
+        /* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT);
+    Composition composition =
+        createComposition(
+            /* presentation= */ null,
+            new EditedMediaItem.Builder(MediaItem.fromUri(PNG_ASSET_LINES_1080P_URI_STRING))
+                .setFrameRate(30)
+                .setDurationUs(C.MICROS_PER_SECOND / 4)
+                .build());
+    // Some devices need a very high bitrate to avoid encoding artifacts.
+    int bitrate = 30_000_000;
+    if (Ascii.equalsIgnoreCase(Util.MODEL, "mi a2 lite")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "redmi 8")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "sm-f711u1")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "sm-f916u1")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "sm-f926u1")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "sm-g981u1")
+        || Ascii.equalsIgnoreCase(Util.MODEL, "tb-q706")) {
+      // And some devices need a lower bitrate because VideoDecodingWrapper fails to decode high
+      // bitrate output, or FrameworkMuxer fails to mux.
+      bitrate = 10_000_000;
+    }
+    Codec.EncoderFactory encoderFactory =
+        new DefaultEncoderFactory.Builder(context)
+            .setRequestedVideoEncoderSettings(
+                new VideoEncoderSettings.Builder().setBitrate(bitrate).build())
+            .build();
+    Transformer transformer =
+        new Transformer.Builder(context)
+            .setEncoderFactory(new AndroidTestUtil.ForceEncodeEncoderFactory(encoderFactory))
+            .setVideoMimeType("video/avc")
+            .build();
+
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(testId, composition);
+
+    assertThat(checkNotNull(result).filePath).isNotNull();
+    // The PSNR threshold was chosen based on:
+    // Pixel 8 with coordinate rounding error during texture sampling, hits PSNR 23.4. With fix ->
+    // 29.5
+    // Realmi C11 with bug fix hits PSNR 29.94
+    // rmx3563 -> 28.8
+    assertFirstFrameMatchesExpectedPsnrAndSave(
+        context, testId, checkNotNull(result.filePath), 28.5f);
+  }
+
   @Test
   public void export_withCompositionPresentationAndWithPerMediaItemEffects() throws Exception {
     // Reference: b/296225823#comment5
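Note (not part of the change): the assertion above presumably compares the first decoded frame against the reference bitmap using the standard peak-signal-to-noise-ratio formula. A hypothetical helper like the following (names and signature are illustrative, not Media3 API) shows the computation the 28.5f threshold is measured against.

// Hypothetical helper, illustrative only: PSNR between two equally sized pixel
// arrays, where maxValue is the peak channel value (255 for 8-bit).
static double psnr(int[] expected, int[] actual, int maxValue) {
  double sumSquaredError = 0;
  for (int i = 0; i < expected.length; i++) {
    double diff = expected[i] - actual[i];
    sumSquaredError += diff * diff;
  }
  double meanSquaredError = sumSquaredError / expected.length;
  // Higher PSNR means the decoded frame is closer to the reference frame.
  return 10 * Math.log10((double) maxValue * maxValue / meanSquaredError);
}

Per the comments in the test, a 28.5 dB threshold tolerates the encoder noise seen on the listed devices (about 28.8 to 29.9 dB with the fix) while still failing on the roughly 23 dB output produced by the texture-coordinate rounding error.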