Maintain a consistent luminance range across HDR content in effects

PQ and HLG have different luminance ranges (max 10k nits and max 1k nits resp). In GL, colors work in a normalised 0 to 1 scale, so for PQ content, 1=10k nits, and for HLG content, 1=1k nits.

This CL scales and normalises PQ content appropriately so that all HDR content works in the HLG luminance range. This fixes two things:

1. Conversions between HLG and PQ are "fixed" (before the output colors looked too bright or too dark depending on which way you are converting)
2. color-altering effects will be able to work consistently across HLG and PQ content

1 is tested in this CL. 2 will be tested when ultra HDR overlays are implemented; both cases have been manually tested to ensure the output looks correct on a screen.

PiperOrigin-RevId: 636851701
This commit is contained in:
tofunmi 2024-05-24 03:02:58 -07:00 committed by Copybara-Service
parent cfd29e04f3
commit db6144e7dd
8 changed files with 120 additions and 7 deletions

View File

@ -134,6 +134,8 @@
colorspace. colorspace.
* Allow defining indeterminate z-order of EditedMediaItemSequences * Allow defining indeterminate z-order of EditedMediaItemSequences
([#1055](https://github.com/androidx/media/pull/1055)). ([#1055](https://github.com/androidx/media/pull/1055)).
* Maintain a consistent luminance range across different HDR content (uses
the HLG range).
* Muxers: * Muxers:
* IMA extension: * IMA extension:
* Promote API that is required for apps to play * Promote API that is required for apps to play

View File

@ -31,8 +31,13 @@ uniform int uOutputColorTransfer;
uniform mat3 uColorTransform; uniform mat3 uColorTransform;
uniform mat4 uRgbMatrix; uniform mat4 uRgbMatrix;
// Output color for an obviously visible error. // Output colors for an obviously visible error.
const vec3 ERROR_COLOR_RED = vec3(1.0, 0.0, 0.0); const vec3 ERROR_COLOR_RED = vec3(1.0, 0.0, 0.0);
const vec3 ERROR_COLOR_BLUE = vec3(0.0, 0.0, 1.0);
// LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_ST2084 = 6;
const int COLOR_TRANSFER_HLG = 7;
// HLG OETF for one channel. // HLG OETF for one channel.
highp float hlgOetfSingleChannel(highp float linearChannel) { highp float hlgOetfSingleChannel(highp float linearChannel) {
@ -75,9 +80,6 @@ highp vec3 pqOetf(highp vec3 linearColor) {
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
// electrical signals. Input and output are both normalized to [0, 1]. // electrical signals. Input and output are both normalized to [0, 1].
highp vec3 applyOetf(highp vec3 linearColor) { highp vec3 applyOetf(highp vec3 linearColor) {
// LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_ST2084 = 6;
const int COLOR_TRANSFER_HLG = 7;
if (uOutputColorTransfer == COLOR_TRANSFER_ST2084) { if (uOutputColorTransfer == COLOR_TRANSFER_ST2084) {
return pqOetf(linearColor); return pqOetf(linearColor);
} else if (uOutputColorTransfer == COLOR_TRANSFER_HLG) { } else if (uOutputColorTransfer == COLOR_TRANSFER_HLG) {
@ -87,9 +89,22 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
} }
// Maps optical colors from the pipeline's working luminance range (the HLG
// range, where 1.0 == 1000 nits) into the range expected by the output
// transfer function. PQ treats 1.0 as 10000 nits, so PQ output is scaled
// down by the HLG/PQ ratio; HLG output passes through unchanged. Any other
// transfer function produces an obviously-visible debug color.
vec3 normalizeHdrLuminance(vec3 inputColor) {
  const float PQ_MAX_LUMINANCE = 10000.0;
  const float HLG_MAX_LUMINANCE = 1000.0;
  if (uOutputColorTransfer == COLOR_TRANSFER_HLG) {
    return inputColor;
  }
  if (uOutputColorTransfer == COLOR_TRANSFER_ST2084) {
    return inputColor * HLG_MAX_LUMINANCE / PQ_MAX_LUMINANCE;
  }
  return ERROR_COLOR_BLUE;
}
void main() { void main() {
vec4 inputColor = texture(uTexSampler, vTexSamplingCoord); vec4 inputColor = texture(uTexSampler, vTexSamplingCoord);
// transformedColors is an optical color. // transformedColors is an optical color.
vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1); vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1);
outColor = vec4(applyOetf(transformedColors.rgb), inputColor.a); outColor = vec4(applyOetf(normalizeHdrLuminance(transformedColors.rgb)),
inputColor.a);
} }

View File

@ -284,13 +284,25 @@ vec3 yuvToRgb(vec3 yuv) {
return clamp(uYuvToRgbColorTransform * (yuv - yuvOffset), 0.0, 1.0); return clamp(uYuvToRgbColorTransform * (yuv - yuvOffset), 0.0, 1.0);
} }
// Maps decoded optical colors into the pipeline's working luminance range
// (the HLG range, where 1.0 == 1000 nits). PQ input treats 1.0 as 10000
// nits, so it is scaled up by the PQ/HLG ratio; HLG input passes through
// unchanged. Any other transfer function produces an obviously-visible
// debug color.
vec3 scaleHdrLuminance(vec3 inputColor) {
  const float PQ_MAX_LUMINANCE = 10000.0;
  const float HLG_MAX_LUMINANCE = 1000.0;
  if (uInputColorTransfer == COLOR_TRANSFER_HLG) {
    return inputColor;
  }
  if (uInputColorTransfer == COLOR_TRANSFER_ST2084) {
    return inputColor * PQ_MAX_LUMINANCE / HLG_MAX_LUMINANCE;
  }
  return ERROR_COLOR_BLUE;
}
void main() { void main() {
vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz; vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz;
vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv)); vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv));
vec4 opticalColor = vec4 opticalColor =
(uApplyHdrToSdrToneMapping == 1) (uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0) ? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0); : vec4(scaleHdrLuminance(opticalColorBt2020), 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor; vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0); outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
} }

View File

@ -271,13 +271,25 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
} }
// Maps decoded optical colors into the pipeline's working luminance range
// (the HLG range, where 1.0 == 1000 nits). PQ input treats 1.0 as 10000
// nits, so it is scaled up by the PQ/HLG ratio; HLG input passes through
// unchanged. Any other transfer function produces an obviously-visible
// debug color.
vec3 scaleHdrLuminance(vec3 inputColor) {
  const float PQ_MAX_LUMINANCE = 10000.0;
  const float HLG_MAX_LUMINANCE = 1000.0;
  if (uInputColorTransfer == COLOR_TRANSFER_HLG) {
    return inputColor;
  }
  if (uInputColorTransfer == COLOR_TRANSFER_ST2084) {
    return inputColor * PQ_MAX_LUMINANCE / HLG_MAX_LUMINANCE;
  }
  return ERROR_COLOR_BLUE;
}
void main() { void main() {
vec3 opticalColorBt2020 = vec3 opticalColorBt2020 =
applyEotf(texture(uTexSampler, vTexSamplingCoord).xyz); applyEotf(texture(uTexSampler, vTexSamplingCoord).xyz);
vec4 opticalColor = vec4 opticalColor =
(uApplyHdrToSdrToneMapping == 1) (uApplyHdrToSdrToneMapping == 1)
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0) ? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0); : vec4(scaleHdrLuminance(opticalColorBt2020), 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor; vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0); outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
} }

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.0 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 MiB

After

Width:  |  Height:  |  Size: 20 MiB

View File

@ -90,6 +90,10 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
"test-generated-goldens/hdr-goldens/ultrahdr_to_hlg.png"; "test-generated-goldens/hdr-goldens/ultrahdr_to_hlg.png";
private static final String ULTRA_HDR_TO_PQ_PNG_ASSET_PATH = private static final String ULTRA_HDR_TO_PQ_PNG_ASSET_PATH =
"test-generated-goldens/hdr-goldens/ultrahdr_to_pq.png"; "test-generated-goldens/hdr-goldens/ultrahdr_to_pq.png";
private static final String HLG_TO_PQ_PNG_ASSET_PATH =
"test-generated-goldens/hdr-goldens/original_hlg10_to_pq.png";
private static final String PQ_TO_HLG_PNG_ASSET_PATH =
"test-generated-goldens/hdr-goldens/original_hdr10_to_hlg.png";
/** Input SDR video of which we only use the first frame. */ /** Input SDR video of which we only use the first frame. */
private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4"; private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
@ -243,6 +247,40 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16); .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
} }
@Test
public void noEffects_hlg10InputAndHdr10Output_matchesGoldenFile() throws Exception {
  // Processes an HLG10 first frame into a PQ (ST 2084) output and compares the
  // result against the golden PNG.
  Context appContext = getApplicationContext();
  Format hlgInputFormat = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
  ColorInfo pqColorInfo =
      new ColorInfo.Builder()
          .setColorSpace(C.COLOR_SPACE_BT2020)
          .setColorRange(C.COLOR_RANGE_LIMITED)
          .setColorTransfer(C.COLOR_TRANSFER_ST2084)
          .build();
  Format pqOutputFormat = hlgInputFormat.buildUpon().setColorInfo(pqColorInfo).build();
  assumeDeviceSupportsHdrEditing(testId, hlgInputFormat);
  assumeFormatsSupported(appContext, testId, hlgInputFormat, pqOutputFormat);
  videoFrameProcessorTestRunner =
      getDefaultFrameProcessorTestRunnerBuilder(testId)
          .setOutputColorInfo(pqOutputFormat.colorInfo)
          .setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING)
          .build();
  Bitmap expectedBitmap = readBitmap(HLG_TO_PQ_PNG_ASSET_PATH);

  videoFrameProcessorTestRunner.processFirstFrameAndEnd();
  Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

  // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
  float averagePixelAbsoluteDifference =
      BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
          expectedBitmap, actualBitmap);
  assertThat(averagePixelAbsoluteDifference)
      .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
}
@Test @Test
public void noEffects_hlg10TextureInput_matchesGoldenFile() throws Exception { public void noEffects_hlg10TextureInput_matchesGoldenFile() throws Exception {
Context context = getApplicationContext(); Context context = getApplicationContext();
@ -330,6 +368,40 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16); .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
} }
@Test
public void noEffects_hdr10InputAndHlg10Output_matchesGoldenFile() throws Exception {
  // Processes an HDR10 (PQ) first frame into an HLG output and compares the
  // result against the golden PNG.
  Context appContext = getApplicationContext();
  Format pqInputFormat = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
  ColorInfo hlgColorInfo =
      new ColorInfo.Builder()
          .setColorSpace(C.COLOR_SPACE_BT2020)
          .setColorRange(C.COLOR_RANGE_LIMITED)
          .setColorTransfer(C.COLOR_TRANSFER_HLG)
          .build();
  Format hlgOutputFormat = pqInputFormat.buildUpon().setColorInfo(hlgColorInfo).build();
  assumeDeviceSupportsHdrEditing(testId, pqInputFormat);
  assumeFormatsSupported(appContext, testId, pqInputFormat, hlgOutputFormat);
  videoFrameProcessorTestRunner =
      getDefaultFrameProcessorTestRunnerBuilder(testId)
          .setOutputColorInfo(hlgOutputFormat.colorInfo)
          .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
          .build();
  Bitmap expectedBitmap = readBitmap(PQ_TO_HLG_PNG_ASSET_PATH);

  videoFrameProcessorTestRunner.processFirstFrameAndEnd();
  Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

  // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
  float averagePixelAbsoluteDifference =
      BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
          expectedBitmap, actualBitmap);
  assertThat(averagePixelAbsoluteDifference)
      .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
}
@Test @Test
public void noEffects_hdr10TextureInput_matchesGoldenFile() throws Exception { public void noEffects_hdr10TextureInput_matchesGoldenFile() throws Exception {
Context context = getApplicationContext(); Context context = getApplicationContext();