HDR: Implement GlEffectsFrameProcessor HLG to SDR tone-mapping.

Adds COLOR_TRANSFER_GAMMA_2_2 to match the behavior of other HDR to SDR tone-mapping implementations.

PiperOrigin-RevId: 495371736
commit a66dae8bfa (parent 9ba03db704)
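For orientation, a minimal sketch (not part of the commit) of what the new tone-mapping path looks like to a caller, mirroring the drawHlgFrame_toneMap pixel test added below; the ColorInfo constructor arguments are taken from that test:

    // Illustrative only: request OpenGL HLG-to-SDR tone-mapping by pairing an HLG input
    // with a Gamma 2.2 SDR output when configuring a GlEffectsFrameProcessor.
    ColorInfo hlgInput =
        new ColorInfo(
            C.COLOR_SPACE_BT2020, C.COLOR_RANGE_LIMITED, C.COLOR_TRANSFER_HLG, /* hdrStaticInfo= */ null);
    ColorInfo toneMappedOutput =
        new ColorInfo(
            C.COLOR_SPACE_BT709,
            C.COLOR_RANGE_LIMITED,
            C.COLOR_TRANSFER_GAMMA_2_2, // New constant introduced by this change.
            /* hdrStaticInfo= */ null);
    // Passing these as inputColorInfo/outputColorInfo enables the HLG BT.2020 -> BT.709 OOTF
    // and the Gamma 2.2 OETF added to the shaders in this commit.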
@@ -1078,8 +1078,8 @@ public final class C {
   // LINT.IfChange(color_transfer)
   /**
    * Video color transfer characteristics. One of {@link Format#NO_VALUE}, {@link
-   * #COLOR_TRANSFER_LINEAR}, {@link #COLOR_TRANSFER_SDR}, {@link #COLOR_TRANSFER_ST2084} or {@link
-   * #COLOR_TRANSFER_HLG}.
+   * #COLOR_TRANSFER_LINEAR}, {@link #COLOR_TRANSFER_SDR}, {@link #COLOR_TRANSFER_GAMMA_2_2} {@link
+   * #COLOR_TRANSFER_ST2084} or {@link #COLOR_TRANSFER_HLG}.
    */
   @UnstableApi
   @Documented
@@ -1089,14 +1089,20 @@ public final class C {
     Format.NO_VALUE,
     COLOR_TRANSFER_LINEAR,
     COLOR_TRANSFER_SDR,
+    COLOR_TRANSFER_GAMMA_2_2,
     COLOR_TRANSFER_ST2084,
     COLOR_TRANSFER_HLG
   })
   public @interface ColorTransfer {}
   /** See {@link MediaFormat#COLOR_TRANSFER_LINEAR}. */
   @UnstableApi public static final int COLOR_TRANSFER_LINEAR = MediaFormat.COLOR_TRANSFER_LINEAR;
-  /** See {@link MediaFormat#COLOR_TRANSFER_SDR_VIDEO}. */
+  /** See {@link MediaFormat#COLOR_TRANSFER_SDR_VIDEO}. The SMPTE 170M transfer function. */
   @UnstableApi public static final int COLOR_TRANSFER_SDR = MediaFormat.COLOR_TRANSFER_SDR_VIDEO;
+  /**
+   * See {@link android.hardware.DataSpace#TRANSFER_GAMMA2_2}. The Gamma 2.2 transfer function, used
+   * for some SDR use-cases like tone-mapping.
+   */
+  @UnstableApi public static final int COLOR_TRANSFER_GAMMA_2_2 = 10;
   /** See {@link MediaFormat#COLOR_TRANSFER_ST2084}. */
   @UnstableApi public static final int COLOR_TRANSFER_ST2084 = MediaFormat.COLOR_TRANSFER_ST2084;
   /** See {@link MediaFormat#COLOR_TRANSFER_HLG}. */
@@ -101,23 +101,13 @@ public final class ColorInfo implements Bundleable {
         || colorInfo.colorTransfer == C.COLOR_TRANSFER_ST2084);
   }

-  /**
-   * The color space of the video. Valid values are {@link C#COLOR_SPACE_BT601}, {@link
-   * C#COLOR_SPACE_BT709}, {@link C#COLOR_SPACE_BT2020} or {@link Format#NO_VALUE} if unknown.
-   */
+  /** The {@link C.ColorSpace}. */
   public final @C.ColorSpace int colorSpace;

-  /**
-   * The color range of the video. Valid values are {@link C#COLOR_RANGE_LIMITED}, {@link
-   * C#COLOR_RANGE_FULL} or {@link Format#NO_VALUE} if unknown.
-   */
+  /** The {@link C.ColorRange}. */
   public final @C.ColorRange int colorRange;

-  /**
-   * The color transfer characteristics of the video. Valid values are {@link
-   * C#COLOR_TRANSFER_LINEAR}, {@link C#COLOR_TRANSFER_HLG}, {@link C#COLOR_TRANSFER_ST2084}, {@link
-   * C#COLOR_TRANSFER_SDR} or {@link Format#NO_VALUE} if unknown.
-   */
+  /** The {@link C.ColorTransfer}. */
   public final @C.ColorTransfer int colorTransfer;

   /** HdrStaticInfo as defined in CTA-861.3, or null if none specified. */
@@ -145,6 +135,17 @@ public final class ColorInfo implements Bundleable {
     this.hdrStaticInfo = hdrStaticInfo;
   }

+  /**
+   * Returns whether this instance is valid.
+   *
+   * <p>This instance is valid if no members are {@link Format#NO_VALUE}.
+   */
+  public boolean isValid() {
+    return colorSpace != Format.NO_VALUE
+        && colorRange != Format.NO_VALUE
+        && colorTransfer != Format.NO_VALUE;
+  }
+
   @Override
   public boolean equals(@Nullable Object obj) {
     if (this == obj) {
@@ -31,6 +31,7 @@ import android.opengl.GLES20;
 import android.opengl.GLES30;
 import android.opengl.Matrix;
 import androidx.annotation.DoNotInline;
+import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 import androidx.media3.common.C;
@@ -190,7 +191,7 @@ public final class GlUtil {
    * Returns whether the {@value #EXTENSION_YUV_TARGET} extension is supported.
    *
    * <p>This extension allows sampling raw YUV values from an external texture, which is required
-   * for HDR.
+   * for HDR input.
    */
   public static boolean isYuvTargetExtensionSupported() {
     if (Util.SDK_INT < 17) {
@@ -232,27 +233,27 @@ public final class GlUtil {
    */
   @RequiresApi(17)
   public static EGLContext createEglContext(EGLDisplay eglDisplay) throws GlException {
-    return createEglContext(eglDisplay, EGL_CONFIG_ATTRIBUTES_RGBA_8888);
+    return createEglContext(eglDisplay, /* openGlVersion= */ 2, EGL_CONFIG_ATTRIBUTES_RGBA_8888);
   }

   /**
    * Creates a new {@link EGLContext} for the specified {@link EGLDisplay}.
    *
    * @param eglDisplay The {@link EGLDisplay} to create an {@link EGLContext} for.
+   * @param openGlVersion The version of OpenGL ES to configure. Accepts either {@code 2}, for
+   *     OpenGL ES 2.0, or {@code 3}, for OpenGL ES 3.0.
    * @param configAttributes The attributes to configure EGL with. Accepts either {@link
-   *     #EGL_CONFIG_ATTRIBUTES_RGBA_1010102}, which will request OpenGL ES 3.0, or {@link
-   *     #EGL_CONFIG_ATTRIBUTES_RGBA_8888}, which will request OpenGL ES 2.0.
+   *     #EGL_CONFIG_ATTRIBUTES_RGBA_1010102}, or {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888}.
    */
   @RequiresApi(17)
-  public static EGLContext createEglContext(EGLDisplay eglDisplay, int[] configAttributes)
+  public static EGLContext createEglContext(
+      EGLDisplay eglDisplay, @IntRange(from = 2, to = 3) int openGlVersion, int[] configAttributes)
       throws GlException {
     checkArgument(
         Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_8888)
            || Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_1010102));
-    return Api17.createEglContext(
-        eglDisplay,
-        /* version= */ Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_1010102) ? 3 : 2,
-        configAttributes);
+    checkArgument(openGlVersion == 2 || openGlVersion == 3);
+    return Api17.createEglContext(eglDisplay, openGlVersion, configAttributes);
   }

   /**
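A hedged usage sketch of the reworked factory method (everything except the GlUtil call itself is an assumption for illustration); GlEffectsFrameProcessor, later in this commit, selects the version the same way:

    // Illustrative only: request OpenGL ES 3.0 when HDR input or output is involved, else ES 2.0.
    boolean useHdr = false; // Hypothetical flag for this sketch.
    int openGlVersion = useHdr ? 3 : 2;
    int[] configAttributes =
        useHdr ? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102 : GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
    EGLContext eglContext = GlUtil.createEglContext(eglDisplay, openGlVersion, configAttributes);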
@@ -322,6 +322,8 @@ public final class MediaFormatUtil {
   /** Whether this is a valid {@link C.ColorTransfer} instance. */
   private static boolean isValidColorTransfer(int colorTransfer) {
     // LINT.IfChange(color_transfer)
+    // C.COLOR_TRANSFER_GAMMA_2_2 isn't valid because MediaCodec, and hence MediaFormat, does not
+    // support it.
     return colorTransfer == C.COLOR_TRANSFER_LINEAR
         || colorTransfer == C.COLOR_TRANSFER_SDR
         || colorTransfer == C.COLOR_TRANSFER_ST2084
@@ -34,6 +34,7 @@ import android.media.Image;
 import android.media.ImageReader;
 import android.media.MediaFormat;
 import android.util.Pair;
+import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
@@ -49,6 +50,7 @@ import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.junit.After;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -84,9 +86,15 @@ public final class GlEffectsFrameProcessorPixelTest {
       "media/bitmap/sample_mp4_first_frame/electrical_colors/increase_brightness.png";
   public static final String GRAYSCALE_THEN_INCREASE_RED_CHANNEL_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/grayscale_then_increase_red_channel.png";
+  // This file is generated on a Pixel 7, because the emulator isn't able to decode HLG to generate
+  // this file.
+  public static final String TONE_MAP_HDR_TO_SDR_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/tone_map_hdr_to_sdr.png";

   /** Input video of which we only use the first frame. */
-  private static final String INPUT_MP4_ASSET_STRING = "media/mp4/sample.mp4";
+  private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
+  /** Input HLG video of which we only use the first frame. */
+  private static final String INPUT_HLG_MP4_ASSET_STRING = "media/mp4/hlg-1080p.mp4";
   /**
    * Time to wait for the decoded frame to populate the {@link GlEffectsFrameProcessor} instance's
    * input surface and the {@link GlEffectsFrameProcessor} to finish processing the frame, in
@@ -113,7 +121,7 @@ public final class GlEffectsFrameProcessorPixelTest {
   @Test
   public void processData_noEdits_producesExpectedOutput() throws Exception {
     String testId = "processData_noEdits";
-    setUpAndPrepareFirstFrame(DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO);
+    setUpAndPrepareFirstFrame();
     Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

     Bitmap actualBitmap = processFirstFrameAndEnd();
@@ -128,7 +136,12 @@ public final class GlEffectsFrameProcessorPixelTest {
   @Test
   public void processData_withPixelWidthHeightRatio_producesExpectedOutput() throws Exception {
     String testId = "processData_withPixelWidthHeightRatio";
-    setUpAndPrepareFirstFrame(/* pixelWidthHeightRatio= */ 2f);
+    setUpAndPrepareFirstFrame(
+        INPUT_SDR_MP4_ASSET_STRING,
+        /* pixelWidthHeightRatio= */ 2f,
+        /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
+        /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
+        /* effects= */ ImmutableList.of());
     Bitmap expectedBitmap = readBitmap(SCALE_WIDE_PNG_ASSET_PATH);

     Bitmap actualBitmap = processFirstFrameAndEnd();
@@ -147,7 +160,6 @@ public final class GlEffectsFrameProcessorPixelTest {
     Matrix translateRightMatrix = new Matrix();
     translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         (MatrixTransformation) (long presentationTimeNs) -> translateRightMatrix);
     Bitmap expectedBitmap = readBitmap(TRANSLATE_RIGHT_PNG_ASSET_PATH);

@@ -167,7 +179,6 @@ public final class GlEffectsFrameProcessorPixelTest {
     Matrix translateRightMatrix = new Matrix();
     translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         (MatrixTransformation) (long presentationTimeUs) -> translateRightMatrix,
         new ScaleToFitTransformation.Builder().setRotationDegrees(45).build());
     Bitmap expectedBitmap = readBitmap(TRANSLATE_THEN_ROTATE_PNG_ASSET_PATH);
@@ -188,7 +199,6 @@ public final class GlEffectsFrameProcessorPixelTest {
     Matrix translateRightMatrix = new Matrix();
     translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         new ScaleToFitTransformation.Builder().setRotationDegrees(45).build(),
         (MatrixTransformation) (long presentationTimeUs) -> translateRightMatrix);
     Bitmap expectedBitmap = readBitmap(ROTATE_THEN_TRANSLATE_PNG_ASSET_PATH);
@@ -206,7 +216,7 @@ public final class GlEffectsFrameProcessorPixelTest {
   public void processData_withPresentation_createForHeight_producesExpectedOutput()
       throws Exception {
     String testId = "processData_withPresentation_createForHeight";
-    setUpAndPrepareFirstFrame(DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, Presentation.createForHeight(480));
+    setUpAndPrepareFirstFrame(Presentation.createForHeight(480));
     Bitmap expectedBitmap = readBitmap(REQUEST_OUTPUT_HEIGHT_PNG_ASSET_PATH);

     Bitmap actualBitmap = processFirstFrameAndEnd();
@@ -222,7 +232,6 @@ public final class GlEffectsFrameProcessorPixelTest {
   public void processData_withCropThenPresentation_producesExpectedOutput() throws Exception {
     String testId = "processData_withCropThenPresentation";
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         new Crop(/* left= */ -.5f, /* right= */ .5f, /* bottom= */ -.5f, /* top= */ .5f),
         Presentation.createForAspectRatio(
             /* aspectRatio= */ .5f, Presentation.LAYOUT_SCALE_TO_FIT));
@@ -242,7 +251,6 @@ public final class GlEffectsFrameProcessorPixelTest {
       throws Exception {
     String testId = "processData_withScaleToFitTransformation_rotate45";
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         new ScaleToFitTransformation.Builder().setRotationDegrees(45).build());
     Bitmap expectedBitmap = readBitmap(ROTATE45_SCALE_TO_FIT_PNG_ASSET_PATH);

@@ -260,7 +268,6 @@ public final class GlEffectsFrameProcessorPixelTest {
       throws Exception {
     String testId = "processData_withTwoWrappedScaleToFitTransformations";
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         new GlEffectWrapper(new ScaleToFitTransformation.Builder().setRotationDegrees(45).build()),
         new GlEffectWrapper(
             new ScaleToFitTransformation.Builder()
@@ -291,10 +298,9 @@ public final class GlEffectsFrameProcessorPixelTest {
     }
     full10StepRotationAndCenterCrop.add(centerCrop);

-    setUpAndPrepareFirstFrame(DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, centerCrop);
+    setUpAndPrepareFirstFrame(centerCrop);
     Bitmap centerCropResultBitmap = processFirstFrameAndEnd();
-    setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, full10StepRotationAndCenterCrop.build());
+    setUpAndPrepareFirstFrame(full10StepRotationAndCenterCrop.build());
     Bitmap fullRotationAndCenterCropResultBitmap = processFirstFrameAndEnd();

     maybeSaveTestBitmapToCacheDirectory(
@@ -318,7 +324,7 @@ public final class GlEffectsFrameProcessorPixelTest {
             new RgbAdjustment.Builder().setRedScale(5).build(),
             new RgbAdjustment.Builder().setGreenScale(5).build(),
             new RgbAdjustment.Builder().setBlueScale(5).build());
-    setUpAndPrepareFirstFrame(DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, increaseBrightness);
+    setUpAndPrepareFirstFrame(increaseBrightness);
     Bitmap expectedBitmap = readBitmap(INCREASE_BRIGHTNESS_PNG_ASSET_PATH);

     Bitmap actualBitmap = processFirstFrameAndEnd();
@@ -347,13 +353,11 @@ public final class GlEffectsFrameProcessorPixelTest {
             new Rotation(/* degrees= */ 90),
             centerCrop);
     setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
         ImmutableList.of(
             new RgbAdjustment.Builder().setRedScale(5).setBlueScale(5).setGreenScale(5).build(),
             centerCrop));
     Bitmap centerCropAndBrightnessIncreaseResultBitmap = processFirstFrameAndEnd();
-    setUpAndPrepareFirstFrame(
-        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, increaseBrightnessFullRotationCenterCrop);
+    setUpAndPrepareFirstFrame(increaseBrightnessFullRotationCenterCrop);

     Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap = processFirstFrameAndEnd();

@@ -376,10 +380,8 @@ public final class GlEffectsFrameProcessorPixelTest {
   public void drawFrame_grayscaleAndIncreaseRedChannel_producesGrayscaleAndRedImage()
       throws Exception {
     String testId = "drawFrame_grayscaleAndIncreaseRedChannel";
-    ImmutableList<Effect> grayscaleThenIncreaseRed =
-        ImmutableList.of(
-            RgbFilter.createGrayscaleFilter(), new RgbAdjustment.Builder().setRedScale(3).build());
-    setUpAndPrepareFirstFrame(DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO, grayscaleThenIncreaseRed);
+    setUpAndPrepareFirstFrame(
+        RgbFilter.createGrayscaleFilter(), new RgbAdjustment.Builder().setRedScale(3).build());
     Bitmap expectedBitmap = readBitmap(GRAYSCALE_THEN_INCREASE_RED_CHANNEL_PNG_ASSET_PATH);

     Bitmap actualBitmap = processFirstFrameAndEnd();
@@ -391,6 +393,40 @@ public final class GlEffectsFrameProcessorPixelTest {
     assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
   }

+  @Test
+  @Ignore("b/261877288 Test can only run on physical devices because decoder can't decode HLG.")
+  public void drawHlgFrame_toneMap_producesExpectedOutput() throws Exception {
+    // TODO(b/239735341): Move this test to mobileharness testing.
+    String testId = "drawHlgFrame_toneMap";
+    ColorInfo hlgColor =
+        new ColorInfo(
+            C.COLOR_SPACE_BT2020,
+            C.COLOR_RANGE_LIMITED,
+            C.COLOR_TRANSFER_HLG,
+            /* hdrStaticInfo= */ null);
+    ColorInfo toneMapSdrColor =
+        new ColorInfo(
+            C.COLOR_SPACE_BT709,
+            C.COLOR_RANGE_LIMITED,
+            C.COLOR_TRANSFER_GAMMA_2_2,
+            /* hdrStaticInfo= */ null);
+    setUpAndPrepareFirstFrame(
+        INPUT_HLG_MP4_ASSET_STRING,
+        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
+        /* inputColorInfo= */ hlgColor,
+        /* outputColorInfo= */ toneMapSdrColor,
+        /* effects= */ ImmutableList.of());
+    Bitmap expectedBitmap = readBitmap(TONE_MAP_HDR_TO_SDR_PNG_ASSET_PATH);
+
+    Bitmap actualBitmap = processFirstFrameAndEnd();
+
+    maybeSaveTestBitmapToCacheDirectory(testId, /* bitmapLabel= */ "actual", actualBitmap);
+    // TODO(b/207848601): switch to using proper tooling for testing against golden data.
+    float averagePixelAbsoluteDifference =
+        getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
+    assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
+  }
+
   // TODO(b/227624622): Add a test for HDR input after BitmapTestUtil can read HDR bitmaps, using
   // GlEffectWrapper to ensure usage of intermediate textures.

@@ -401,15 +437,27 @@ public final class GlEffectsFrameProcessorPixelTest {
    * <p>The frame will be sent towards {@link #glEffectsFrameProcessor}, and output may be accessed
    * on the {@code outputImageReader}.
    *
-   * @param pixelWidthHeightRatio The ratio of width over height for each pixel.
    * @param effects The {@link GlEffect GlEffects} to apply to the input frame.
    */
-  private void setUpAndPrepareFirstFrame(float pixelWidthHeightRatio, GlEffect... effects)
-      throws Exception {
-    setUpAndPrepareFirstFrame(pixelWidthHeightRatio, asList(effects));
+  private void setUpAndPrepareFirstFrame(GlEffect... effects) throws Exception {
+    setUpAndPrepareFirstFrame(asList(effects));
   }

-  private void setUpAndPrepareFirstFrame(float pixelWidthHeightRatio, List<Effect> effects)
+  private void setUpAndPrepareFirstFrame(List<Effect> effects) throws Exception {
+    setUpAndPrepareFirstFrame(
+        INPUT_SDR_MP4_ASSET_STRING,
+        DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO,
+        /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
+        /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
+        effects);
+  }
+
+  private void setUpAndPrepareFirstFrame(
+      String videoAssetPath,
+      float pixelWidthHeightRatio,
+      ColorInfo inputColorInfo,
+      ColorInfo outputColorInfo,
+      List<Effect> effects)
       throws Exception {
     glEffectsFrameProcessor =
         checkNotNull(
@@ -418,8 +466,8 @@ public final class GlEffectsFrameProcessorPixelTest {
                     getApplicationContext(),
                     effects,
                     DebugViewProvider.NONE,
-                    /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
-                    /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
+                    inputColorInfo,
+                    outputColorInfo,
                     /* releaseFramesAutomatically= */ true,
                     MoreExecutors.directExecutor(),
                     new FrameProcessor.Listener() {
@@ -449,7 +497,7 @@ public final class GlEffectsFrameProcessorPixelTest {
                   }
                 }));
     DecodeOneFrameUtil.decodeOneAssetFileFrame(
-        INPUT_MP4_ASSET_STRING,
+        videoAssetPath,
        new DecodeOneFrameUtil.Listener() {
          @Override
          public void onContainerExtracted(MediaFormat mediaFormat) {
@@ -22,8 +22,10 @@
 //    uYuvToRgbColorTransform, yielding electrical (HLG or PQ) BT.2020 RGB,
 // 3. Applies an EOTF based on uInputColorTransfer, yielding optical linear
 //    BT.2020 RGB.
-// 4. Applies a 4x4 RGB color matrix to change the pixel colors.
-// 5. Outputs as requested by uOutputColorTransfer. Use COLOR_TRANSFER_LINEAR
+// 4. Optionally applies a BT2020 to BT709 OOTF, if OpenGL tone-mapping is
+//    requested via uApplyHdrToSdrToneMapping.
+// 5. Applies a 4x4 RGB color matrix to change the pixel colors.
+// 6. Outputs as requested by uOutputColorTransfer. Use COLOR_TRANSFER_LINEAR
 //    for outputting to intermediate shaders, or COLOR_TRANSFER_ST2084 /
 //    COLOR_TRANSFER_HLG to output electrical colors via an OETF (e.g. to an
 //    encoder).
@@ -38,9 +40,10 @@ uniform mat4 uRgbMatrix;
 // C.java#ColorTransfer value.
 // Only COLOR_TRANSFER_ST2084 and COLOR_TRANSFER_HLG are allowed.
 uniform int uInputColorTransfer;
+uniform int uApplyHdrToSdrToneMapping;
 // C.java#ColorTransfer value.
-// Only COLOR_TRANSFER_LINEAR, COLOR_TRANSFER_ST2084, and COLOR_TRANSFER_HLG are
-// allowed.
+// Only COLOR_TRANSFER_LINEAR, COLOR_TRANSFER_GAMMA_2_2, COLOR_TRANSFER_ST2084,
+// and COLOR_TRANSFER_HLG are allowed.
 uniform int uOutputColorTransfer;
 in vec2 vTexSamplingCoord;
 out vec4 outColor;
@@ -48,7 +51,7 @@ out vec4 outColor;
 // TODO(b/227624622): Consider using mediump to save precision, if it won't lead
 // to noticeable quantization errors.

-// HLG EOTF for one channel.
+// BT.2100 / BT.2020 HLG EOTF for one channel.
 highp float hlgEotfSingleChannel(highp float hlgChannel) {
   // Specification:
   // https://www.khronos.org/registry/DataFormat/specs/1.3/dataformat.1.3.inline.html#TRANSFER_HLG
@@ -104,7 +107,36 @@ highp vec3 applyEotf(highp vec3 electricalColor) {
   }
 }

-// HLG OETF for one channel.
+// Apply the HLG BT2020 to BT709 OOTF.
+highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) {
+  // Reference ("HLG Reference OOTF" section):
+  // https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf
+  // Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
+  // https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
+  const mat3 RGB_TO_XYZ_BT2020 = mat3(
+      0.63695805f, 0.26270021f, 0.00000000f,
+      0.14461690f, 0.67799807f, 0.02807269f,
+      0.16888098f, 0.05930172f, 1.06098506f);
+  // Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
+  const mat3 XYZ_TO_RGB_BT709 = mat3(
+      3.24096994f, -0.96924364f, 0.05563008f,
+      -1.53738318f, 1.87596750f, -0.20397696f,
+      -0.49861076f, 0.04155506f, 1.05697151f);
+  // hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000);
+  // nominalPeakLuminance was selected to use a 500 as a typical value, used
+  // in https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
+  // b/199162498#comment35, and
+  // https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf.
+  const float hlgGamma = 1.0735674018211279;
+
+  vec3 linearXyzBt2020 = RGB_TO_XYZ_BT2020 * linearRgbBt2020;
+  vec3 linearXyzBt709 =
+      linearXyzBt2020 * pow(linearXyzBt2020[1], hlgGamma - 1.0);
+  vec3 linearRgbBt709 = clamp((XYZ_TO_RGB_BT709 * linearXyzBt709), 0.0, 1.0);
+  return linearRgbBt709;
+}
+
+// BT.2100 / BT.2020 HLG OETF for one channel.
 highp float hlgOetfSingleChannel(highp float linearChannel) {
   // Specification:
   // https://www.khronos.org/registry/DataFormat/specs/1.3/dataformat.1.3.inline.html#TRANSFER_HLG
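A quick arithmetic check on the constant above (an editorial aside, not part of the commit): with nominalPeakLuminance = 500, the formula in the comment gives hlgGamma = 1.2 + 0.42 * log10(500 / 1000) = 1.2 + 0.42 * (-0.30103) ≈ 1.0735674, which matches the literal 1.0735674018211279. The OOTF then scales the linear BT.2020 XYZ color by pow(Y, hlgGamma - 1.0), i.e. by luminance raised to roughly 0.0736, before converting into BT.709.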
@@ -144,17 +176,35 @@ highp vec3 pqOetf(highp vec3 linearColor) {
   return pow(temp, vec3(m2));
 }

+// BT.709 gamma 2.2 OETF for one channel.
+float gamma22OetfSingleChannel(highp float linearChannel) {
+  // Reference:
+  // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
+  return pow(linearChannel, (1.0 / 2.2));
+}
+
+// BT.709 gamma 2.2 OETF.
+vec3 gamma22Oetf(highp vec3 linearColor) {
+  return vec3(
+      gamma22OetfSingleChannel(linearColor.r),
+      gamma22OetfSingleChannel(linearColor.g),
+      gamma22OetfSingleChannel(linearColor.b));
+}
+
 // Applies the appropriate OETF to convert linear optical signals to nonlinear
 // electrical signals. Input and output are both normalized to [0, 1].
 highp vec3 applyOetf(highp vec3 linearColor) {
   // LINT.IfChange(color_transfer_oetf)
   const int COLOR_TRANSFER_LINEAR = 1;
+  const int COLOR_TRANSFER_GAMMA_2_2 = 10;
   const int COLOR_TRANSFER_ST2084 = 6;
   const int COLOR_TRANSFER_HLG = 7;
   if (uOutputColorTransfer == COLOR_TRANSFER_ST2084) {
     return pqOetf(linearColor);
   } else if (uOutputColorTransfer == COLOR_TRANSFER_HLG) {
     return hlgOetf(linearColor);
+  } else if (uOutputColorTransfer == COLOR_TRANSFER_GAMMA_2_2) {
+    return gamma22Oetf(linearColor);
   } else if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR) {
     return linearColor;
   } else {
@@ -170,7 +220,11 @@ vec3 yuvToRgb(vec3 yuv) {

 void main() {
   vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz;
-  vec4 opticalColor = vec4(applyEotf(yuvToRgb(srcYuv)), 1.0);
+  vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv));
+  // TODO(b/239735341): Add support for PQ tone-mapping.
+  vec4 opticalColor = (uApplyHdrToSdrToneMapping == 1)
+      ? vec4(applyHlgBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
+      : vec4(opticalColorBt2020, 1.0);
   vec4 transformedColors = uRgbMatrix * opticalColor;
   outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
 }
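For reference (illustration only, not from the commit): the Gamma 2.2 OETF selected above is simply E = L^(1/2.2), so a linear mid-grey of L = 0.5 encodes to about 0.5^0.4545 ≈ 0.73, brighter than the linear value, as expected for a gamma encode.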
@@ -36,8 +36,9 @@ uniform int uOutputColorTransfer;
 const float inverseGamma = 0.4500;
 const float gamma = 1.0 / inverseGamma;

-// Transforms a single channel from electrical to optical SDR.
-float sdrEotfSingleChannel(float electricalChannel) {
+// Transforms a single channel from optical to electrical SDR using the SMPTE
+// 170M OETF.
+float smpte170mEotfSingleChannel(float electricalChannel) {
   // Specification:
   // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
   return electricalChannel < 0.0812
@@ -46,15 +47,15 @@ float sdrEotfSingleChannel(float electricalChannel) {
 }

 // Transforms electronical to optical SDR using the SMPTE 170M EOTF.
-vec3 sdrEotf(vec3 electricalColor) {
+vec3 smpte170mEotf(vec3 electricalColor) {
   return vec3(
-      sdrEotfSingleChannel(electricalColor.r),
-      sdrEotfSingleChannel(electricalColor.g),
-      sdrEotfSingleChannel(electricalColor.b));
+      smpte170mEotfSingleChannel(electricalColor.r),
+      smpte170mEotfSingleChannel(electricalColor.g),
+      smpte170mEotfSingleChannel(electricalColor.b));
 }

 // Transforms a single channel from optical to electrical SDR.
-float sdrOetfSingleChannel(float opticalChannel) {
+float smpte170mOetfSingleChannel(float opticalChannel) {
   // Specification:
   // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
   return opticalChannel < 0.018
@@ -63,11 +64,11 @@ float sdrOetfSingleChannel(float opticalChannel) {
 }

 // Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
-vec3 sdrOetf(vec3 opticalColor) {
+vec3 smpte170mOetf(vec3 opticalColor) {
   return vec3(
-      sdrOetfSingleChannel(opticalColor.r),
-      sdrOetfSingleChannel(opticalColor.g),
-      sdrOetfSingleChannel(opticalColor.b));
+      smpte170mOetfSingleChannel(opticalColor.r),
+      smpte170mOetfSingleChannel(opticalColor.g),
+      smpte170mOetfSingleChannel(opticalColor.b));
 }

 // Applies the appropriate OETF to convert linear optical signals to nonlinear
@@ -79,7 +80,7 @@ highp vec3 applyOetf(highp vec3 linearColor) {
   if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR) {
     return linearColor;
   } else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
-    return sdrOetf(linearColor);
+    return smpte170mOetf(linearColor);
   } else {
     // Output red as an obviously visible error.
     return vec3(1.0, 0.0, 0.0);
@@ -88,7 +89,7 @@ highp vec3 applyOetf(highp vec3 linearColor) {

 void main() {
   vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
-  vec3 linearInputColor = sdrEotf(inputColor.rgb);
+  vec3 linearInputColor = smpte170mEotf(inputColor.rgb);

   vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1);

@@ -24,11 +24,15 @@ precision mediump float;
 uniform sampler2D uTexSampler;
 uniform mat4 uRgbMatrix;
 varying vec2 vTexSamplingCoord;
+// C.java#ColorTransfer value.
+// Only COLOR_TRANSFER_SDR and COLOR_TRANSFER_GAMMA_2_2 are allowed.
+uniform int uOutputColorTransfer;

 const float inverseGamma = 0.4500;

-// Transforms a single channel from optical to electrical SDR.
-float sdrOetfSingleChannel(float opticalChannel) {
+// Transforms a single channel from optical to electrical SDR using the SMPTE
+// 170M OETF.
+float smpte170mOetfSingleChannel(float opticalChannel) {
   // Specification:
   // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
   return opticalChannel < 0.018
@@ -37,16 +41,47 @@ float sdrOetfSingleChannel(float opticalChannel) {
 }

 // Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
-vec3 sdrOetf(vec3 opticalColor) {
+vec3 smpte170mOetf(vec3 opticalColor) {
   return vec3(
-      sdrOetfSingleChannel(opticalColor.r),
-      sdrOetfSingleChannel(opticalColor.g),
-      sdrOetfSingleChannel(opticalColor.b));
+      smpte170mOetfSingleChannel(opticalColor.r),
+      smpte170mOetfSingleChannel(opticalColor.g),
+      smpte170mOetfSingleChannel(opticalColor.b));
+}
+
+// BT.709 gamma 2.2 OETF for one channel.
+float gamma22OetfSingleChannel(highp float linearChannel) {
+  // Reference:
+  // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_gamma22
+  return pow(linearChannel, (1.0 / 2.2));
+}
+
+// BT.709 gamma 2.2 OETF.
+vec3 gamma22Oetf(highp vec3 linearColor) {
+  return vec3(
+      gamma22OetfSingleChannel(linearColor.r),
+      gamma22OetfSingleChannel(linearColor.g),
+      gamma22OetfSingleChannel(linearColor.b));
+}
+
+// Applies the appropriate OETF to convert linear optical signals to nonlinear
+// electrical signals. Input and output are both normalized to [0, 1].
+highp vec3 applyOetf(highp vec3 linearColor) {
+  // LINT.IfChange(color_transfer_oetf)
+  const int COLOR_TRANSFER_SDR_VIDEO = 3;
+  const int COLOR_TRANSFER_GAMMA_2_2 = 10;
+  if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
+    return smpte170mOetf(linearColor);
+  } else if (uOutputColorTransfer == COLOR_TRANSFER_GAMMA_2_2) {
+    return gamma22Oetf(linearColor);
+  } else {
+    // Output red as an obviously visible error.
+    return vec3(1.0, 0.0, 0.0);
+  }
 }

 void main() {
   vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
   vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1);

-  gl_FragColor = vec4(sdrOetf(transformedColors.rgb), inputColor.a);
+  gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a);
 }
@@ -60,7 +60,9 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
    *
    * <p>All {@link Effect} instances must be {@link GlEffect} instances.
    *
-   * <p>Using HDR requires the {@code EXT_YUV_target} OpenGL extension.
+   * <p>Using HDR {@code inputColorInfo} requires the {@code EXT_YUV_target} OpenGL extension.
+   *
+   * <p>Using HDR {@code inputColorInfo} or {@code outputColorInfo} requires OpenGL ES 3.0.
    *
    * <p>Pass a {@link MoreExecutors#directExecutor() direct listenerExecutor} if invoking the
    * {@code listener} on {@link GlEffectsFrameProcessor}'s internal thread is desired.
@@ -78,16 +80,26 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
       throws FrameProcessingException {
     // TODO(b/261188041) Add tests to verify the Listener is invoked on the given Executor.

-    // TODO(b/239735341): Reduce the scope of these checks by implementing GL tone-mapping.
-    checkArgument(
-        inputColorInfo.colorSpace == outputColorInfo.colorSpace,
-        "Conversion between HDR and SDR color spaces is not yet supported.");
-    checkArgument(
-        ColorInfo.isTransferHdr(inputColorInfo) == ColorInfo.isTransferHdr(outputColorInfo),
-        "Conversion between HDR and SDR color transfers is not yet supported.");
+    checkArgument(inputColorInfo.isValid());
     checkArgument(inputColorInfo.colorTransfer != C.COLOR_TRANSFER_LINEAR);
+    checkArgument(outputColorInfo.isValid());
     checkArgument(outputColorInfo.colorTransfer != C.COLOR_TRANSFER_LINEAR);

+    if (inputColorInfo.colorSpace != outputColorInfo.colorSpace
+        || ColorInfo.isTransferHdr(inputColorInfo) != ColorInfo.isTransferHdr(outputColorInfo)) {
+      // GL Tone mapping is only implemented for BT2020 to BT709 and HLG to SDR (Gamma 2.2).
+      // Gamma 2.2 is used instead of SMPTE 170M for SDR, despite MediaFormat's
+      // COLOR_TRANSFER_SDR_VIDEO being defined as SMPTE 170M. This is to match
+      // other known tone-mapping behavior within the Android ecosystem.
+      // TODO(b/239735341): Consider migrating SDR outside tone-mapping from SMPTE
+      // 170M to gamma 2.2.
+      // TODO(b/239735341): Implement PQ tone-mapping to reduce the scope of these checks.
+      checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);
+      checkArgument(outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020);
+      checkArgument(inputColorInfo.colorTransfer == C.COLOR_TRANSFER_HLG);
+      checkArgument(outputColorInfo.colorTransfer == C.COLOR_TRANSFER_GAMMA_2_2);
+    }
+
     ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME);

     Future<GlEffectsFrameProcessor> glFrameProcessorFuture =
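To make the new preconditions concrete, a hedged sketch of ColorInfo pairs checked against the checkArgument calls above (illustrative, derived only from this hunk):

    // Accepted: in-GL tone-mapping, HLG BT.2020 input to Gamma 2.2 BT.709 output.
    ColorInfo hlgIn =
        new ColorInfo(C.COLOR_SPACE_BT2020, C.COLOR_RANGE_LIMITED, C.COLOR_TRANSFER_HLG, null);
    ColorInfo gamma22Out =
        new ColorInfo(C.COLOR_SPACE_BT709, C.COLOR_RANGE_LIMITED, C.COLOR_TRANSFER_GAMMA_2_2, null);
    // Accepted: no tone-mapping when color space and HDR-ness match (the if-branch is skipped).
    // Rejected: PQ (ST 2084) input with an SDR output, since the tone-mapping branch requires an
    // HLG input; also rejected: any ColorInfo with a Format.NO_VALUE field, since isValid() fails.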
@@ -146,7 +158,9 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
         ColorInfo.isTransferHdr(outputColorInfo)
             ? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102
             : GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
-    EGLContext eglContext = GlUtil.createEglContext(eglDisplay, configAttributes);
+    int openGlVersion =
+        ColorInfo.isTransferHdr(inputColorInfo) || ColorInfo.isTransferHdr(outputColorInfo) ? 3 : 2;
+    EGLContext eglContext = GlUtil.createEglContext(eglDisplay, openGlVersion, configAttributes);
     GlUtil.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes);

     ImmutableList<GlTextureProcessor> textureProcessors =
@@ -205,7 +219,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
     boolean sampleFromExternalTexture = true;
     ColorInfo linearColorInfo =
         new ColorInfo(
-            inputColorInfo.colorSpace, inputColorInfo.colorRange, C.COLOR_TRANSFER_LINEAR, null);
+            outputColorInfo.colorSpace, outputColorInfo.colorRange, C.COLOR_TRANSFER_LINEAR, null);
     for (int i = 0; i < effects.size(); i++) {
       Effect effect = effects.get(i);
       checkArgument(effect instanceof GlEffect, "GlEffectsFrameProcessor only supports GlEffects");
@@ -24,6 +24,7 @@ import android.opengl.Matrix;
 import android.util.Pair;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
+import androidx.media3.common.Format;
 import androidx.media3.common.FrameProcessingException;
 import androidx.media3.common.util.GlProgram;
 import androidx.media3.common.util.GlUtil;
@@ -205,10 +206,6 @@ import java.util.List;

     @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
     if (isInputTransferHdr) {
-      // TODO(b/239735341): Remove this after implementing in-app tone-mapping.
-      checkArgument(
-          outputColorInfo.colorSpace == C.COLOR_SPACE_BT2020,
-          "Converting from HDR to SDR is not yet supported.");
       checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);

       // In HDR editing mode the decoder output is sampled in YUV.
@@ -227,18 +224,18 @@ import java.util.List;
           inputColorTransfer == C.COLOR_TRANSFER_HLG
               || inputColorTransfer == C.COLOR_TRANSFER_ST2084);
       glProgram.setIntUniform("uInputColorTransfer", inputColorTransfer);
+      // TODO(b/239735341): Add a setBooleanUniform method to GlProgram.
+      glProgram.setIntUniform(
+          "uApplyHdrToSdrToneMapping",
+          /* value= */ (outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020) ? 1 : 0);
       checkArgument(
-          outputColorTransfer == C.COLOR_TRANSFER_HLG
-              || outputColorTransfer == C.COLOR_TRANSFER_ST2084
-              || outputColorTransfer == C.COLOR_TRANSFER_LINEAR);
+          outputColorTransfer != Format.NO_VALUE && outputColorTransfer != C.COLOR_TRANSFER_SDR);
       glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
     } else {
       checkArgument(
          outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020,
          "Converting from SDR to HDR is not supported.");
-      checkArgument(
-          inputColorInfo.colorSpace == C.COLOR_SPACE_BT709
-              || inputColorInfo.colorSpace == C.COLOR_SPACE_BT601);
+      checkArgument(inputColorInfo.colorSpace == outputColorInfo.colorSpace);
       checkArgument(
           outputColorTransfer == C.COLOR_TRANSFER_SDR
               || outputColorTransfer == C.COLOR_TRANSFER_LINEAR);
@@ -287,11 +284,17 @@ import java.util.List;
             : FRAGMENT_SHADER_TRANSFORMATION_SDR_OETF_ES2_PATH;
     GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);

+    @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
     if (outputIsHdr) {
-      @C.ColorTransfer int colorTransfer = outputColorInfo.colorTransfer;
       checkArgument(
-          colorTransfer == C.COLOR_TRANSFER_HLG || colorTransfer == C.COLOR_TRANSFER_ST2084);
-      glProgram.setIntUniform("uOutputColorTransfer", colorTransfer);
+          outputColorTransfer == C.COLOR_TRANSFER_HLG
+              || outputColorTransfer == C.COLOR_TRANSFER_ST2084);
+      glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
+    } else {
+      checkArgument(
+          outputColorTransfer == C.COLOR_TRANSFER_SDR
+              || outputColorTransfer == C.COLOR_TRANSFER_GAMMA_2_2);
+      glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
     }

     return new MatrixTextureProcessor(
New image asset added (binary file not shown, 2.3 MiB).
New file added (binary file not shown): libraries/test_data/src/test/assets/media/mp4/hlg-1080p.mp4