Image frame processing color transfer fix

Used an actual captured image with a set color profile for the test, to minimise the chance of the test flaking. Also renamed the media/bitmap/overlay folder to media/bitmap/input_images for clarity.

PiperOrigin-RevId: 513273353
tofunmi 2023-03-01 18:22:17 +00:00 committed by tonihei
parent 65afd40622
commit e425b6e082
10 changed files with 30 additions and 23 deletions
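For context on the description above: the point of using a captured image with a known color profile is that pixel comparisons stay stable across runs and devices. A minimal sketch of how a test could sanity-check that premise; the helper class and the assertion are illustrative, not part of this commit:

import android.graphics.Bitmap;
import android.graphics.ColorSpace;

/** Illustrative helper (not part of this commit): verifies a decoded test image is sRGB. */
final class InputImageColorSpaceCheck {
  static void assertIsSrgb(Bitmap bitmap) {
    // Bitmap.getColorSpace() can return null for some configs; treat that as a failure too.
    ColorSpace colorSpace = bitmap.getColorSpace();
    if (colorSpace == null || !ColorSpace.get(ColorSpace.Named.SRGB).equals(colorSpace)) {
      throw new AssertionError("Expected an sRGB bitmap but got: " + colorSpace);
    }
  }
}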


@@ -38,8 +38,8 @@ import org.junit.runner.RunWith;
 public class DefaultVideoFrameProcessorImageFrameOutputTest {
   public static final String ORIGINAL_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
-  public static final String WRAPPED_CROP_PNG_ASSET_PATH =
-      "media/bitmap/sample_mp4_first_frame/electrical_colors/image_input_with_wrapped_crop.png";
+  public static final String SCALE_WIDE_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/scale_wide.png";
   public static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
@@ -66,7 +66,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
     videoFrameProcessorTestRunner.queueInputBitmap(
         readBitmap(ORIGINAL_PNG_ASSET_PATH), C.MICROS_PER_SECOND, /* frameRate= */ 2);
     videoFrameProcessorTestRunner.queueInputBitmap(
-        readBitmap(WRAPPED_CROP_PNG_ASSET_PATH), 2 * C.MICROS_PER_SECOND, /* frameRate= */ 3);
+        readBitmap(SCALE_WIDE_PNG_ASSET_PATH), 2 * C.MICROS_PER_SECOND, /* frameRate= */ 3);
     videoFrameProcessorTestRunner.queueInputBitmap(
         readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH), 3 * C.MICROS_PER_SECOND, /* frameRate= */ 4);
     videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
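A note on the three queueInputBitmap calls above: assuming the second argument is a presentation duration in microseconds (the C.MICROS_PER_SECOND multiples suggest as much) and that one frame is generated per 1/frameRate seconds of that duration, the expected output frame count is just duration times frame rate:

// Hypothetical helper, not part of the test runner API: expected number of
// generated frames for a bitmap queued for durationUs at frameRate.
static long expectedFrameCount(long durationUs, long frameRate) {
  return durationUs * frameRate / 1_000_000L; // 1_000_000 microseconds per second
}
// For the calls above: 1 s * 2 fps + 2 s * 3 fps + 3 s * 4 fps = 2 + 6 + 12 = 20 frames.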


@@ -54,8 +54,11 @@ import org.junit.runner.RunWith;
 public final class DefaultVideoFrameProcessorPixelTest {
   public static final String ORIGINAL_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
-  public static final String WRAPPED_CROP_PNG_ASSET_PATH =
-      "media/bitmap/sample_mp4_first_frame/electrical_colors/image_input_with_wrapped_crop.png";
+  public static final String IMAGE_PNG_ASSET_PATH = "media/bitmap/input_images/london.jpg";
+  public static final String IMAGE_TO_VIDEO_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/london_image_to_video.png";
+  public static final String IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH =
+      "media/bitmap/sample_mp4_first_frame/electrical_colors/london_image_to_video_with_crop.png";
   public static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
   public static final String SCALE_WIDE_PNG_ASSET_PATH =
@@ -108,10 +111,11 @@ public final class DefaultVideoFrameProcessorPixelTest {
     String testId = "noEffects_withImageInput_matchesGoldenFile";
     videoFrameProcessorTestRunner =
         getDefaultFrameProcessorTestRunnerBuilder(testId).setIsInputTextureExternal(false).build();
-    Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
+    Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
+    Bitmap expectedBitmap = readBitmap(IMAGE_TO_VIDEO_PNG_ASSET_PATH);
     videoFrameProcessorTestRunner.queueInputBitmap(
-        expectedBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
+        originalBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
     Bitmap actualBitmap = videoFrameProcessorTestRunner.endFrameProcessingAndGetImage();
     // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
@@ -134,8 +138,8 @@ public final class DefaultVideoFrameProcessorPixelTest {
             /* bottom= */ -0.5f,
             /* top= */ 0.5f)))
         .build();
-    Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
-    Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH);
+    Bitmap originalBitmap = readBitmap(IMAGE_PNG_ASSET_PATH);
+    Bitmap expectedBitmap = readBitmap(IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH);
     videoFrameProcessorTestRunner.queueInputBitmap(
         originalBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
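These pixel tests compare the processor output against a golden PNG (IMAGE_TO_VIDEO_PNG_ASSET_PATH and IMAGE_TO_CROPPED_VIDEO_PNG_ASSET_PATH). A rough sketch of such a comparison using only the Android Bitmap API; the real tests use the project's own pixel-diff test utilities and a tuned threshold, so this is illustrative only:

import android.graphics.Bitmap;
import android.graphics.Color;

/** Illustrative golden-file comparison; assumes both bitmaps have identical dimensions. */
final class GoldenBitmapDiff {
  static float averageAbsolutePixelDifference(Bitmap expected, Bitmap actual) {
    long totalDiff = 0;
    for (int y = 0; y < expected.getHeight(); y++) {
      for (int x = 0; x < expected.getWidth(); x++) {
        int e = expected.getPixel(x, y);
        int a = actual.getPixel(x, y);
        totalDiff +=
            Math.abs(Color.red(e) - Color.red(a))
                + Math.abs(Color.green(e) - Color.green(a))
                + Math.abs(Color.blue(e) - Color.blue(a));
      }
    }
    // Average over all pixels and the three color channels.
    return (float) totalDiff / (expected.getWidth() * expected.getHeight() * 3);
  }
}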


@@ -58,7 +58,7 @@ import org.junit.runner.RunWith;
  */
 @RunWith(AndroidJUnit4.class)
 public class OverlayShaderProgramPixelTest {
-  public static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/overlay/media3test.png";
+  public static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
   public static final String ORIGINAL_PNG_ASSET_PATH =
       "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
   public static final String OVERLAY_BITMAP_DEFAULT =


@@ -36,22 +36,23 @@ uniform int uOutputColorTransfer;
 const float inverseGamma = 0.4500;
 const float gamma = 1.0 / inverseGamma;
-// Transforms a single channel from electrical to optical SDR using the SMPTE
-// 170M OETF.
-float smpte170mEotfSingleChannel(float electricalChannel) {
+// Transforms a single channel from electrical to optical SDR using the sRGB
+// EOTF.
+float srgbEotfSingleChannel(float electricalChannel) {
   // Specification:
-  // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
-  return electricalChannel < 0.0812
-      ? electricalChannel / 4.500
-      : pow((electricalChannel + 0.099) / 1.099, gamma);
+  // https://developer.android.com/ndk/reference/group/a-data-space#group___a_data_space_1gga2759ad19cae46646cc5f7002758c4a1cac1bef6aa3a72abbf4a651a0bfb117f96
+  return electricalChannel <= 0.04045
+      ? electricalChannel / 12.92
+      : pow((electricalChannel + 0.055) / 1.055, 2.4);
 }
-// Transforms electrical to optical SDR using the SMPTE 170M EOTF.
-vec3 smpte170mEotf(vec3 electricalColor) {
+// Transforms electrical to optical SDR using the sRGB EOTF.
+vec3 srgbEotf(const vec3 electricalColor) {
   return vec3(
-      smpte170mEotfSingleChannel(electricalColor.r),
-      smpte170mEotfSingleChannel(electricalColor.g),
-      smpte170mEotfSingleChannel(electricalColor.b));
+      srgbEotfSingleChannel(electricalColor.r),
+      srgbEotfSingleChannel(electricalColor.g),
+      srgbEotfSingleChannel(electricalColor.b)
+  );
 }
 // Transforms a single channel from optical to electrical SDR.
@@ -95,7 +96,7 @@ void main() {
   // texture gets flipped. We flip the texture vertically to ensure the
   // orientation of the output is correct.
   vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoordFlipped);
-  vec3 linearInputColor = smpte170mEotf(inputColor.rgb);
+  vec3 linearInputColor = srgbEotf(inputColor.rgb);
   vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1);
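The substance of this shader change is swapping the SMPTE 170M transfer function for the sRGB EOTF, which matches the color space of decoded Bitmap input. A Java transcription of the same piecewise curve, handy for spot-checking values on the CPU; the constants mirror the GLSL above, and the method name is just for illustration:

// sRGB EOTF, mirroring srgbEotfSingleChannel in the fragment shader:
// linear segment below 0.04045, a 2.4-power segment above it.
static double srgbEotf(double electrical) {
  return electrical <= 0.04045
      ? electrical / 12.92
      : Math.pow((electrical + 0.055) / 1.055, 2.4);
}
// Example: srgbEotf(0.5) ≈ 0.214, while the previous SMPTE 170M curve gives
// pow((0.5 + 0.099) / 1.099, 1.0 / 0.45) ≈ 0.260, so the two transfers diverge
// noticeably at mid-gray.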


@@ -0,0 +1,2 @@
+All the files are in the *sRGB color space* specified in IEC 61966-2-1, following the color-space
+model supported by [android.graphics.Bitmap](https://developer.android.com/reference/android/graphics/BitmapFactory.Options#inPreferredColorSpace).
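Since the inputs are declared sRGB, a caller that wants to be explicit can request that color space when decoding, using the BitmapFactory option linked above. A minimal sketch; the stream argument stands in for whatever asset source a test uses:

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ColorSpace;
import java.io.InputStream;

// Decode an input image while explicitly requesting sRGB (inPreferredColorSpace is
// available on API 26+). The decoder may still return a different color space if
// the request cannot be satisfied, so callers can re-check Bitmap.getColorSpace().
static Bitmap decodeAsSrgb(InputStream assetStream) {
  BitmapFactory.Options options = new BitmapFactory.Options();
  options.inPreferredColorSpace = ColorSpace.get(ColorSpace.Named.SRGB);
  return BitmapFactory.decodeStream(assetStream, /* outPadding= */ null, options);
}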

Binary file not shown (new image asset, 2.3 MiB).