Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)

Commit f39fe82bba (parent e9a28beb44)

Support ultra HDR in effect

Adds sending the gainmap to the shader program and applying the gainmap to the base image in the shader.

PiperOrigin-RevId: 616070582
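At a high level, this change stops carrying the gainmap inside GlTextureInfo and instead hands it to the sampling shader program, which uploads it to its own texture and applies it per pixel in a new ultra HDR fragment shader. The sketch below is a simplified, hypothetical summary of that flow; the wrapper method and the local fields it references are illustrative only, while the individual calls follow the diff that follows.

// Hypothetical sketch (not part of this commit): how a gainmapped bitmap flows through
// the pieces touched below, using the names that appear in the diff.
void queueUltraHdrBitmap(Bitmap bitmap, FrameInfo frameInfo, long presentationTimeUs)
    throws GlUtil.GlException {
  // BitmapTextureManager keeps only the SDR base image in a GlTextureInfo...
  int baseTexId = GlUtil.createTexture(bitmap);
  GlTextureInfo sdrTexture =
      new GlTextureInfo(
          baseTexId,
          /* fboId= */ C.INDEX_UNSET,
          /* rboId= */ C.INDEX_UNSET,
          frameInfo.width,
          frameInfo.height);
  // ...and hands the gainmap to the sampling shader program separately.
  if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
    gainmapShaderProgram.setGainmap(bitmap.getGainmap());
  }
  gainmapShaderProgram.queueInputFrame(glObjectsProvider, sdrTexture, presentationTimeUs);
  // DefaultShaderProgram later binds the gainmap texture as uGainmapTexSampler in
  // drawFrame(), so the ultra HDR fragment shader can apply it to the base.
}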
@@ -36,8 +36,6 @@
    resolution but a very small number of frames
    ([#1051](https://github.com/androidx/media/issues/1051)).
* Extractors:
  * Fix issue where padding was not skipped when reading odd-sized chunks
    from WAV files ([#1117](https://github.com/androidx/media/pull/1117)).
* Audio:
  * Allow renderer recovery by disabling offload if audio track fails to
    initialize in offload mode.
@@ -68,6 +66,7 @@
  * Improved PQ to SDR tone-mapping by converting color spaces.
  * Support multiple speed changes within the same `EditedMediaItem` or
    `Composition` in `SpeedChangeEffect`.
  * Support for HLG and PQ output from ultra HDR bitmap input.
* Muxers:
* IMA extension:
* Session:
@@ -15,10 +15,6 @@
 */
package androidx.media3.common;

import static androidx.media3.common.util.Assertions.checkState;

import android.graphics.Gainmap;
import androidx.annotation.Nullable;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;

@@ -55,17 +51,8 @@ public final class GlTextureInfo {
  /** The height of the texture, in pixels, or {@link C#LENGTH_UNSET} if not specified. */
  public final int height;

  /** The {@link Gainmap} associated to this texture, or {@code null} if not specified. */
  @Nullable public final Gainmap gainmap;

  /**
   * The OpenGL texture identifier for a gainmap associated to the contents of {@link #texId}, or
   * {@link C#INDEX_UNSET} if not specified.
   */
  public final int gainmapTexId;

  /**
   * Creates a new instance with {@code null} {@link Gainmap} and unspecified {@code gainmapTexId}.
   * Creates a new instance.
   *
   * @param texId The OpenGL texture identifier, or {@link C#INDEX_UNSET} if not specified.
   * @param fboId Identifier of a framebuffer object associated with the texture, or {@link
@@ -81,35 +68,6 @@ public final class GlTextureInfo {
    this.rboId = rboId;
    this.width = width;
    this.height = height;
    this.gainmap = null;
    this.gainmapTexId = C.INDEX_UNSET;
  }

  /**
   * Creates a new instance.
   *
   * @param texId The OpenGL texture identifier, or {@link C#INDEX_UNSET} if not specified.
   * @param fboId Identifier of a framebuffer object associated with the texture, or {@link
   *     C#INDEX_UNSET} if not specified.
   * @param rboId Identifier of a renderbuffer object associated with the texture, or {@link
   *     C#INDEX_UNSET} if not specified.
   * @param width The width of the texture, in pixels, or {@link C#LENGTH_UNSET} if not specified.
   * @param height The height of the texture, in pixels, or {@link C#LENGTH_UNSET} if not specified.
   * @param gainmap The {@link Gainmap} associated to this texture.
   * @param gainmapTexId The OpenGL texture identifier for a gainmap associated with the contents of
   *     {@link #texId}.
   */
  public GlTextureInfo(
      int texId, int fboId, int rboId, int width, int height, Gainmap gainmap, int gainmapTexId) {
    this.texId = texId;
    this.fboId = fboId;
    this.rboId = rboId;
    this.width = width;
    this.height = height;
    this.gainmap = gainmap;
    checkState(
        gainmapTexId != C.INDEX_UNSET, "If gainmap is non-null, the gainmapTexId must be set");
    this.gainmapTexId = gainmapTexId;
  }

  /** Releases all information associated with this instance. */
@@ -123,8 +81,5 @@ public final class GlTextureInfo {
    if (rboId != C.INDEX_UNSET) {
      GlUtil.deleteRbo(rboId);
    }
    if (gainmapTexId != C.INDEX_UNSET) {
      GlUtil.deleteTexture(gainmapTexId);
    }
  }
}
@@ -0,0 +1,233 @@
#version 300 es
// Copyright 2022 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// ES 3 fragment shader that:
// 1. Samples electrical BT.709 sRGB from an SDR base texture.
// 2. Applies an EOTF, yielding optical linear BT 709 RGB.
// 3. Samples from a gainmap texture and applies a gainmap to the base.
// 4. Applies a BT709 to BT2020 OOTF, yielding optical linear BT 2020 RGB.
// 5. Applies a 4x4 RGB color matrix to change the pixel colors.
// 6. Outputs as requested by uOutputColorTransfer. Use COLOR_TRANSFER_LINEAR
//    for outputting to intermediate shaders, or COLOR_TRANSFER_ST2084 /
//    COLOR_TRANSFER_HLG to output electrical colors via an OETF (e.g. to an
//    encoder).
// The output will be red or blue if an error has occurred.

precision mediump float;
uniform sampler2D uTexSampler;
uniform sampler2D uGainmapTexSampler;
uniform mat4 uRgbMatrix;
// C.java#ColorTransfer value.
// Only COLOR_TRANSFER_LINEAR, COLOR_TRANSFER_GAMMA_2_2, COLOR_TRANSFER_ST2084,
// and COLOR_TRANSFER_HLG are allowed.
uniform int uOutputColorTransfer;

// Uniforms for applying gainmap to base.
uniform int uGainmapIsAlpha;
uniform int uNoGamma;
uniform int uSingleChannel;
uniform vec4 uLogRatioMin;
uniform vec4 uLogRatioMax;
uniform vec4 uEpsilonSdr;
uniform vec4 uEpsilonHdr;
uniform vec4 uGainmapGamma;
uniform float uDisplayRatioHdr;
uniform float uDisplayRatioSdr;

in vec2 vTexSamplingCoord;
out vec4 outColor;

// TODO - b/320237307: Investigate possible HDR/SDR ratios. The value is
// calculated as targetHdrPeakBrightnessInNits / targetSdrWhitePointInNits. In
// other effect HDR processing and some parts of the wider android ecosystem the
// assumption is targetHdrPeakBrightnessInNits=1000 and
// targetSdrWhitePointInNits=500, but 1 seems to have the best white balance
// upon visual testing.
const float HDR_SDR_RATIO = 1.0;

// LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_LINEAR = 1;
const int COLOR_TRANSFER_GAMMA_2_2 = 10;
const int COLOR_TRANSFER_ST2084 = 6;
const int COLOR_TRANSFER_HLG = 7;

// Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
// https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
const mat3 RGB_BT2020_TO_XYZ =
    mat3(0.63695805f, 0.26270021f, 0.00000000f, 0.14461690f, 0.67799807f,
         0.02807269f, 0.16888098f, 0.05930172f, 1.06098506f);
// Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
const mat3 XYZ_TO_RGB_BT709 =
    mat3(3.24096994f, -0.96924364f, 0.05563008f, -1.53738318f, 1.87596750f,
         -0.20397696f, -0.49861076f, 0.04155506f, 1.05697151f);
// Matrix values are calculated as inverse of RGB_BT2020_TO_XYZ.
const mat3 XYZ_TO_RGB_BT2020 =
    mat3(1.71665f, -0.666684f, 0.0176399f, -0.355671f, 1.61648f, -0.0427706,
         -0.253366f, 0.0157685f, 0.942103f);
// Matrix values are calculated as inverse of XYZ_TO_RGB_BT709.
const mat3 RGB_BT709_TO_XYZ =
    mat3(0.412391f, 0.212639f, 0.0193308f, 0.357584f, 0.715169f, 0.119195f,
         0.180481f, 0.0721923f, 0.950532f);

// TODO(b/227624622): Consider using mediump to save precision, if it won't lead
// to noticeable quantization errors.

// Transforms a single channel from electrical to optical SDR using the sRGB
// EOTF.
float srgbEotfSingleChannel(float electricalChannel) {
  // Specification:
  // https://developer.android.com/ndk/reference/group/a-data-space#group___a_data_space_1gga2759ad19cae46646cc5f7002758c4a1cac1bef6aa3a72abbf4a651a0bfb117f96
  return electricalChannel <= 0.04045
             ? electricalChannel / 12.92
             : pow((electricalChannel + 0.055) / 1.055, 2.4);
}

// Transforms electrical to optical SDR using the sRGB EOTF.
vec3 srgbEotf(const vec3 electricalColor) {
  return vec3(srgbEotfSingleChannel(electricalColor.r),
              srgbEotfSingleChannel(electricalColor.g),
              srgbEotfSingleChannel(electricalColor.b));
}

// Applies the appropriate EOTF to convert nonlinear electrical values to linear
// optical values. Input and output are both normalized to [0, 1].
highp vec3 applyEotf(highp vec3 electricalColor) {
  return srgbEotf(electricalColor);
}

// BT.2100 / BT.2020 HLG OETF for one channel.
highp float hlgOetfSingleChannel(highp float linearChannel) {
  // Specification:
  // https://www.khronos.org/registry/DataFormat/specs/1.3/dataformat.1.3.inline.html#TRANSFER_HLG
  // Reference implementation:
  // https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/renderengine/gl/ProgramCache.cpp;l=529-543;drc=de09f10aa504fd8066370591a00c9ff1cafbb7fa
  const highp float a = 0.17883277;
  const highp float b = 0.28466892;
  const highp float c = 0.55991073;

  return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
                                     : a * log(12.0 * linearChannel - b) + c;
}

// BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) {
  return vec3(hlgOetfSingleChannel(linearColor.r),
              hlgOetfSingleChannel(linearColor.g),
              hlgOetfSingleChannel(linearColor.b));
}

// BT.2100 / BT.2020, PQ / ST2084 OETF.
highp vec3 pqOetf(highp vec3 linearColor) {
  // Specification:
  // https://registry.khronos.org/DataFormat/specs/1.3/dataformat.1.3.inline.html#TRANSFER_PQ
  // Reference implementation:
  // https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/renderengine/gl/ProgramCache.cpp;l=514-527;drc=de09f10aa504fd8066370591a00c9ff1cafbb7fa
  const highp float m1 = (2610.0 / 16384.0);
  const highp float m2 = (2523.0 / 4096.0) * 128.0;
  const highp float c1 = (3424.0 / 4096.0);
  const highp float c2 = (2413.0 / 4096.0) * 32.0;
  const highp float c3 = (2392.0 / 4096.0) * 32.0;

  highp vec3 temp = pow(linearColor, vec3(m1));
  temp = (c1 + c2 * temp) / (1.0 + c3 * temp);
  return pow(temp, vec3(m2));
}

// BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) {
  // Reference:
  // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
  return pow(linearChannel, (1.0 / 2.2));
}

// BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) {
  return vec3(gamma22OetfSingleChannel(linearColor.r),
              gamma22OetfSingleChannel(linearColor.g),
              gamma22OetfSingleChannel(linearColor.b));
}

// Applies the appropriate OETF to convert linear optical signals to nonlinear
// electrical signals. Input and output are both normalized to [0, 1].
highp vec3 applyOetf(highp vec3 linearColor) {
  if (uOutputColorTransfer == COLOR_TRANSFER_ST2084) {
    return pqOetf(linearColor);
  } else if (uOutputColorTransfer == COLOR_TRANSFER_HLG) {
    return hlgOetf(linearColor);
  } else if (uOutputColorTransfer == COLOR_TRANSFER_GAMMA_2_2) {
    return gamma22Oetf(linearColor);
  } else if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR) {
    return linearColor;
  } else {
    // Output blue as an obviously visible error.
    return vec3(0.0, 0.0, 1.0);
  }
}

vec2 getVTexSamplingCoord() {
  // Whereas the Android system uses the top-left corner as (0,0) of the
  // coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
  // texture gets flipped. We flip the texture vertically to ensure the
  // orientation of the output is correct.
  return vec2(vTexSamplingCoord.x, 1.0 - vTexSamplingCoord.y);
}

// Reference:
// https://developer.android.com/reference/android/graphics/Gainmap#applying-a-gainmap-manually
// Reference Implementation:
// https://source.corp.google.com/h/googleplex-android/platform/superproject/main/+/main:frameworks/base/libs/hwui/effects/GainmapRenderer.cpp;l=97-147;drc=45fd4a5013383f37c8e8a354b1626a8e1aebe29a
highp vec3 applyGainmapToBase(vec4 S) {
  vec4 G = texture(uGainmapTexSampler, getVTexSamplingCoord());
  float W = clamp((log(HDR_SDR_RATIO) - log(uDisplayRatioSdr)) /
                      (log(uDisplayRatioHdr) - log(uDisplayRatioSdr)),
                  0.0, 1.0);
  vec3 H;
  if (uGainmapIsAlpha == 1) {
    G = vec4(G.a, G.a, G.a, 1.0);
  }
  if (uSingleChannel == 1) {
    mediump float L;
    if (uNoGamma == 1) {
      L = mix(uLogRatioMin.r, uLogRatioMax.r, G.r);
    } else {
      L = mix(uLogRatioMin.r, uLogRatioMax.r, pow(G.r, uGainmapGamma.r));
    }
    H = (S.rgb + uEpsilonSdr.rgb) * exp(L * W) - uEpsilonHdr.rgb;
  } else {
    mediump vec3 L;
    if (uNoGamma == 1) {
      L = mix(uLogRatioMin.rgb, uLogRatioMax.rgb, G.rgb);
    } else {
      L = mix(uLogRatioMin.rgb, uLogRatioMax.rgb,
              pow(G.rgb, uGainmapGamma.rgb));
    }
    H = (S.rgb + uEpsilonSdr.rgb) * exp(L * W) - uEpsilonHdr.rgb;
  }
  return H;
}

highp vec3 bt709ToBt2020(vec3 bt709Color) {
  return XYZ_TO_RGB_BT2020 * RGB_BT709_TO_XYZ * bt709Color;
}

void main() {
  vec4 baseElectricalColor = texture(uTexSampler, getVTexSamplingCoord());
  float alpha = baseElectricalColor.a;
  vec4 baseOpticalColor = vec4(applyEotf(baseElectricalColor.xyz), alpha);
  vec3 opticalBt709Color = applyGainmapToBase(baseOpticalColor);
  vec3 opticalBt2020Color = bt709ToBt2020(opticalBt709Color);
  vec4 transformedColors = uRgbMatrix * vec4(opticalBt2020Color, alpha);
  outColor = vec4(applyOetf(transformedColors.rgb), alpha);
}
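For readers following the gainmap math above, the same per-pixel formula can be written out in plain Java. This is an illustrative sketch only (not part of the commit); the parameter names mirror the shader uniforms, and the body shows the gamma-applied, single-channel path.

// Illustrative Java translation of applyGainmapToBase() for one channel.
static double applyGainmapToChannel(
    double baseSdr, // S: linear SDR base sample, in [0, 1].
    double gain, // G: gainmap sample, in [0, 1].
    double logRatioMin,
    double logRatioMax,
    double epsilonSdr,
    double epsilonHdr,
    double gamma,
    double hdrSdrRatio, // HDR_SDR_RATIO in the shader; 1.0 there.
    double displayRatioSdr,
    double displayRatioHdr) {
  // Weight of the gainmap: 0 at the SDR display ratio, 1 at the full HDR display ratio.
  double w =
      (Math.log(hdrSdrRatio) - Math.log(displayRatioSdr))
          / (Math.log(displayRatioHdr) - Math.log(displayRatioSdr));
  w = Math.max(0.0, Math.min(1.0, w));
  // L = mix(logRatioMin, logRatioMax, pow(G, gamma)), as in the shader.
  double l = logRatioMin + (logRatioMax - logRatioMin) * Math.pow(gain, gamma);
  // H = (S + epsilonSdr) * exp(L * W) - epsilonHdr.
  return (baseSdr + epsilonSdr) * Math.exp(l * w) - epsilonHdr;
}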
@@ -21,7 +21,6 @@ import static androidx.media3.common.util.Assertions.checkState;

import android.graphics.Bitmap;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
@@ -45,12 +44,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
  private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
  private final GlObjectsProvider glObjectsProvider;

  private @MonotonicNonNull GlShaderProgram shaderProgram;
  private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
  private @MonotonicNonNull GainmapShaderProgram gainmapShaderProgram;
  private @MonotonicNonNull GlTextureInfo currentSdrGlTextureInfo;
  private int downstreamShaderProgramCapacity;
  private boolean currentInputStreamEnded;
  private boolean isNextFrameInTexture;
  private boolean useHdr;

  /**
   * Creates a new instance.
@@ -67,10 +65,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    pendingBitmaps = new LinkedBlockingQueue<>();
  }

  /**
   * {@inheritDoc}
   *
   * <p>{@link GlShaderProgram} must be a {@link GainmapShaderProgram}.
   */
  @Override
  public void setSamplingGlShaderProgram(GlShaderProgram samplingGlShaderProgram) {
    checkState(samplingGlShaderProgram instanceof GainmapShaderProgram);
    downstreamShaderProgramCapacity = 0;
    this.shaderProgram = samplingGlShaderProgram;
    this.gainmapShaderProgram = (GainmapShaderProgram) samplingGlShaderProgram;
  }

  @Override
@@ -92,11 +96,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
        });
  }

  @Override
  public void setInputFrameInfo(FrameInfo inputFrameInfo) {
    this.useHdr = ColorInfo.isTransferHdr(inputFrameInfo.colorInfo);
  }

  @Override
  public int getPendingFrameCount() {
    // Always treat all queued bitmaps as immediately processed.
@@ -108,7 +107,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    videoFrameProcessingTaskExecutor.submit(
        () -> {
          if (pendingBitmaps.isEmpty()) {
            checkNotNull(shaderProgram).signalEndOfCurrentInputStream();
            checkNotNull(gainmapShaderProgram).signalEndOfCurrentInputStream();
            DebugTraceUtil.logEvent(
                DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
          } else {
@@ -121,8 +120,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
  public void release() {
    videoFrameProcessingTaskExecutor.submit(
        () -> {
          if (currentGlTextureInfo != null) {
            currentGlTextureInfo.release();
          if (currentSdrGlTextureInfo != null) {
            currentSdrGlTextureInfo.release();
          }
          pendingBitmaps.clear();
        });
@@ -153,9 +152,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    }

    downstreamShaderProgramCapacity--;
    checkNotNull(shaderProgram)
    checkNotNull(gainmapShaderProgram)
        .queueInputFrame(
            glObjectsProvider, checkNotNull(currentGlTextureInfo), currentPresentationTimeUs);
            glObjectsProvider, checkNotNull(currentSdrGlTextureInfo), currentPresentationTimeUs);
    DebugTraceUtil.logEvent(
        DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP,
        currentPresentationTimeUs,
@@ -169,7 +168,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    finishedBitmapInfo.bitmap.recycle();
    if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
      // Only signal end of stream after all pending bitmaps are processed.
      checkNotNull(shaderProgram).signalEndOfCurrentInputStream();
      checkNotNull(gainmapShaderProgram).signalEndOfCurrentInputStream();
      DebugTraceUtil.logEvent(
          DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
      currentInputStreamEnded = false;
@@ -201,28 +200,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
      throws VideoFrameProcessingException {
    int currentTexId;
    try {
      if (currentGlTextureInfo != null) {
        currentGlTextureInfo.release();
      if (currentSdrGlTextureInfo != null) {
        currentSdrGlTextureInfo.release();
      }
      currentTexId = GlUtil.createTexture(bitmap);
      if (useHdr && Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
        currentGlTextureInfo =
            new GlTextureInfo(
                currentTexId,
                /* fboId= */ C.INDEX_UNSET,
                /* rboId= */ C.INDEX_UNSET,
                frameInfo.width,
                frameInfo.height,
                checkNotNull(bitmap.getGainmap()),
                GlUtil.createTexture(bitmap.getGainmap().getGainmapContents()));
      } else {
        currentGlTextureInfo =
      currentSdrGlTextureInfo =
          new GlTextureInfo(
              currentTexId,
              /* fboId= */ C.INDEX_UNSET,
              /* rboId= */ C.INDEX_UNSET,
              frameInfo.width,
              frameInfo.height);
      if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
        checkNotNull(gainmapShaderProgram).setGainmap(checkNotNull(bitmap.getGainmap()));
      }
    } catch (GlUtil.GlException e) {
      throw VideoFrameProcessingException.from(e);
@@ -20,8 +20,11 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkState;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Gainmap;
import android.opengl.GLES20;
import android.opengl.Matrix;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
@@ -29,12 +32,16 @@ import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor.InputType;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.GlUtil.GlException;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

/**
 * Applies a sequence of {@link MatrixTransformation MatrixTransformations} in the vertex shader and
@@ -55,7 +62,7 @@ import java.util.List;
@UnstableApi
@SuppressWarnings("FunctionalInterfaceClash") // b/228192298
/* package */ final class DefaultShaderProgram extends BaseGlShaderProgram
    implements ExternalShaderProgram {
    implements ExternalShaderProgram, GainmapShaderProgram {

  private static final String VERTEX_SHADER_TRANSFORMATION_PATH =
      "shaders/vertex_shader_transformation_es2.glsl";
@@ -73,6 +80,8 @@ import java.util.List;
      "shaders/fragment_shader_transformation_sdr_external_es2.glsl";
  private static final String FRAGMENT_SHADER_TRANSFORMATION_HDR_INTERNAL_ES3_PATH =
      "shaders/fragment_shader_transformation_hdr_internal_es3.glsl";
  private static final String FRAGMENT_SHADER_TRANSFORMATION_ULTRA_HDR_ES3_PATH =
      "shaders/fragment_shader_transformation_ultra_hdr_es3.glsl";
  private static final String FRAGMENT_SHADER_TRANSFORMATION_SDR_INTERNAL_PATH =
      "shaders/fragment_shader_transformation_sdr_internal_es2.glsl";
  private static final ImmutableList<float[]> NDC_SQUARE =
@@ -99,6 +108,8 @@ import java.util.List;
  private static final int GL_FALSE = 0;
  private static final int GL_TRUE = 1;

  private final GlProgram glProgram;

  /** The {@link MatrixTransformation MatrixTransformations} to apply. */
  private final ImmutableList<GlMatrixTransformation> matrixTransformations;

@@ -142,7 +153,9 @@ import java.util.List;
   */
  private ImmutableList<float[]> visiblePolygon;

  private final GlProgram glProgram;
  private @MonotonicNonNull Gainmap lastGainmap;
  private int lastGainmapGenerationId;
  private int gainmapTexId;
  private @C.ColorTransfer int outputColorTransfer;

  /**
@@ -209,16 +222,27 @@ import java.util.List;
    checkState(
        inputColorInfo.colorTransfer != C.COLOR_TRANSFER_SRGB || inputType == INPUT_TYPE_BITMAP);
    boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
    boolean isUsingUltraHdr =
        inputType == INPUT_TYPE_BITMAP && outputColorInfo.colorSpace == C.COLOR_SPACE_BT2020;
    String vertexShaderFilePath =
        isInputTransferHdr
        isInputTransferHdr || isUsingUltraHdr
            ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH
            : VERTEX_SHADER_TRANSFORMATION_PATH;
    String fragmentShaderFilePath =
        isInputTransferHdr
        isUsingUltraHdr
            ? FRAGMENT_SHADER_TRANSFORMATION_ULTRA_HDR_ES3_PATH
            : isInputTransferHdr
                ? FRAGMENT_SHADER_TRANSFORMATION_HDR_INTERNAL_ES3_PATH
                : FRAGMENT_SHADER_TRANSFORMATION_SDR_INTERNAL_PATH;
    GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
    if (!isUsingUltraHdr) {
      glProgram.setIntUniform("uInputColorTransfer", inputColorInfo.colorTransfer);
    }
    if (isInputTransferHdr) {
      glProgram.setIntUniform(
          "uApplyHdrToSdrToneMapping",
          outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020 ? GL_TRUE : GL_FALSE);
    }
    return createWithSampler(glProgram, inputColorInfo, outputColorInfo, enableColorTransfers);
  }

@@ -272,6 +296,9 @@ import java.util.List;
              ? BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX
              : BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX);
      glProgram.setIntUniform("uInputColorTransfer", inputColorInfo.colorTransfer);
      glProgram.setIntUniform(
          "uApplyHdrToSdrToneMapping",
          outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020 ? GL_TRUE : GL_FALSE);
    }

    return createWithSampler(glProgram, inputColorInfo, outputColorInfo, enableColorTransfers);
@@ -343,14 +370,15 @@ import java.util.List;
      ColorInfo outputColorInfo,
      boolean enableColorTransfers) {
    boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
    boolean isExpandingColorGamut =
        (inputColorInfo.colorSpace == C.COLOR_SPACE_BT709
                || inputColorInfo.colorSpace == C.COLOR_SPACE_BT601)
            && outputColorInfo.colorSpace == C.COLOR_SPACE_BT2020;
    @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
    if (isInputTransferHdr) {
      checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);
      checkArgument(enableColorTransfers);
      // TODO(b/239735341): Add a setBooleanUniform method to GlProgram.
      glProgram.setIntUniform(
          "uApplyHdrToSdrToneMapping",
          /* value= */ (outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020) ? GL_TRUE : GL_FALSE);
      checkArgument(outputColorTransfer != Format.NO_VALUE);
      if (outputColorTransfer == C.COLOR_TRANSFER_SDR) {
        // When tone-mapping from HDR to SDR, COLOR_TRANSFER_SDR is interpreted as
@@ -358,6 +386,9 @@ import java.util.List;
        outputColorTransfer = C.COLOR_TRANSFER_GAMMA_2_2;
      }
      glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
    } else if (isExpandingColorGamut) {
      checkArgument(enableColorTransfers);
      glProgram.setIntUniform("uOutputColorTransfer", outputColorTransfer);
    } else {
      glProgram.setIntUniform("uEnableColorTransfer", enableColorTransfers ? GL_TRUE : GL_FALSE);
      checkArgument(
@@ -372,7 +403,7 @@ import java.util.List;
        /* matrixTransformations= */ ImmutableList.of(),
        /* rgbMatrices= */ ImmutableList.of(),
        outputColorInfo.colorTransfer,
        isInputTransferHdr);
        /* useHdr= */ isInputTransferHdr || isExpandingColorGamut);
  }

  /**
@@ -406,6 +437,8 @@ import java.util.List;
    compositeRgbMatrixArray = GlUtil.create4x4IdentityMatrix();
    tempResultMatrix = new float[16];
    visiblePolygon = NDC_SQUARE;
    gainmapTexId = C.INDEX_UNSET;
    lastGainmapGenerationId = C.INDEX_UNSET;
  }

  private static GlProgram createGlProgram(
@@ -444,6 +477,7 @@ import java.util.List;

    try {
      glProgram.use();
      setGainmapSamplerAndUniforms();
      glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
      glProgram.setFloatsUniform("uTransformationMatrix", compositeTransformationMatrixArray);
      glProgram.setFloatsUniform("uRgbMatrix", compositeRgbMatrixArray);
@@ -470,6 +504,31 @@ import java.util.List;
    }
  }

  /**
   * Sets the {@link Gainmap} applied to the input frame to create a HDR output frame.
   *
   * <p>The gainmap is ignored if {@code useHdr} is {@code false}.
   */
  @Override
  @RequiresApi(34) // getGainmapContents() added in API level 34.
  public void setGainmap(Gainmap gainmap) throws GlException {
    if (!useHdr) {
      return;
    }
    int gainmapGenerationId = gainmap.getGainmapContents().getGenerationId();
    if (Objects.equals(this.lastGainmap, gainmap)
        && gainmapGenerationId == this.lastGainmapGenerationId) {
      return;
    }
    this.lastGainmap = gainmap;
    this.lastGainmapGenerationId = gainmapGenerationId;
    if (gainmapTexId == C.INDEX_UNSET) {
      gainmapTexId = GlUtil.createTexture(gainmap.getGainmapContents());
    } else {
      GlUtil.setTexture(gainmapTexId, gainmap.getGainmapContents());
    }
  }

  /**
   * Sets the output {@link C.ColorTransfer}.
   *
@@ -592,4 +651,38 @@ import java.util.List;
    }
    return matrixChanged;
  }

  private void setGainmapSamplerAndUniforms() throws GlUtil.GlException {
    if (lastGainmap == null) {
      return;
    }
    if (Util.SDK_INT < 34) {
      throw new IllegalStateException("Gainmaps not supported under API 34.");
    }
    glProgram.setSamplerTexIdUniform("uGainmapTexSampler", gainmapTexId, /* texUnitIndex= */ 1);

    boolean gainmapIsAlpha = lastGainmap.getGainmapContents().getConfig() == Bitmap.Config.ALPHA_8;
    float[] gainmapGamma = lastGainmap.getGamma();
    boolean noGamma = gainmapGamma[0] == 1f && gainmapGamma[1] == 1f && gainmapGamma[2] == 1f;
    boolean singleChannel =
        areAllChannelsEqual(gainmapGamma)
            && areAllChannelsEqual(lastGainmap.getRatioMax())
            && areAllChannelsEqual(lastGainmap.getRatioMin());

    glProgram.setIntUniform("uGainmapIsAlpha", gainmapIsAlpha ? GL_TRUE : GL_FALSE);
    glProgram.setIntUniform("uNoGamma", noGamma ? GL_TRUE : GL_FALSE);
    glProgram.setIntUniform("uSingleChannel", singleChannel ? GL_TRUE : GL_FALSE);
    glProgram.setFloatsUniform("uLogRatioMin", lastGainmap.getRatioMin());
    glProgram.setFloatsUniform("uLogRatioMax", lastGainmap.getRatioMax());
    glProgram.setFloatsUniform("uEpsilonSdr", lastGainmap.getEpsilonSdr());
    glProgram.setFloatsUniform("uEpsilonHdr", lastGainmap.getEpsilonHdr());
    glProgram.setFloatsUniform("uGainmapGamma", gainmapGamma);
    glProgram.setFloatUniform("uDisplayRatioHdr", lastGainmap.getDisplayRatioForFullHdr());
    glProgram.setFloatUniform("uDisplayRatioSdr", lastGainmap.getMinDisplayRatioForHdrTransition());
    GlUtil.checkGlError();
  }

  private static boolean areAllChannelsEqual(float[] channels) {
    return channels[0] == channels[1] && channels[1] == channels[2];
  }
}
@@ -426,6 +426,13 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
    if (!inputStreamRegisteredCondition.isOpen()) {
      return false;
    }
    if (ColorInfo.isTransferHdr(outputColorInfo)) {
      checkArgument(
          Util.SDK_INT >= 34 && inputBitmap.hasGainmap(),
          "VideoFrameProcessor configured for HDR output, but either received SDR input, or is on"
              + " an API level that doesn't support gainmaps. SDR to HDR tonemapping is not"
              + " supported.");
    }
    FrameInfo frameInfo = checkNotNull(this.nextInputFrameInfo);
    inputSwitcher
        .activeTextureManager()
@@ -906,14 +913,27 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
    checkArgument(outputColorInfo.colorTransfer != C.COLOR_TRANSFER_LINEAR);

    if (ColorInfo.isTransferHdr(inputColorInfo) != ColorInfo.isTransferHdr(outputColorInfo)) {
      // OpenGL tone mapping is only implemented for BT2020 to BT709 and HDR to SDR.
      checkArgument(inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020);
      checkArgument(outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020);
      checkArgument(ColorInfo.isTransferHdr(inputColorInfo));
      checkArgument(
          outputColorInfo.colorTransfer == C.COLOR_TRANSFER_GAMMA_2_2
          isSupportedToneMapping(inputColorInfo, outputColorInfo)
              || isUltraHdr(inputColorInfo, outputColorInfo));
    }
  }

  private static boolean isSupportedToneMapping(
      ColorInfo inputColorInfo, ColorInfo outputColorInfo) {
    // OpenGL tone mapping is only implemented for BT2020 to BT709 and HDR to SDR.
    return inputColorInfo.colorSpace == C.COLOR_SPACE_BT2020
        && outputColorInfo.colorSpace != C.COLOR_SPACE_BT2020
        && ColorInfo.isTransferHdr(inputColorInfo)
        && (outputColorInfo.colorTransfer == C.COLOR_TRANSFER_GAMMA_2_2
            || outputColorInfo.colorTransfer == C.COLOR_TRANSFER_SDR);
  }

  private static boolean isUltraHdr(ColorInfo inputColorInfo, ColorInfo outputColorInfo) {
    // UltraHDR is only implemented from SRGB_BT709_FULL to BT2020 HDR.
    return inputColorInfo.equals(ColorInfo.SRGB_BT709_FULL)
        && outputColorInfo.colorSpace == C.COLOR_SPACE_BT2020
        && ColorInfo.isTransferHdr(outputColorInfo);
  }

  /**
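As a concrete illustration of the isUltraHdr() check above, the color info pair below would take the ultra HDR path, while an HDR input with an SDR output would instead have to satisfy isSupportedToneMapping(). This mirrors the color infos used in the tests later in this commit and is a sketch rather than code from the change.

// SDR sRGB bitmap input plus BT.2020 HLG output: isUltraHdr(...) returns true.
ColorInfo inputColorInfo = ColorInfo.SRGB_BT709_FULL;
ColorInfo outputColorInfo =
    new ColorInfo.Builder()
        .setColorSpace(C.COLOR_SPACE_BT2020)
        .setColorTransfer(C.COLOR_TRANSFER_HLG) // Or C.COLOR_TRANSFER_ST2084 for PQ output.
        .setColorRange(C.COLOR_RANGE_FULL)
        .build();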
@@ -0,0 +1,30 @@
/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.effect;

import android.graphics.Gainmap;
import androidx.media3.common.util.GlUtil.GlException;

/** Interface for a {@link GlShaderProgram} that samples from a gainmap. */
/* package */ interface GainmapShaderProgram extends GlShaderProgram {

  /**
   * Sets the {@link Gainmap} that is applied to the output frame.
   *
   * @param gainmap The {@link Gainmap}.
   */
  void setGainmap(Gainmap gainmap) throws GlException;
}
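A minimal sketch of how a texture manager is expected to use this interface, based on the BitmapTextureManager change earlier in this commit (assumes API 34+, a bitmap that actually carries a gainmap, and a statically imported checkNotNull):

// Sketch: hand a bitmap's gainmap to the sampling shader program before queueing the frame.
void maybeSetGainmap(Bitmap bitmap, GainmapShaderProgram gainmapShaderProgram)
    throws GlUtil.GlException {
  if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
    gainmapShaderProgram.setGainmap(checkNotNull(bitmap.getGainmap()));
  }
}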
@@ -101,12 +101,6 @@ import org.checkerframework.checker.nullness.qual.Nullable;
            context, inputColorInfo, outputColorInfo, enableColorTransfers);
        break;
      case INPUT_TYPE_BITMAP:
        // HDR bitmap input is not supported.
        checkState(!ColorInfo.isTransferHdr(inputColorInfo));
        samplingShaderProgram =
            DefaultShaderProgram.createWithInternalSampler(
                context, inputColorInfo, outputColorInfo, enableColorTransfers, inputType);
        break;
      case INPUT_TYPE_TEXTURE_ID:
        samplingShaderProgram =
            DefaultShaderProgram.createWithInternalSampler(
Three new binary files added (contents not shown in the diff), including
libraries/test_data/src/test/assets/media/jpeg/ultraHDR.jpg; sizes 2.1 MiB, 22 MiB, and 19 MiB.
@@ -26,12 +26,14 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECO
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.EncoderUtil.getSupportedEncodersForHdrEditing;
import static androidx.media3.transformer.mh.UnoptimizedGlEffect.NO_OP_EFFECT;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;

import android.content.Context;
import android.graphics.Bitmap;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
@@ -39,6 +41,7 @@ import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.media3.effect.BitmapOverlay;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
@@ -48,12 +51,14 @@ import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.TextureBitmapReader;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.EncoderUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.json.JSONException;
import org.junit.After;
import org.junit.AssumptionViolatedException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -75,11 +80,16 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  private static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
      "test-generated-goldens/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
  private static final String OVERLAY_PNG_ASSET_PATH = "media/png/media3test.png";
  private static final String ULTRA_HDR_ASSET_PATH = "media/jpeg/ultraHDR.jpg";

  private static final String ORIGINAL_HLG10_PNG_ASSET_PATH =
      "test-generated-goldens/sample_mp4_first_frame/electrical_colors/original_hlg10.png";
  private static final String ORIGINAL_HDR10_PNG_ASSET_PATH =
      "test-generated-goldens/sample_mp4_first_frame/electrical_colors/original_hdr10.png";
  private static final String ULTRA_HDR_TO_HLG_PNG_ASSET_PATH =
      "test-generated-goldens/hdr-goldens/ultrahdr_to_hlg.png";
  private static final String ULTRA_HDR_TO_PQ_PNG_ASSET_PATH =
      "test-generated-goldens/hdr-goldens/ultrahdr_to_pq.png";

  /** Input SDR video of which we only use the first frame. */
  private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
@@ -117,7 +127,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
            /* outputFormat= */ null)) {
      return;
    }
    videoFrameProcessorTestRunner = getSurfaceInputFrameProcessorTestRunnerBuilder(testId).build();
    videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
    Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

    videoFrameProcessorTestRunner.processFirstFrameAndEnd();
@@ -172,7 +182,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
    Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
    BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
    videoFrameProcessorTestRunner =
        getSurfaceInputFrameProcessorTestRunnerBuilder(testId)
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
            .build();
    Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);
@@ -220,17 +230,14 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No HLG editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
    }
    ColorInfo colorInfo = checkNotNull(format.colorInfo);
    videoFrameProcessorTestRunner =
        getSurfaceInputFrameProcessorTestRunnerBuilder(testId)
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(colorInfo)
            .setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING)
            .build();
@@ -251,10 +258,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  public void noEffects_hlg10TextureInput_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No HLG editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
@@ -282,21 +286,51 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noEffects_hlg10UltraHDRImageInput_matchesGoldenFile() throws Exception {
    assumeDeviceSupportsHdrEditing(MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT);
    assumeDeviceSupportsUltraHdrEditing();
    ColorInfo outputColorInfo =
        new ColorInfo.Builder()
            .setColorSpace(C.COLOR_SPACE_BT2020)
            .setColorTransfer(C.COLOR_TRANSFER_HLG)
            .setColorRange(C.COLOR_RANGE_FULL)
            .build();
    videoFrameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(outputColorInfo)
            .build();
    Bitmap originalBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_TO_HLG_PNG_ASSET_PATH);

    videoFrameProcessorTestRunner.queueInputBitmap(
        originalBitmap,
        /* durationUs= */ C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0L,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();
    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
            expectedBitmap, actualBitmap);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noEffects_hdr10Input_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No PQ editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
    }
    ColorInfo colorInfo = checkNotNull(format.colorInfo);
    videoFrameProcessorTestRunner =
        getSurfaceInputFrameProcessorTestRunnerBuilder(testId)
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(colorInfo)
            .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
            .build();
@@ -317,10 +351,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  public void noEffects_hdr10TextureInput_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No PQ editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
@@ -348,21 +379,51 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noEffects_hdr10UltraHDRImageInput_matchesGoldenFile() throws Exception {
    assumeDeviceSupportsHdrEditing(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
    assumeDeviceSupportsUltraHdrEditing();
    ColorInfo outputColorInfo =
        new ColorInfo.Builder()
            .setColorSpace(C.COLOR_SPACE_BT2020)
            .setColorTransfer(C.COLOR_TRANSFER_ST2084)
            .setColorRange(C.COLOR_RANGE_FULL)
            .build();
    videoFrameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(outputColorInfo)
            .build();
    Bitmap originalBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_TO_PQ_PNG_ASSET_PATH);

    videoFrameProcessorTestRunner.queueInputBitmap(
        originalBitmap,
        /* durationUs= */ C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0L,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();
    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
            expectedBitmap, actualBitmap);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noOpEffect_hlg10Input_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No HLG editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
    }
    ColorInfo colorInfo = checkNotNull(format.colorInfo);
    videoFrameProcessorTestRunner =
        getSurfaceInputFrameProcessorTestRunnerBuilder(testId)
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(colorInfo)
            .setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING)
            .setEffects(NO_OP_EFFECT)
@@ -384,10 +445,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  public void noOpEffect_hlg10TextureInput_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No HLG editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
@@ -415,21 +473,52 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noOpEffect_hlg10UltraHDRImageInput_matchesGoldenFile() throws Exception {
    assumeDeviceSupportsHdrEditing(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
    assumeDeviceSupportsUltraHdrEditing();
    ColorInfo outputColorInfo =
        new ColorInfo.Builder()
            .setColorSpace(C.COLOR_SPACE_BT2020)
            .setColorTransfer(C.COLOR_TRANSFER_HLG)
            .setColorRange(C.COLOR_RANGE_FULL)
            .build();
    videoFrameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(outputColorInfo)
            .setEffects(NO_OP_EFFECT)
            .build();
    Bitmap originalBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_TO_HLG_PNG_ASSET_PATH);

    videoFrameProcessorTestRunner.queueInputBitmap(
        originalBitmap,
        /* durationUs= */ C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0L,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();
    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
            expectedBitmap, actualBitmap);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noOpEffect_hdr10Input_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No PQ editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
    }
    ColorInfo colorInfo = checkNotNull(format.colorInfo);
    videoFrameProcessorTestRunner =
        getSurfaceInputFrameProcessorTestRunnerBuilder(testId)
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(colorInfo)
            .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
            .setEffects(NO_OP_EFFECT)
@@ -451,10 +540,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  public void noOpEffect_hdr10TextureInput_matchesGoldenFile() throws Exception {
    Context context = getApplicationContext();
    Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
    if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No PQ editing support");
      return;
    }
    assumeDeviceSupportsHdrEditing(format);
    if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
        context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
      return;
@@ -482,6 +568,40 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  @Test
  public void noOpEffect_hdr10UltraHDRImageInput_matchesGoldenFile() throws Exception {
    assumeDeviceSupportsHdrEditing(MP4_ASSET_720P_4_SECOND_HDR10_FORMAT);
    assumeDeviceSupportsUltraHdrEditing();
    ColorInfo outputColorInfo =
        new ColorInfo.Builder()
            .setColorSpace(C.COLOR_SPACE_BT2020)
            .setColorTransfer(C.COLOR_TRANSFER_ST2084)
            .setColorRange(C.COLOR_RANGE_FULL)
            .build();
    videoFrameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setOutputColorInfo(outputColorInfo)
            .setEffects(NO_OP_EFFECT)
            .build();
    Bitmap originalBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_TO_PQ_PNG_ASSET_PATH);

    videoFrameProcessorTestRunner.queueInputBitmap(
        originalBitmap,
        /* durationUs= */ C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0L,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();
    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
            expectedBitmap, actualBitmap);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
  }

  private VideoFrameProcessorTestRunner getTexIdProducingFrameProcessorTestRunner(
      String testId,
      TextureBitmapReader consumersBitmapReader,
@@ -555,7 +675,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
    textureProducer.releaseOutputTexture(presentationTimeUs);
  }

  private VideoFrameProcessorTestRunner.Builder getSurfaceInputFrameProcessorTestRunnerBuilder(
  private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
      String testId) {
    TextureBitmapReader textureBitmapReader = new TextureBitmapReader();
    DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
@@ -574,9 +694,22 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .setBitmapReader(textureBitmapReader);
  }

  private static boolean deviceSupportsHdrEditing(Format format) {
    return !EncoderUtil.getSupportedEncodersForHdrEditing(
            checkNotNull(checkNotNull(format).sampleMimeType), format.colorInfo)
        .isEmpty();
  private void assumeDeviceSupportsHdrEditing(Format format) throws JSONException, IOException {
    if (getSupportedEncodersForHdrEditing(format.sampleMimeType, format.colorInfo).isEmpty()) {
      String skipReason =
          format.colorInfo.colorTransfer == C.COLOR_TRANSFER_HLG
              ? "No HLG editing support."
              : "No PQ editing support.";
      recordTestSkipped(getApplicationContext(), testId, skipReason);
      throw new AssumptionViolatedException(skipReason);
    }
  }

  private void assumeDeviceSupportsUltraHdrEditing() throws JSONException, IOException {
    if (Util.SDK_INT < 34) {
      recordTestSkipped(
          getApplicationContext(), testId, "Ultra HDR is not supported on this API level.");
      throw new AssumptionViolatedException("Ultra HDR is not supported on this API level.");
    }
  }
}