Add support for ultra HDR overlays
PiperOrigin-RevId: 637863706
Commit 9622411b50 (parent 3bb6cf2129)
@@ -139,6 +139,7 @@
     ([#1055](https://github.com/androidx/media/pull/1055)).
   * Maintain a consistent luminance range across different HDR content (uses
     the HLG range).
+  * Add support for Ultra HDR (bitmap) overlays on HDR content.
 * Muxers:
 * IMA extension:
   * Promote API that is required for apps to play
@@ -0,0 +1,72 @@
+// The value is calculated as targetHdrPeakBrightnessInNits /
+// targetSdrWhitePointInNits. In other effect HDR processing and some parts of
+// the wider android ecosystem the assumption is
+// targetHdrPeakBrightnessInNits=1000 and targetSdrWhitePointInNits=500
+const float HDR_SDR_RATIO = 2.0;
+
+// Matrix values are calculated as inverse of RGB_BT2020_TO_XYZ.
+const mat3 XYZ_TO_RGB_BT2020 =
+    mat3(1.71665, -0.666684, 0.0176399, -0.355671, 1.61648, -0.0427706,
+         -0.253366, 0.0157685, 0.942103);
+// Matrix values are calculated as inverse of XYZ_TO_RGB_BT709.
+const mat3 RGB_BT709_TO_XYZ =
+    mat3(0.412391, 0.212639, 0.0193308, 0.357584, 0.715169, 0.119195, 0.180481,
+         0.0721923, 0.950532);
+
+// Reference:
+// https://developer.android.com/reference/android/graphics/Gainmap#applying-a-gainmap-manually
+// Reference Implementation:
+// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/base/libs/hwui/effects/GainmapRenderer.cpp;l=117-146;drc=fadc20184ccb27fe15bb862e6e03fa6d05d41eac
+highp vec3 applyGainmap(vec4 S, vec4 G, int uGainmapIsAlpha, int uNoGamma,
+                        int uSingleChannel, vec4 uLogRatioMin,
+                        vec4 uLogRatioMax, vec4 uEpsilonSdr, vec4 uEpsilonHdr,
+                        vec4 uGainmapGamma, float uDisplayRatioHdr,
+                        float uDisplayRatioSdr) {
+  float W = clamp((log(HDR_SDR_RATIO) - log(uDisplayRatioSdr)) /
+                      (log(uDisplayRatioHdr) - log(uDisplayRatioSdr)),
+                  0.0, 1.0);
+  vec3 H;
+  if (uGainmapIsAlpha == 1) {
+    G = vec4(G.a, G.a, G.a, 1.0);
+  }
+  if (uSingleChannel == 1) {
+    mediump float L;
+    if (uNoGamma == 1) {
+      L = mix(uLogRatioMin.r, uLogRatioMax.r, G.r);
+    } else {
+      L = mix(uLogRatioMin.r, uLogRatioMax.r, pow(G.r, uGainmapGamma.r));
+    }
+    H = (S.rgb + uEpsilonSdr.rgb) * exp(L * W) - uEpsilonHdr.rgb;
+  } else {
+    mediump vec3 L;
+    if (uNoGamma == 1) {
+      L = mix(uLogRatioMin.rgb, uLogRatioMax.rgb, G.rgb);
+    } else {
+      L = mix(uLogRatioMin.rgb, uLogRatioMax.rgb,
+              pow(G.rgb, uGainmapGamma.rgb));
+    }
+    H = (S.rgb + uEpsilonSdr.rgb) * exp(L * W) - uEpsilonHdr.rgb;
+  }
+  return H;
+}
+
+highp vec3 bt709ToBt2020(vec3 bt709Color) {
+  return XYZ_TO_RGB_BT2020 * RGB_BT709_TO_XYZ * bt709Color;
+}
+
+vec3 scaleHdrLuminance(vec3 linearColor) {
+  const float SDR_MAX_LUMINANCE = 500.0;
+  const float HDR_MAX_LUMINANCE = 1000.0;
+  return linearColor * SDR_MAX_LUMINANCE / HDR_MAX_LUMINANCE;
+}
+
+// sRGB EOTF for one channel.
+float srgbEotfSingleChannel(float srgb) {
+  return srgb <= 0.04045 ? srgb / 12.92 : pow((srgb + 0.055) / 1.055, 2.4);
+}
+
+// sRGB EOTF.
+vec4 srgbEotf(vec4 srgb) {
+  return vec4(srgbEotfSingleChannel(srgb.r), srgbEotfSingleChannel(srgb.g),
+              srgbEotfSingleChannel(srgb.b), srgb.a);
+}
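For orientation, the gainmap math this GLSL insert implements, restated from the Android gainmap documentation linked in the comments (symbols follow the shader; the target ratio is the HDR_SDR_RATIO = 2.0 assumption above):

\[ W = \mathrm{clamp}\!\left(\frac{\ln(\mathrm{HDR\_SDR\_RATIO}) - \ln(\mathrm{displayRatioSdr})}{\ln(\mathrm{displayRatioHdr}) - \ln(\mathrm{displayRatioSdr})},\ 0,\ 1\right) \]
\[ L = \mathrm{mix}\big(\ln(\mathrm{ratioMin}),\ \ln(\mathrm{ratioMax}),\ G^{\gamma}\big), \qquad H = (S + \epsilon_{\mathrm{sdr}})\, e^{L W} - \epsilon_{\mathrm{hdr}} \]

where G is the gainmap sample (or its alpha channel when uGainmapIsAlpha is set), S the SDR base color, and H the recovered linear HDR color; when the gamma is 1.0 the pow is skipped, exactly as in the uNoGamma branch.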
@@ -21,10 +21,8 @@ import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.effect.DefaultVideoFrameProcessor.WORKING_COLOR_SPACE_LINEAR;
-import static java.lang.Math.log;
 
 import android.content.Context;
-import android.graphics.Bitmap;
 import android.graphics.Gainmap;
 import android.opengl.GLES20;
 import android.opengl.Matrix;
@@ -694,33 +692,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       throw new IllegalStateException("Gainmaps not supported under API 34.");
     }
     glProgram.setSamplerTexIdUniform("uGainmapTexSampler", gainmapTexId, /* texUnitIndex= */ 1);
+    GainmapUtil.setGainmapUniforms(glProgram, lastGainmap, C.INDEX_UNSET);
-    boolean gainmapIsAlpha = lastGainmap.getGainmapContents().getConfig() == Bitmap.Config.ALPHA_8;
-    float[] gainmapGamma = lastGainmap.getGamma();
-    boolean noGamma = gainmapGamma[0] == 1f && gainmapGamma[1] == 1f && gainmapGamma[2] == 1f;
-    boolean singleChannel =
-        areAllChannelsEqual(gainmapGamma)
-            && areAllChannelsEqual(lastGainmap.getRatioMax())
-            && areAllChannelsEqual(lastGainmap.getRatioMin());
-
-    glProgram.setIntUniform("uGainmapIsAlpha", gainmapIsAlpha ? GL_TRUE : GL_FALSE);
-    glProgram.setIntUniform("uNoGamma", noGamma ? GL_TRUE : GL_FALSE);
-    glProgram.setIntUniform("uSingleChannel", singleChannel ? GL_TRUE : GL_FALSE);
-    glProgram.setFloatsUniform("uLogRatioMin", logRgb(lastGainmap.getRatioMin()));
-    glProgram.setFloatsUniform("uLogRatioMax", logRgb(lastGainmap.getRatioMax()));
-    glProgram.setFloatsUniform("uEpsilonSdr", lastGainmap.getEpsilonSdr());
-    glProgram.setFloatsUniform("uEpsilonHdr", lastGainmap.getEpsilonHdr());
-    glProgram.setFloatsUniform("uGainmapGamma", gainmapGamma);
-    glProgram.setFloatUniform("uDisplayRatioHdr", lastGainmap.getDisplayRatioForFullHdr());
-    glProgram.setFloatUniform("uDisplayRatioSdr", lastGainmap.getMinDisplayRatioForHdrTransition());
-    GlUtil.checkGlError();
-  }
-
-  private static boolean areAllChannelsEqual(float[] channels) {
-    return channels[0] == channels[1] && channels[1] == channels[2];
-  }
-
-  private static float[] logRgb(float[] values) {
-    return new float[] {(float) log(values[0]), (float) log(values[1]), (float) log(values[2])};
-  }
   }
 }
@@ -15,8 +15,16 @@
  */
 package androidx.media3.effect;
 
+import static android.opengl.GLES20.GL_FALSE;
+import static android.opengl.GLES20.GL_TRUE;
+import static java.lang.Math.log;
+
+import android.graphics.Bitmap;
 import android.graphics.Gainmap;
 import androidx.annotation.RequiresApi;
+import androidx.media3.common.C;
+import androidx.media3.common.util.GlProgram;
+import androidx.media3.common.util.GlUtil;
 import androidx.media3.common.util.UnstableApi;
 
 /** Utilities for Gainmaps. */
@@ -37,4 +45,51 @@ import androidx.media3.common.util.UnstableApi;
         && g1.getGainmapContents() == g2.getGainmapContents()
         && g1.getGainmapContents().getGenerationId() == g2.getGainmapContents().getGenerationId();
   }
+
+  /**
+   * Sets the uniforms for applying a gainmap to a base image.
+   *
+   * @param glProgram The {@link GlProgram}.
+   * @param gainmap The {@link Gainmap}.
+   * @param index The index to add to the end of the uniforms, or {@link C#INDEX_UNSET} if no index
+   *     is to be added.
+   */
+  @RequiresApi(34)
+  public static void setGainmapUniforms(GlProgram glProgram, Gainmap gainmap, int index)
+      throws GlUtil.GlException {
+    boolean gainmapIsAlpha = gainmap.getGainmapContents().getConfig() == Bitmap.Config.ALPHA_8;
+    float[] gainmapGamma = gainmap.getGamma();
+    boolean noGamma = gainmapGamma[0] == 1f && gainmapGamma[1] == 1f && gainmapGamma[2] == 1f;
+    boolean singleChannel =
+        areAllChannelsEqual(gainmapGamma)
+            && areAllChannelsEqual(gainmap.getRatioMax())
+            && areAllChannelsEqual(gainmap.getRatioMin());
+
+    glProgram.setIntUniform(
+        addIndex("uGainmapIsAlpha", index), gainmapIsAlpha ? GL_TRUE : GL_FALSE);
+    glProgram.setIntUniform(addIndex("uNoGamma", index), noGamma ? GL_TRUE : GL_FALSE);
+    glProgram.setIntUniform(addIndex("uSingleChannel", index), singleChannel ? GL_TRUE : GL_FALSE);
+    glProgram.setFloatsUniform(addIndex("uLogRatioMin", index), logRgb(gainmap.getRatioMin()));
+    glProgram.setFloatsUniform(addIndex("uLogRatioMax", index), logRgb(gainmap.getRatioMax()));
+    glProgram.setFloatsUniform(addIndex("uEpsilonSdr", index), gainmap.getEpsilonSdr());
+    glProgram.setFloatsUniform(addIndex("uEpsilonHdr", index), gainmap.getEpsilonHdr());
+    glProgram.setFloatsUniform(addIndex("uGainmapGamma", index), gainmapGamma);
+    glProgram.setFloatUniform(
+        addIndex("uDisplayRatioHdr", index), gainmap.getDisplayRatioForFullHdr());
+    glProgram.setFloatUniform(
+        addIndex("uDisplayRatioSdr", index), gainmap.getMinDisplayRatioForHdrTransition());
+    GlUtil.checkGlError();
+  }
+
+  private static boolean areAllChannelsEqual(float[] channels) {
+    return channels[0] == channels[1] && channels[1] == channels[2];
+  }
+
+  private static String addIndex(String s, int index) {
+    return index == C.INDEX_UNSET ? s : s + index;
+  }
+
+  private static float[] logRgb(float[] values) {
+    return new float[] {(float) log(values[0]), (float) log(values[1]), (float) log(values[2])};
+  }
 }
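A quick sketch of how the helper's index parameter affects uniform names (grounded in addIndex above; the two call forms correspond to the call sites elsewhere in this commit):

  // Un-indexed: matches the single-gainmap path, which binds "uLogRatioMin", "uEpsilonSdr", etc.
  GainmapUtil.setGainmapUniforms(glProgram, gainmap, C.INDEX_UNSET);
  // Indexed: matches the overlay shader, which declares per-overlay uniforms such as "uLogRatioMin1".
  GainmapUtil.setGainmapUniforms(glProgram, gainmap, /* index= */ 1);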
@@ -25,7 +25,8 @@ import com.google.common.collect.ImmutableList;
  * is displayed on top).
  *
  * <p>This effect assumes a non-{@linkplain DefaultVideoFrameProcessor#WORKING_COLOR_SPACE_LINEAR
- * linear} working color space.
+ * linear} working color space for SDR input and a {@linkplain
+ * DefaultVideoFrameProcessor#WORKING_COLOR_SPACE_LINEAR linear} working color space for HDR input.
  */
 @UnstableApi
 public final class OverlayEffect implements GlEffect {
@@ -44,6 +45,6 @@ public final class OverlayEffect implements GlEffect {
   @Override
   public BaseGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
       throws VideoFrameProcessingException {
-    return new OverlayShaderProgram(useHdr, overlays);
+    return new OverlayShaderProgram(context, useHdr, overlays);
   }
 }
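For context, a minimal usage sketch of the new capability (the file path and decode step are illustrative assumptions, not part of the commit; on API 34+ BitmapFactory keeps an Ultra HDR JPEG's gainmap attached to the decoded Bitmap):

  Bitmap ultraHdrBitmap = BitmapFactory.decodeFile("/sdcard/overlay_ultrahdr.jpg");
  OverlayEffect overlayEffect =
      new OverlayEffect(ImmutableList.of(BitmapOverlay.createStaticBitmapOverlay(ultraHdrBitmap)));
  // overlayEffect can then be added to the effects of an HDR export or playback pipeline.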
@@ -16,44 +16,78 @@
 package androidx.media3.effect;
 
 import static androidx.media3.common.util.Assertions.checkArgument;
+import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Util.formatInvariant;
+import static androidx.media3.common.util.Util.loadAsset;
 
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Gainmap;
 import android.opengl.GLES20;
+import android.util.SparseArray;
+import android.util.SparseIntArray;
+import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.util.GlProgram;
 import androidx.media3.common.util.GlUtil;
 import androidx.media3.common.util.Size;
+import androidx.media3.common.util.Util;
 import com.google.common.collect.ImmutableList;
+import java.io.IOException;
 
 /** Applies zero or more {@link TextureOverlay}s onto each frame. */
 /* package */ final class OverlayShaderProgram extends BaseGlShaderProgram {
 
+  private static final String ULTRA_HDR_INSERT = "shaders/insert_ultra_hdr.glsl";
+
   private final GlProgram glProgram;
   private final SamplerOverlayMatrixProvider samplerOverlayMatrixProvider;
   private final ImmutableList<TextureOverlay> overlays;
+  private final boolean useHdr;
+  private final SparseArray<Gainmap> lastGainmaps;
+  private final SparseIntArray gainmapTexIds;
 
   /**
    * Creates a new instance.
    *
+   * @param context The {@link Context}
    * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
-   *     in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
+   *     in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. useHdr is
+   *     only supported on API 34+ for {@link BitmapOverlay}s, where the {@link Bitmap} contains a
+   *     {@link Gainmap}.
    * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
    */
-  public OverlayShaderProgram(boolean useHdr, ImmutableList<TextureOverlay> overlays)
+  public OverlayShaderProgram(
+      Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays)
       throws VideoFrameProcessingException {
     super(/* useHighPrecisionColorComponents= */ useHdr, /* texturePoolCapacity= */ 1);
-    checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet.");
+    if (useHdr) {
+      // Each UltraHDR overlay uses an extra texture to apply the gainmap to the base in the shader.
+      checkArgument(
+          overlays.size() <= 7,
+          "OverlayShaderProgram does not support more than 7 HDR overlays in the same instance.");
+      checkArgument(Util.SDK_INT >= 34);
+    } else {
       // The maximum number of samplers allowed in a single GL program is 16.
       // We use one for every overlay and one for the video.
       checkArgument(
           overlays.size() <= 15,
-        "OverlayShaderProgram does not support more than 15 overlays in the same instance.");
+          "OverlayShaderProgram does not support more than 15 SDR overlays in the same instance.");
+    }
+
+    this.useHdr = useHdr;
     this.overlays = overlays;
     this.samplerOverlayMatrixProvider = new SamplerOverlayMatrixProvider();
+    lastGainmaps = new SparseArray<>();
+    gainmapTexIds = new SparseIntArray();
     try {
       glProgram =
-          new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size()));
-    } catch (GlUtil.GlException e) {
+          new GlProgram(
+              createVertexShader(overlays.size()),
+              createFragmentShader(context, overlays.size(), useHdr));
+    } catch (GlUtil.GlException | IOException e) {
       throw new VideoFrameProcessingException(e);
     }
 
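The overlay-count limits in this constructor follow from the sampler budget described in the comments; roughly (a sketch, not code from the commit):

  int maxSamplers = 16;                                    // per the comment: samplers allowed in one GL program
  int videoSamplers = 1;                                   // one sampler for the video frame itself
  int maxSdrOverlays = maxSamplers - videoSamplers;        // 15: one sampler per SDR overlay
  int maxHdrOverlays = (maxSamplers - videoSamplers) / 2;  // 7: each HDR overlay also needs a gainmap texture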
@@ -74,12 +108,34 @@ import com.google.common.collect.ImmutableList;
   }
 
   @Override
+  @SuppressLint("NewApi") // Checked API level in constructor
   public void drawFrame(int inputTexId, long presentationTimeUs)
       throws VideoFrameProcessingException {
     try {
       glProgram.use();
       for (int texUnitIndex = 1; texUnitIndex <= overlays.size(); texUnitIndex++) {
         TextureOverlay overlay = overlays.get(texUnitIndex - 1);
+
+        if (useHdr) {
+          checkArgument(overlay instanceof BitmapOverlay);
+          Bitmap bitmap = ((BitmapOverlay) overlay).getBitmap(presentationTimeUs);
+          checkArgument(bitmap.hasGainmap());
+          Gainmap gainmap = checkNotNull(bitmap.getGainmap());
+          @Nullable Gainmap lastGainmap = lastGainmaps.get(texUnitIndex);
+          if (lastGainmap == null || !GainmapUtil.equals(lastGainmap, gainmap)) {
+            lastGainmaps.put(texUnitIndex, gainmap);
+            if (gainmapTexIds.get(texUnitIndex, /* valueIfKeyNotFound= */ C.INDEX_UNSET)
+                == C.INDEX_UNSET) {
+              gainmapTexIds.put(texUnitIndex, GlUtil.createTexture(gainmap.getGainmapContents()));
+            } else {
+              GlUtil.setTexture(gainmapTexIds.get(texUnitIndex), gainmap.getGainmapContents());
+            }
+            glProgram.setSamplerTexIdUniform(
+                "uGainmapTexSampler" + texUnitIndex, gainmapTexIds.get(texUnitIndex), texUnitIndex);
+            GainmapUtil.setGainmapUniforms(glProgram, lastGainmaps.get(texUnitIndex), texUnitIndex);
+          }
+        }
+
         glProgram.setSamplerTexIdUniform(
             formatInvariant("uOverlayTexSampler%d", texUnitIndex),
             overlay.getTextureId(presentationTimeUs),
|
|||||||
super.release();
|
super.release();
|
||||||
try {
|
try {
|
||||||
glProgram.delete();
|
glProgram.delete();
|
||||||
} catch (GlUtil.GlException e) {
|
|
||||||
throw new VideoFrameProcessingException(e);
|
|
||||||
}
|
|
||||||
for (int i = 0; i < overlays.size(); i++) {
|
for (int i = 0; i < overlays.size(); i++) {
|
||||||
overlays.get(i).release();
|
overlays.get(i).release();
|
||||||
|
if (useHdr) {
|
||||||
|
int gainmapTexId = gainmapTexIds.get(i, /* valueIfKeyNotFound= */ C.INDEX_UNSET);
|
||||||
|
if (gainmapTexId != C.INDEX_UNSET) {
|
||||||
|
GlUtil.deleteTexture(gainmapTexId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (GlUtil.GlException e) {
|
||||||
|
throw new VideoFrameProcessingException(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -159,13 +221,15 @@ import com.google.common.collect.ImmutableList;
     return shader.toString();
   }
 
-  private static String createFragmentShader(int numOverlays) {
+  private static String createFragmentShader(Context context, int numOverlays, boolean useHdr)
+      throws IOException {
     StringBuilder shader =
         new StringBuilder()
             .append("#version 100\n")
             .append("precision mediump float;\n")
             .append("uniform sampler2D uVideoTexSampler0;\n")
             .append("varying vec2 vVideoTexSamplingCoord0;\n")
+            .append("\n")
             .append("// Manually implementing the CLAMP_TO_BORDER texture wrapping option\n")
             .append(
                 "// (https://open.gl/textures) since it's not implemented until OpenGL ES 3.2.\n")
|
|||||||
.append("}\n")
|
.append("}\n")
|
||||||
.append("\n");
|
.append("\n");
|
||||||
|
|
||||||
|
if (useHdr) {
|
||||||
|
shader.append(loadAsset(context, ULTRA_HDR_INSERT));
|
||||||
|
}
|
||||||
|
|
||||||
for (int texUnitIndex = 1; texUnitIndex <= numOverlays; texUnitIndex++) {
|
for (int texUnitIndex = 1; texUnitIndex <= numOverlays; texUnitIndex++) {
|
||||||
shader
|
shader
|
||||||
.append(formatInvariant("uniform sampler2D uOverlayTexSampler%d;\n", texUnitIndex))
|
.append(formatInvariant("uniform sampler2D uOverlayTexSampler%d;\n", texUnitIndex))
|
||||||
.append(formatInvariant("uniform float uOverlayAlphaScale%d;\n", texUnitIndex))
|
.append(formatInvariant("uniform float uOverlayAlphaScale%d;\n", texUnitIndex))
|
||||||
.append(formatInvariant("varying vec2 vOverlayTexSamplingCoord%d;\n", texUnitIndex));
|
.append(formatInvariant("varying vec2 vOverlayTexSamplingCoord%d;\n", texUnitIndex))
|
||||||
|
.append("\n");
|
||||||
|
if (useHdr) {
|
||||||
|
shader
|
||||||
|
.append("// Uniforms for applying the gainmap to the base.\n")
|
||||||
|
.append(formatInvariant("uniform sampler2D uGainmapTexSampler%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform int uGainmapIsAlpha%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform int uNoGamma%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform int uSingleChannel%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform vec4 uLogRatioMin%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform vec4 uLogRatioMax%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform vec4 uEpsilonSdr%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform vec4 uEpsilonHdr%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform vec4 uGainmapGamma%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform float uDisplayRatioHdr%d;\n", texUnitIndex))
|
||||||
|
.append(formatInvariant("uniform float uDisplayRatioSdr%d;\n", texUnitIndex))
|
||||||
|
.append("\n");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
shader
|
shader
|
||||||
@ -212,11 +297,41 @@ import com.google.common.collect.ImmutableList;
|
|||||||
.append(
|
.append(
|
||||||
formatInvariant(
|
formatInvariant(
|
||||||
" uOverlayTexSampler%d, vOverlayTexSamplingCoord%d, uOverlayAlphaScale%d);\n",
|
" uOverlayTexSampler%d, vOverlayTexSamplingCoord%d, uOverlayAlphaScale%d);\n",
|
||||||
|
texUnitIndex, texUnitIndex, texUnitIndex));
|
||||||
|
String overlayMixColor = "electricalOverlayColor";
|
||||||
|
if (useHdr) {
|
||||||
|
shader
|
||||||
|
.append(
|
||||||
|
formatInvariant(
|
||||||
|
" vec4 gainmap%d = texture2D(uGainmapTexSampler%d,"
|
||||||
|
+ " vOverlayTexSamplingCoord%d);\n",
|
||||||
|
texUnitIndex, texUnitIndex, texUnitIndex))
|
||||||
|
.append(formatInvariant(" vec3 opticalBt709Color%d = applyGainmap(\n", texUnitIndex))
|
||||||
|
.append(
|
||||||
|
formatInvariant(
|
||||||
|
" srgbEotf(electricalOverlayColor%d), gainmap%d, uGainmapIsAlpha%d,\n",
|
||||||
texUnitIndex, texUnitIndex, texUnitIndex))
|
texUnitIndex, texUnitIndex, texUnitIndex))
|
||||||
.append(
|
.append(
|
||||||
formatInvariant(
|
formatInvariant(
|
||||||
" fragColor = getMixColor(fragColor, electricalOverlayColor%d);\n",
|
" uNoGamma%d, uSingleChannel%d, uLogRatioMin%d, uLogRatioMax%d,"
|
||||||
texUnitIndex));
|
+ " uEpsilonSdr%d,\n",
|
||||||
|
texUnitIndex, texUnitIndex, texUnitIndex, texUnitIndex, texUnitIndex))
|
||||||
|
.append(
|
||||||
|
formatInvariant(
|
||||||
|
" uEpsilonHdr%d, uGainmapGamma%d, uDisplayRatioHdr%d,"
|
||||||
|
+ " uDisplayRatioSdr%d);\n",
|
||||||
|
texUnitIndex, texUnitIndex, texUnitIndex, texUnitIndex))
|
||||||
|
.append(formatInvariant(" vec4 opticalBt2020OverlayColor%d =\n", texUnitIndex))
|
||||||
|
.append(
|
||||||
|
formatInvariant(
|
||||||
|
" vec4(scaleHdrLuminance(bt709ToBt2020(opticalBt709Color%d)),"
|
||||||
|
+ " electricalOverlayColor%d.a);\n",
|
||||||
|
texUnitIndex, texUnitIndex));
|
||||||
|
overlayMixColor = "opticalBt2020OverlayColor";
|
||||||
|
}
|
||||||
|
shader.append(
|
||||||
|
formatInvariant(
|
||||||
|
" fragColor = getMixColor(fragColor, %s%d);\n", overlayMixColor, texUnitIndex));
|
||||||
}
|
}
|
||||||
|
|
||||||
shader.append(" gl_FragColor = fragColor;\n").append("}\n");
|
shader.append(" gl_FragColor = fragColor;\n").append("}\n");
|
||||||
|
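Putting the string building together, the per-overlay block emitted for texUnitIndex 1 with useHdr enabled would look roughly like the following (reconstructed from the appends above; whitespace illustrative):

  vec4 gainmap1 = texture2D(uGainmapTexSampler1, vOverlayTexSamplingCoord1);
  vec3 opticalBt709Color1 = applyGainmap(
      srgbEotf(electricalOverlayColor1), gainmap1, uGainmapIsAlpha1,
      uNoGamma1, uSingleChannel1, uLogRatioMin1, uLogRatioMax1, uEpsilonSdr1,
      uEpsilonHdr1, uGainmapGamma1, uDisplayRatioHdr1, uDisplayRatioSdr1);
  vec4 opticalBt2020OverlayColor1 =
      vec4(scaleHdrLuminance(bt709ToBt2020(opticalBt709Color1)), electricalOverlayColor1.a);
  fragColor = getMixColor(fragColor, opticalBt2020OverlayColor1);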
Two new binary golden image assets (not shown in the diff): one 7.5 MiB and one 1.7 MiB.
@@ -34,6 +34,7 @@ import static com.google.common.truth.Truth.assertThat;
 
 import android.content.Context;
 import android.graphics.Bitmap;
+import android.graphics.Matrix;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.Effect;
@@ -94,6 +95,10 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
       "test-generated-goldens/hdr-goldens/original_hlg10_to_pq.png";
   private static final String PQ_TO_HLG_PNG_ASSET_PATH =
       "test-generated-goldens/hdr-goldens/original_hdr10_to_hlg.png";
+  private static final String ULTRA_HDR_OVERLAY_HLG_PNG_ASSET_PATH =
+      "test-generated-goldens/hdr-goldens/ultrahdr_overlay_hlg.png";
+  private static final String ULTRA_HDR_OVERLAY_PQ_PNG_ASSET_PATH =
+      "test-generated-goldens/hdr-goldens/ultrahdr_overlay_pq.png";
 
   /** Input SDR video of which we only use the first frame. */
   private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
@@ -222,6 +227,88 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
         .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
   }
 
+  @Test
+  public void ultraHdrBitmapOverlay_hlg10Input_matchesGoldenFile() throws Exception {
+    Context context = getApplicationContext();
+    Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
+    assumeDeviceSupportsUltraHdrEditing();
+    assumeDeviceSupportsHdrEditing(testId, format);
+    assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
+    ColorInfo colorInfo = checkNotNull(format.colorInfo);
+    Bitmap inputBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
+    inputBitmap =
+        Bitmap.createScaledBitmap(
+            inputBitmap,
+            inputBitmap.getWidth() / 8,
+            inputBitmap.getHeight() / 8,
+            /* filter= */ true);
+    Matrix matrix = new Matrix();
+    matrix.postRotate(/* degrees= */ 90);
+    Bitmap rotatedBitmap =
+        Bitmap.createBitmap(
+            inputBitmap,
+            /* x= */ 0,
+            /* y= */ 0,
+            inputBitmap.getWidth(),
+            inputBitmap.getHeight(),
+            matrix,
+            /* filter= */ true);
+    BitmapOverlay bitmapOverlay1 = BitmapOverlay.createStaticBitmapOverlay(inputBitmap);
+    BitmapOverlay bitmapOverlay2 = BitmapOverlay.createStaticBitmapOverlay(rotatedBitmap);
+    videoFrameProcessorTestRunner =
+        getDefaultFrameProcessorTestRunnerBuilder(testId)
+            .setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay1, bitmapOverlay2)))
+            .setOutputColorInfo(colorInfo)
+            .setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING)
+            .build();
+    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_OVERLAY_HLG_PNG_ASSET_PATH);
+
+    videoFrameProcessorTestRunner.processFirstFrameAndEnd();
+    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
+
+    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
+    float averagePixelAbsoluteDifference =
+        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
+            expectedBitmap, actualBitmap);
+    assertThat(averagePixelAbsoluteDifference)
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
+  }
+
+  @Test
+  public void ultraHdrBitmapOverlay_hdr10Input_matchesGoldenFile() throws Exception {
+    Context context = getApplicationContext();
+    Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
+    assumeDeviceSupportsUltraHdrEditing();
+    assumeDeviceSupportsHdrEditing(testId, format);
+    assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
+    ColorInfo colorInfo = checkNotNull(format.colorInfo);
+    Bitmap overlayBitmap = readBitmap(ULTRA_HDR_ASSET_PATH);
+    overlayBitmap =
+        Bitmap.createScaledBitmap(
+            overlayBitmap,
+            overlayBitmap.getWidth() / 8,
+            overlayBitmap.getHeight() / 8,
+            /* filter= */ true);
+    BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
+    videoFrameProcessorTestRunner =
+        getDefaultFrameProcessorTestRunnerBuilder(testId)
+            .setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
+            .setOutputColorInfo(colorInfo)
+            .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
+            .build();
+    Bitmap expectedBitmap = readBitmap(ULTRA_HDR_OVERLAY_PQ_PNG_ASSET_PATH);
+
+    videoFrameProcessorTestRunner.processFirstFrameAndEnd();
+    Bitmap actualBitmap = videoFrameProcessorTestRunner.getOutputBitmap();
+
+    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
+    float averagePixelAbsoluteDifference =
+        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
+            expectedBitmap, actualBitmap);
+    assertThat(averagePixelAbsoluteDifference)
+        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
+  }
+
   @Test
   public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
     Context context = getApplicationContext();
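As the test assumptions suggest, Ultra HDR overlays require platform gainmap support; a hedged sketch of the guard an app might apply before constructing the effect (variable names illustrative, not from the commit):

  boolean canUseUltraHdrOverlay =
      Build.VERSION.SDK_INT >= 34          // gainmap APIs require Android 14
          && overlayBitmap.hasGainmap();   // the overlay bitmap must carry a gainmap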