Tone map in CompositionPlayer if surface can't display HDR

PiperOrigin-RevId: 740290183
This commit is contained in:
Googler 2025-03-25 04:15:41 -07:00 committed by Copybara-Service
parent a1738f96f9
commit 6034a3c3d6
6 changed files with 377 additions and 9 deletions

View File

@ -228,6 +228,16 @@ public final class GlUtil {
return glExtensions != null && glExtensions.contains(EXTENSION_YUV_TARGET);
}
/**
 * Returns whether the given {@link C.ColorTransfer} is supported.
 *
 * <p>HDR transfers (PQ and HLG) are supported only when the corresponding BT.2020 EGL extension
 * is available; all other transfers are reported as supported.
 */
public static boolean isColorTransferSupported(@C.ColorTransfer int colorTransfer) {
  switch (colorTransfer) {
    case C.COLOR_TRANSFER_ST2084:
      return isBt2020PqExtensionSupported();
    case C.COLOR_TRANSFER_HLG:
      return isBt2020HlgExtensionSupported();
    default:
      return true;
  }
}
/** Returns whether {@link #EXTENSION_COLORSPACE_BT2020_PQ} is supported. */
public static boolean isBt2020PqExtensionSupported() {
// On API<33, the system cannot display PQ content correctly regardless of whether BT2020 PQ

View File

@ -48,7 +48,9 @@ import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.VideoSize;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.TimedValueQueue;
import androidx.media3.common.util.TimestampIterator;
@ -268,6 +270,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
@IntDef({STATE_CREATED, STATE_INITIALIZED, STATE_RELEASED})
private @interface State {}
private static final String TAG = "PlaybackVidGraphWrapper";
private static final int STATE_CREATED = 0;
private static final int STATE_INITIALIZED = 1;
private static final int STATE_RELEASED = 2;
@ -507,17 +510,26 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
ColorInfo outputColorInfo;
if (requestOpenGlToneMapping) {
outputColorInfo = ColorInfo.SDR_BT709_LIMITED;
} else if (inputColorInfo.colorTransfer == C.COLOR_TRANSFER_HLG
&& Util.SDK_INT < 34
&& GlUtil.isBt2020PqExtensionSupported()) {
// PQ SurfaceView output is supported from API 33, but HLG output is supported from API
// 34. Therefore, convert HLG to PQ if PQ is supported, so that HLG input can be displayed
// properly on API 33.
outputColorInfo =
inputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
// Force OpenGL tone mapping if the GL extension required to output HDR colors is not
// available. OpenGL tone mapping is only supported on API 29+.
} else if (!GlUtil.isColorTransferSupported(inputColorInfo.colorTransfer)
&& Util.SDK_INT >= 29) {
Log.w(
TAG,
Util.formatInvariant(
"Color transfer %d is not supported. Falling back to OpenGl tone mapping.",
inputColorInfo.colorTransfer));
outputColorInfo = ColorInfo.SDR_BT709_LIMITED;
} else {
outputColorInfo = inputColorInfo;
if (outputColorInfo.colorTransfer == C.COLOR_TRANSFER_HLG && Util.SDK_INT < 34) {
// PQ SurfaceView output is supported from API 33, but HLG output is supported from API
// 34.
// Therefore, convert HLG to PQ below API 34, so that HLG input can be displayed properly
// on
// API 33.
outputColorInfo =
outputColorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
}
}
handler = clock.createHandler(checkStateNotNull(Looper.myLooper()), /* callback= */ null);
try {

View File

@ -28,6 +28,7 @@ import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assume.assumeFalse;
import android.content.Context;
@ -43,14 +44,20 @@ import android.os.Build;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.VideoCompositorSettings;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.VideoGraph.Listener;
import androidx.media3.common.util.GlRect;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util;
import androidx.media3.effect.ByteBufferGlEffect;
@ -59,6 +66,7 @@ import androidx.media3.effect.GlEffect;
import androidx.media3.effect.GlShaderProgram;
import androidx.media3.effect.PassthroughShaderProgram;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.effect.SingleInputVideoGraph;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import androidx.media3.muxer.MuxerException;
@ -66,6 +74,7 @@ import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.FakeExtractorOutput;
import androidx.media3.test.utils.FakeTrackOutput;
import androidx.media3.test.utils.VideoDecodingWrapper;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
@ -75,8 +84,12 @@ import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.json.JSONException;
import org.json.JSONObject;
@ -1090,6 +1103,87 @@ public final class AndroidTestUtil {
}
}
/**
 * A {@link VideoGraph.Factory} that records test interactions.
 *
 * <p>Delegates graph creation to a {@link SingleInputVideoGraph.Factory} and records the
 * {@link ColorInfo} passed to {@link #create}, which tests can read back via
 * {@link #getOutputColorInfo()}.
 */
public static class TestVideoGraphFactory implements VideoGraph.Factory {
// The real factory that creation is delegated to.
private final VideoGraph.Factory singleInputVideoGraphFactory;
// The output color info most recently passed to create(), or null if create() was never called.
@Nullable private ColorInfo outputColorInfo;
public TestVideoGraphFactory() {
singleInputVideoGraphFactory = new SingleInputVideoGraph.Factory();
}
@Override
public VideoGraph create(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
Listener listener,
Executor listenerExecutor,
VideoCompositorSettings videoCompositorSettings,
List<Effect> compositionEffects,
long initialTimestampOffsetUs,
boolean renderFramesAutomatically) {
// Record the color info for later inspection, then delegate creation unchanged.
this.outputColorInfo = outputColorInfo;
return singleInputVideoGraphFactory.create(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
videoCompositorSettings,
compositionEffects,
initialTimestampOffsetUs,
renderFramesAutomatically);
}
/**
 * Runs the given task on the instrumentation's main thread and blocks until it completes, or
 * {@code timeoutSeconds} has elapsed.
 *
 * @param task The task to run.
 * @param timeoutSeconds The maximum number of seconds to wait for the task to complete.
 * @throws TimeoutException If the task does not complete within {@code timeoutSeconds}.
 * @throws InterruptedException If the waiting thread is interrupted.
 * @throws IllegalStateException If the task throws an exception, with that exception as cause.
 */
public static void runAsyncTaskAndWait(ThrowingRunnable task, int timeoutSeconds)
throws TimeoutException, InterruptedException {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<@NullableType Exception> unexpectedExceptionReference =
new AtomicReference<>();
InstrumentationRegistry.getInstrumentation()
.runOnMainSync(
() -> {
try {
task.run();
// Catch all exceptions to report. Exceptions thrown here and not caught will NOT
// propagate.
} catch (Exception e) {
unexpectedExceptionReference.set(e);
} finally {
countDownLatch.countDown();
}
});
// Block here until timeout reached or latch is counted down.
if (!countDownLatch.await(timeoutSeconds, SECONDS)) {
throw new TimeoutException("Timed out after " + timeoutSeconds + " seconds.");
}
// Re-throw any exception captured on the main thread so the test fails with its cause.
@Nullable Exception unexpectedException = unexpectedExceptionReference.get();
if (unexpectedException != null) {
throw new IllegalStateException(unexpectedException);
}
}
@Override
public boolean supportsMultipleInputs() {
return singleInputVideoGraphFactory.supportsMultipleInputs();
}
/** Returns the output color info recorded by {@link #create}, or null if not yet called. */
@Nullable
public ColorInfo getOutputColorInfo() {
return outputColorInfo;
}
}
/** A type that can be used to succinctly wrap throwing {@link Runnable} objects. */
public interface ThrowingRunnable {
/**
 * Runs the task.
 *
 * @throws Exception If an error occurs while running the task.
 */
void run() throws Exception;
}
/**
* Creates the GL objects needed to set up a GL environment including an {@link EGLDisplay} and an
* {@link EGLContext}.

View File

@ -24,6 +24,8 @@ import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.test.utils.TestUtil.assertBitmapsAreSimilar;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_COLOR_TEST_1080P_HLG10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_TRIM_OPTIMIZATION_270;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceDoesNotSupportHdrColorTransfer;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsHdrColorTransfer;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.SECONDS;
@ -104,6 +106,7 @@ public class FrameExtractorHdrTest {
@Test
public void extractFrame_oneFrameHlgWithHdrOutput_returnsHlgFrame() throws Exception {
assumeDeviceSupportsOpenGlToneMapping(testId, MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat);
assumeDeviceSupportsHdrColorTransfer(testId, MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat);
// HLG Bitmaps are only supported on API 34+.
assumeTrue(SDK_INT >= 34);
frameExtractor =
@ -134,6 +137,33 @@ public class FrameExtractorHdrTest {
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}
@Test
public void extractFrame_oneFrameHlgWithHdrDisplayUnsupported_returnsSdrFrame() throws Exception {
// Only run where OpenGL tone mapping works but the display cannot output the HDR color
// transfer — the case where the extractor is expected to fall back to an SDR frame.
assumeDeviceSupportsOpenGlToneMapping(testId, MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat);
assumeDeviceDoesNotSupportHdrColorTransfer(
testId, MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat);
// HLG Bitmaps are only supported on API 34+.
assumeTrue(SDK_INT >= 34);
// HDR frames are requested, but the unsupported display should force SDR tone mapping.
frameExtractor =
new ExperimentalFrameExtractor(
context,
new ExperimentalFrameExtractor.Configuration.Builder()
.setExtractHdrFrames(true)
.build());
frameExtractor.setMediaItem(
MediaItem.fromUri(MP4_ASSET_COLOR_TEST_1080P_HLG10.uri), /* effects= */ ImmutableList.of());
ListenableFuture<ExperimentalFrameExtractor.Frame> frameFuture =
frameExtractor.getFrame(/* positionMs= */ 0);
ExperimentalFrameExtractor.Frame frame = frameFuture.get(TIMEOUT_SECONDS, SECONDS);
Bitmap actualBitmap = frame.bitmap;
// The extracted frame should match the reference SDR (tone-mapped) bitmap.
Bitmap expectedBitmap = readBitmap(TONE_MAP_HLG_TO_SDR_PNG_ASSET_PATH);
maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
assertThat(frame.presentationTimeMs).isEqualTo(0);
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}
@Test
public void
extractFrame_changeMediaItemFromHdrToSdrWithToneMapping_extractsFrameFromTheCorrectItem()

View File

@ -16,14 +16,17 @@
package androidx.media3.transformer.mh;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import android.content.Context;
import android.opengl.EGLDisplay;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.GlUtil.GlException;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import androidx.media3.transformer.EncoderUtil;
@ -96,5 +99,49 @@ public final class HdrCapabilitiesUtil {
}
}
/**
 * Assumes that the device supports displaying the HDR color transfer of the given {@code
 * format}'s {@link Format#colorInfo}.
 *
 * @throws AssumptionViolatedException if the device does not support displaying the color
 *     transfer.
 */
public static void assumeDeviceSupportsHdrColorTransfer(String testId, Format format)
throws JSONException, IOException, GlException {
checkStateNotNull(format.colorInfo);
// Required to ensure EGL extensions are initialised.
@SuppressWarnings("unused")
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
if (!GlUtil.isColorTransferSupported(format.colorInfo.colorTransfer)) {
String skipReason =
"HDR display not supported for sampleMimeType "
+ format.sampleMimeType
+ " and colorInfo "
+ format.colorInfo;
recordTestSkipped(getApplicationContext(), testId, skipReason);
throw new AssumptionViolatedException(skipReason);
}
}
/**
 * Assumes that the device does not support displaying the HDR color transfer of the given {@code
 * format}'s {@link Format#colorInfo}.
 *
 * @throws AssumptionViolatedException if the device does support displaying the color transfer.
 */
public static void assumeDeviceDoesNotSupportHdrColorTransfer(String testId, Format format)
throws JSONException, IOException, GlException {
checkStateNotNull(format.colorInfo);
// Required to ensure EGL extensions are initialised.
@SuppressWarnings("unused")
EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
if (GlUtil.isColorTransferSupported(format.colorInfo.colorTransfer)) {
String skipReason =
"HDR display is supported for sampleMimeType "
+ format.sampleMimeType
+ " and colorInfo "
+ format.colorInfo;
recordTestSkipped(getApplicationContext(), testId, skipReason);
throw new AssumptionViolatedException(skipReason);
}
}
// Private constructor prevents instantiation of this utility class.
private HdrCapabilitiesUtil() {}
}

View File

@ -0,0 +1,175 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_BT2020_SDR;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_COLOR_TEST_1080P_HLG10;
import static androidx.media3.transformer.AndroidTestUtil.TestVideoGraphFactory.runAsyncTaskAndWait;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceDoesNotSupportHdrColorTransfer;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsHdrColorTransfer;
import static androidx.media3.transformer.mh.HdrCapabilitiesUtil.assumeDeviceSupportsOpenGlToneMapping;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.video.PlaybackVideoGraphWrapper;
import androidx.media3.exoplayer.video.VideoFrameReleaseControl;
import androidx.media3.exoplayer.video.VideoSink;
import androidx.media3.transformer.AndroidTestUtil.TestVideoGraphFactory;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
/** Instrumentation tests for {@link PlaybackVideoGraphWrapper}. */
@RunWith(AndroidJUnit4.class)
public class PlaybackVideoGraphWrapperTest {
// Timeout, in seconds, for main-thread tasks run via runAsyncTaskAndWait.
private static final int TEST_TIMEOUT_SECOND = 1;
@Rule public final TestName testName = new TestName();
// Test identifier used when recording skipped tests; derived from the method name.
private String testId;
@Before
public void setUpTestId() {
testId = testName.getMethodName();
}
// SDR input needs no tone mapping, so the output color info should equal the input's.
@Test
public void initialize_sdrInput_retainsSdr() throws Exception {
Format inputFormat = MP4_ASSET_BT2020_SDR.videoFormat;
TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(testVideoGraphFactory);
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
runAsyncTaskAndWait(() -> sink.initialize(inputFormat), TEST_TIMEOUT_SECOND);
assertThat(testVideoGraphFactory.getOutputColorInfo()).isEqualTo(inputFormat.colorInfo);
}
// When the display cannot output HDR10 (PQ), the wrapper should tone map to SDR.
@Test
public void initialize_hdr10InputUnsupported_toneMapsToSdr() throws Exception {
Format inputFormat = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
assumeDeviceSupportsOpenGlToneMapping(testId, inputFormat);
assumeDeviceDoesNotSupportHdrColorTransfer(testId, inputFormat);
TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(testVideoGraphFactory);
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
runAsyncTaskAndWait(() -> sink.initialize(inputFormat), TEST_TIMEOUT_SECOND);
assertThat(testVideoGraphFactory.getOutputColorInfo()).isEqualTo(ColorInfo.SDR_BT709_LIMITED);
}
// When the display cannot output HLG, the wrapper either converts HLG to PQ (API 33 with the
// PQ extension available) or tone maps to SDR.
@Test
public void initialize_hlgInputUnsupported_toneMapsToSdr() throws Exception {
Format inputFormat = MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat;
assumeDeviceSupportsOpenGlToneMapping(testId, inputFormat);
assumeDeviceDoesNotSupportHdrColorTransfer(testId, inputFormat);
TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(testVideoGraphFactory);
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
runAsyncTaskAndWait(() -> sink.initialize(inputFormat), TEST_TIMEOUT_SECOND);
ColorInfo expectedColorInfo;
// HLG is converted to PQ on API 33.
if (Util.SDK_INT < 34 && GlUtil.isBt2020PqExtensionSupported()) {
expectedColorInfo =
inputFormat.colorInfo.buildUpon().setColorTransfer(C.COLOR_TRANSFER_ST2084).build();
} else {
expectedColorInfo = ColorInfo.SDR_BT709_LIMITED;
}
assertThat(testVideoGraphFactory.getOutputColorInfo()).isEqualTo(expectedColorInfo);
}
// When the display supports HDR10, the output color info should be unchanged.
@Test
public void initialize_hdr10InputSupported_retainsHdr() throws Exception {
Format inputFormat = MP4_ASSET_720P_4_SECOND_HDR10.videoFormat;
assumeDeviceSupportsHdrColorTransfer(testId, inputFormat);
TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(testVideoGraphFactory);
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
runAsyncTaskAndWait(() -> sink.initialize(inputFormat), TEST_TIMEOUT_SECOND);
assertThat(testVideoGraphFactory.getOutputColorInfo()).isEqualTo(inputFormat.colorInfo);
}
// When the display supports HLG, the output color info should be unchanged.
@Test
public void initialize_hlgInputSupported_retainsHdr() throws Exception {
Format inputFormat = MP4_ASSET_COLOR_TEST_1080P_HLG10.videoFormat;
assumeDeviceSupportsHdrColorTransfer(testId, inputFormat);
TestVideoGraphFactory testVideoGraphFactory = new TestVideoGraphFactory();
PlaybackVideoGraphWrapper playbackVideoGraphWrapper =
createPlaybackVideoGraphWrapper(testVideoGraphFactory);
VideoSink sink = playbackVideoGraphWrapper.getSink(/* inputIndex= */ 0);
runAsyncTaskAndWait(() -> sink.initialize(inputFormat), TEST_TIMEOUT_SECOND);
assertThat(testVideoGraphFactory.getOutputColorInfo()).isEqualTo(inputFormat.colorInfo);
}
// Builds a wrapper that uses the given factory so tests can observe the chosen output color.
private static PlaybackVideoGraphWrapper createPlaybackVideoGraphWrapper(
VideoGraph.Factory videoGraphFactory) {
Context context = ApplicationProvider.getApplicationContext();
return new PlaybackVideoGraphWrapper.Builder(context, createVideoFrameReleaseControl())
.setVideoGraphFactory(videoGraphFactory)
.build();
}
// Release control with a no-op timing evaluator: never force-release, drop, or ignore frames.
private static VideoFrameReleaseControl createVideoFrameReleaseControl() {
Context context = ApplicationProvider.getApplicationContext();
VideoFrameReleaseControl.FrameTimingEvaluator frameTimingEvaluator =
new VideoFrameReleaseControl.FrameTimingEvaluator() {
@Override
public boolean shouldForceReleaseFrame(long earlyUs, long elapsedSinceLastReleaseUs) {
return false;
}
@Override
public boolean shouldDropFrame(
long earlyUs, long elapsedRealtimeUs, boolean isLastFrame) {
return false;
}
@Override
public boolean shouldIgnoreFrame(
long earlyUs,
long positionUs,
long elapsedRealtimeUs,
boolean isLastFrame,
boolean treatDroppedBuffersAsSkipped) {
return false;
}
};
return new VideoFrameReleaseControl(
context, frameTimingEvaluator, /* allowedJoiningTimeMs= */ 0);
}
}